pub mod row_chunk;

use crate::{
    DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
    RunnableTag, TextObject, TreeSitterOptions,
    diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
    language_settings::{LanguageSettings, language_settings},
    outline::OutlineItem,
    row_chunk::RowChunks,
    syntax_map::{
        MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
        SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
    },
    task_context::RunnableRange,
    text_diff::text_diff,
    unified_diff_with_offsets,
};
pub use crate::{
    Grammar, Language, LanguageRegistry,
    diagnostic_set::DiagnosticSet,
    highlight_map::{HighlightId, HighlightMap},
    proto,
};
use anyhow::{Context as _, Result};
use clock::Lamport;
pub use clock::ReplicaId;
use collections::{HashMap, HashSet};
use encoding_rs::Encoding;
use fs::MTime;
use futures::channel::oneshot;
use gpui::{
    App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
    Task, TaskLabel, TextStyle,
};

use lsp::{LanguageServerId, NumberOrString};
use parking_lot::Mutex;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use settings::WorktreeId;
use smallvec::SmallVec;
use smol::future::yield_now;
use std::{
    any::Any,
    borrow::Cow,
    cell::Cell,
    cmp::{self, Ordering, Reverse},
    collections::{BTreeMap, BTreeSet},
    future::Future,
    iter::{self, Iterator, Peekable},
    mem,
    num::NonZeroU32,
    ops::{Deref, Range},
    path::PathBuf,
    rc,
    sync::{Arc, LazyLock},
    time::{Duration, Instant},
    vec,
};
use sum_tree::TreeMap;
use text::operation_queue::OperationQueue;
use text::*;
pub use text::{
    Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
    LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
    SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
    ToPointUtf16, Transaction, TransactionId, Unclipped,
};
use theme::{ActiveTheme as _, SyntaxTheme};
#[cfg(any(test, feature = "test-support"))]
use util::RandomCharIter;
use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};

#[cfg(any(test, feature = "test-support"))]
pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};

pub use lsp::DiagnosticSeverity;

/// A label for the background task spawned by the buffer to compute
/// a diff against the contents of its file.
pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);

/// Indicate whether a [`Buffer`] has permissions to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {
    /// The buffer is a mutable replica.
    ReadWrite,
    /// The buffer is a mutable replica, but toggled to be only readable.
    Read,
    /// The buffer is a read-only replica.
    ReadOnly,
}

impl Capability {
    /// Returns `true` if the capability is `ReadWrite`.
    pub fn editable(self) -> bool {
        matches!(self, Capability::ReadWrite)
    }
}

pub type BufferRow = u32;

/// An in-memory representation of a source code file, including its text,
/// syntax trees, git status, and diagnostics.
pub struct Buffer {
    text: TextBuffer,
    branch_state: Option<BufferBranchState>,
    /// Filesystem state, `None` when there is no path.
    file: Option<Arc<dyn File>>,
    /// The mtime of the file when this buffer was last loaded from
    /// or saved to disk.
    saved_mtime: Option<MTime>,
    /// The version vector when this buffer was last loaded from
    /// or saved to disk.
    saved_version: clock::Global,
    preview_version: clock::Global,
    transaction_depth: usize,
    was_dirty_before_starting_transaction: Option<bool>,
    reload_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
    pending_autoindent: Option<Task<()>>,
    sync_parse_timeout: Duration,
    syntax_map: Mutex<SyntaxMap>,
    reparse: Option<Task<()>>,
    parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
    non_text_state_update_count: usize,
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    diagnostics_timestamp: clock::Lamport,
    completion_triggers: BTreeSet<String>,
    completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
    completion_triggers_timestamp: clock::Lamport,
    deferred_ops: OperationQueue<Operation>,
    capability: Capability,
    has_conflict: bool,
    /// Memoize calls to has_changes_since(saved_version).
    /// The contents of a cell are (self.version, has_changes) at the time of the last call.
    has_unsaved_edits: Cell<(clock::Global, bool)>,
    change_bits: Vec<rc::Weak<Cell<bool>>>,
    _subscriptions: Vec<gpui::Subscription>,
    tree_sitter_data: Arc<TreeSitterData>,
    encoding: &'static Encoding,
    has_bom: bool,
}

#[derive(Debug)]
pub struct TreeSitterData {
    chunks: RowChunks,
    brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
}

const MAX_ROWS_IN_A_CHUNK: u32 = 50;

impl TreeSitterData {
    fn clear(&mut self, snapshot: text::BufferSnapshot) {
        self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
        self.brackets_by_chunks.get_mut().clear();
        self.brackets_by_chunks
            .get_mut()
            .resize(self.chunks.len(), None);
    }

    fn new(snapshot: text::BufferSnapshot) -> Self {
        let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
        Self {
            brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
            chunks,
        }
    }

    fn version(&self) -> &clock::Global {
        self.chunks.version()
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ParseStatus {
    Idle,
    Parsing,
}

struct BufferBranchState {
    base_buffer: Entity<Buffer>,
    merged_operations: Vec<Lamport>,
}

/// An immutable, cheaply cloneable representation of a fixed
/// state of a buffer.
pub struct BufferSnapshot {
    pub text: text::BufferSnapshot,
    pub syntax: SyntaxSnapshot,
    file: Option<Arc<dyn File>>,
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    language: Option<Arc<Language>>,
    non_text_state_update_count: usize,
    tree_sitter_data: Arc<TreeSitterData>,
    pub capability: Capability,
}

/// The kind and amount of indentation in a particular line. For now,
/// assumes that indentation is all the same character.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub struct IndentSize {
    /// The number of bytes that comprise the indentation.
    pub len: u32,
    /// The kind of whitespace used for indentation.
    pub kind: IndentKind,
}

/// A whitespace character that's used for indentation.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub enum IndentKind {
    /// An ASCII space character.
    #[default]
    Space,
    /// An ASCII tab character.
    Tab,
}

/// The shape of a selection cursor.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum CursorShape {
    /// A vertical bar
    #[default]
    Bar,
    /// A block that surrounds the following character
    Block,
    /// An underline that runs along the following character
    Underline,
    /// A box drawn around the following character
    Hollow,
}

impl From<settings::CursorShape> for CursorShape {
    fn from(shape: settings::CursorShape) -> Self {
        match shape {
            settings::CursorShape::Bar => CursorShape::Bar,
            settings::CursorShape::Block => CursorShape::Block,
            settings::CursorShape::Underline => CursorShape::Underline,
            settings::CursorShape::Hollow => CursorShape::Hollow,
        }
    }
}

#[derive(Clone, Debug)]
struct SelectionSet {
    line_mode: bool,
    cursor_shape: CursorShape,
    selections: Arc<[Selection<Anchor>]>,
    lamport_timestamp: clock::Lamport,
}

/// A diagnostic associated with a certain range of a buffer.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Diagnostic {
    /// The name of the service that produced this diagnostic.
    pub source: Option<String>,
    /// The ID provided by the dynamic registration that produced this diagnostic.
    pub registration_id: Option<SharedString>,
    /// A machine-readable code that identifies this diagnostic.
    pub code: Option<NumberOrString>,
    /// A URI with more information about this diagnostic's code.
    pub code_description: Option<lsp::Uri>,
    /// Whether this diagnostic is a hint, warning, or error.
    pub severity: DiagnosticSeverity,
    /// The human-readable message associated with this diagnostic.
    pub message: String,
    /// The human-readable message, in Markdown format.
    pub markdown: Option<String>,
    /// An id that identifies the group to which this diagnostic belongs.
    ///
    /// When a language server produces a diagnostic with
    /// one or more associated diagnostics, those diagnostics are all
    /// assigned a single group ID.
    pub group_id: usize,
    /// Whether this diagnostic is the primary diagnostic for its group.
    ///
    /// In a given group, the primary diagnostic is the top-level diagnostic
    /// returned by the language server. The non-primary diagnostics are the
    /// associated diagnostics.
    pub is_primary: bool,
    /// Whether this diagnostic is considered to originate from an analysis of
    /// files on disk, as opposed to any unsaved buffer contents. This is a
    /// property of a given diagnostic source, and is configured for a given
    /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
    /// for the language server.
    pub is_disk_based: bool,
    /// Whether this diagnostic marks unnecessary code.
    pub is_unnecessary: bool,
    /// Quick separation of diagnostic groups based on their source.
    pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
    pub data: Option<Value>,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum DiagnosticSourceKind {
    Pulled,
    Pushed,
    Other,
}

/// An operation used to synchronize this buffer with its other replicas.
#[derive(Clone, Debug, PartialEq)]
pub enum Operation {
    /// A text operation.
    Buffer(text::Operation),

    /// An update to the buffer's diagnostics.
    UpdateDiagnostics {
        /// The id of the language server that produced the new diagnostics.
        server_id: LanguageServerId,
        /// The diagnostics.
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },

    /// An update to the most recent selections in this buffer.
    UpdateSelections {
        /// The selections.
        selections: Arc<[Selection<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// Whether the selections are in 'line mode'.
        line_mode: bool,
        /// The [`CursorShape`] associated with these selections.
        cursor_shape: CursorShape,
    },

    /// An update to the characters that should trigger autocompletion
    /// for this buffer.
    UpdateCompletionTriggers {
        /// The characters that trigger autocompletion.
        triggers: Vec<String>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// The language server ID.
        server_id: LanguageServerId,
    },

    /// An update to the line ending type of this buffer.
    UpdateLineEnding {
        /// The line ending type.
        line_ending: LineEnding,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },
}

/// An event that occurs in a buffer.
#[derive(Clone, Debug, PartialEq)]
pub enum BufferEvent {
    /// The buffer was changed in a way that must be
    /// propagated to its other replicas.
    Operation {
        operation: Operation,
        is_local: bool,
    },
    /// The buffer was edited.
    Edited,
    /// The buffer's `dirty` bit changed.
    DirtyChanged,
    /// The buffer was saved.
    Saved,
    /// The buffer's file was changed on disk.
    FileHandleChanged,
    /// The buffer was reloaded.
    Reloaded,
    /// The buffer is in need of a reload
    ReloadNeeded,
    /// The buffer's language was changed.
    /// The boolean indicates whether this buffer did not have a language before, but does now.
    LanguageChanged(bool),
    /// The buffer's syntax trees were updated.
    Reparsed,
    /// The buffer's diagnostics were updated.
    DiagnosticsUpdated,
    /// The buffer gained or lost editing capabilities.
    CapabilityChanged,
}

/// The file associated with a buffer.
pub trait File: Send + Sync + Any {
    /// Returns the [`LocalFile`] associated with this file, if the
    /// file is local.
    fn as_local(&self) -> Option<&dyn LocalFile>;

    /// Returns whether this file is local.
    fn is_local(&self) -> bool {
        self.as_local().is_some()
    }

    /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
    /// only available in some states, such as modification time.
    fn disk_state(&self) -> DiskState;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<RelPath>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self, cx: &App) -> PathBuf;

    /// Returns the path style of this file.
    fn path_style(&self, cx: &App) -> PathStyle;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;

    /// Returns the id of the worktree to which this file belongs.
    ///
    /// This is needed for looking up project-specific settings.
    fn worktree_id(&self, cx: &App) -> WorktreeId;

    /// Converts this file into a protobuf message.
    fn to_proto(&self, cx: &App) -> rpc::proto::File;

    /// Return whether Zed considers this to be a private file.
    fn is_private(&self) -> bool;
}

/// The file's storage status - whether it's stored (`Present`), and if so when it was last
/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
/// indicator for new files.
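///
/// As a rough sketch of how this state might be consumed, a UI layer could map
/// it to an indicator label (the `disk_state` value here is hypothetical):
///
/// ```ignore
/// let label = match disk_state {
///     DiskState::New => "unsaved",
///     DiskState::Present { .. } => "saved",
///     DiskState::Deleted => "deleted",
///     DiskState::Historic { .. } => "historical",
/// };
/// ```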
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum DiskState {
    /// File created in Zed that has not been saved.
    New,
    /// File present on the filesystem.
    Present { mtime: MTime },
    /// Deleted file that was previously present.
    Deleted,
    /// An old version of a file that was previously present,
    /// usually from a version control system (e.g. a git blob).
    Historic { was_deleted: bool },
}

impl DiskState {
    /// Returns the file's last known modification time on disk.
    pub fn mtime(self) -> Option<MTime> {
        match self {
            DiskState::New => None,
            DiskState::Present { mtime } => Some(mtime),
            DiskState::Deleted => None,
            DiskState::Historic { .. } => None,
        }
    }

    pub fn exists(&self) -> bool {
        match self {
            DiskState::New => false,
            DiskState::Present { .. } => true,
            DiskState::Deleted => false,
            DiskState::Historic { .. } => false,
        }
    }

    /// Returns true if this state represents a deleted file.
    pub fn is_deleted(&self) -> bool {
        match self {
            DiskState::Deleted => true,
            DiskState::Historic { was_deleted } => *was_deleted,
            _ => false,
        }
    }
}

/// The file associated with a buffer, in the case where the file is on the local disk.
pub trait LocalFile: File {
    /// Returns the absolute path of this file.
    fn abs_path(&self, cx: &App) -> PathBuf;

    /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
    fn load(&self, cx: &App) -> Task<Result<String>>;

    /// Loads the file's contents from disk.
    fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
}

/// The auto-indent behavior associated with an editing operation.
/// For some editing operations, each affected line of text has its
/// indentation recomputed. For other operations, the entire block
/// of edited text is adjusted uniformly.
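///
/// A rough usage sketch (the edit ranges, text, and `cx` here are hypothetical;
/// `Buffer::edit` takes the edits, an optional autoindent mode, and the context):
///
/// ```ignore
/// // Recompute the indentation of every inserted line.
/// buffer.edit([(range_a, "if x {\n    y();\n}")], Some(AutoindentMode::EachLine), cx);
///
/// // Shift a pasted block uniformly, preserving its internal relative indentation.
/// buffer.edit(
///     [(range_b, pasted_text)],
///     Some(AutoindentMode::Block {
///         original_indent_columns: vec![Some(4)],
///     }),
///     cx,
/// );
/// ```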
#[derive(Clone, Debug)]
pub enum AutoindentMode {
    /// Indent each line of inserted text.
    EachLine,
    /// Apply the same indentation adjustment to all of the lines
    /// in a given insertion.
    Block {
        /// The original indentation column of the first line of each
        /// insertion, if it has been copied.
        ///
        /// Knowing this makes it possible to preserve the relative indentation
        /// of every line in the insertion from when it was copied.
        ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is auto-indented to column `b`, then every other line of the insertion
        /// has its indentation shifted by `b - a`.
        original_indent_columns: Vec<Option<u32>>,
    },
}

#[derive(Clone)]
struct AutoindentRequest {
    before_edit: BufferSnapshot,
    entries: Vec<AutoindentRequestEntry>,
    is_block_mode: bool,
    ignore_empty_lines: bool,
}

#[derive(Debug, Clone)]
struct AutoindentRequestEntry {
    /// A range of the buffer whose indentation should be adjusted.
    range: Range<Anchor>,
    /// The row of the edit start in the buffer before the edit was applied.
    /// This is stored here because the anchor in range is created after
    /// the edit, so it cannot be used with the before_edit snapshot.
    old_row: Option<u32>,
    indent_size: IndentSize,
    original_indent_column: Option<u32>,
}

#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    delta: Ordering,
    within_error: bool,
}

struct BufferChunkHighlights<'a> {
    captures: SyntaxMapCaptures<'a>,
    next_capture: Option<SyntaxMapCapture<'a>>,
    stack: Vec<(usize, HighlightId)>,
    highlight_maps: Vec<HighlightMap>,
}

/// An iterator that yields chunks of a buffer's text, along with their
/// syntax highlights and diagnostic status.
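///
/// A minimal consumption sketch, assuming the iterator was produced by a
/// snapshot method such as `BufferSnapshot::chunks` (the `snapshot` value is
/// hypothetical):
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // `chunk.text` is a slice of the buffer's text; highlight and
///     // diagnostic metadata ride along on the other `Chunk` fields.
///     print!("{}", chunk.text);
/// }
/// ```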
pub struct BufferChunks<'a> {
    buffer_snapshot: Option<&'a BufferSnapshot>,
    range: Range<usize>,
    chunks: text::Chunks<'a>,
    diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    unnecessary_depth: usize,
    underline: bool,
    highlights: Option<BufferChunkHighlights<'a>>,
}

/// A chunk of a buffer's text, along with its syntax highlight and
/// diagnostic status.
#[derive(Clone, Debug, Default)]
pub struct Chunk<'a> {
    /// The text of the chunk.
    pub text: &'a str,
    /// The syntax highlighting style of the chunk.
    pub syntax_highlight_id: Option<HighlightId>,
    /// The highlight style that has been applied to this chunk in
    /// the editor.
    pub highlight_style: Option<HighlightStyle>,
    /// The severity of diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters are tabs in this string.
    pub tabs: u128,
    /// Bitmap of character indices in this chunk
    pub chars: u128,
    /// Whether this chunk of text is marked as unnecessary.
    pub is_unnecessary: bool,
    /// Whether this chunk of text was originally a tab character.
    pub is_tab: bool,
    /// Whether this chunk of text was originally an inlay.
    pub is_inlay: bool,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}

/// A set of edits to a given version of a buffer, computed asynchronously.
#[derive(Debug)]
pub struct Diff {
    pub base_version: clock::Global,
    pub line_ending: LineEnding,
    pub edits: Vec<(Range<usize>, Arc<str>)>,
}

#[derive(Debug, Clone, Copy)]
pub(crate) struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    underline: bool,
    severity: DiagnosticSeverity,
    is_unnecessary: bool,
}

/// A class of characters, used for characterizing a run of text.
#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
pub enum CharKind {
    /// Whitespace.
    Whitespace,
    /// Punctuation.
    Punctuation,
    /// Word.
    Word,
}

/// Context for character classification within a specific scope.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum CharScopeContext {
    /// Character classification for completion queries.
    ///
    /// This context treats certain characters as word constituents that would
    /// normally be considered punctuation, such as '-' in Tailwind classes
    /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
    Completion,
    /// Character classification for linked edits.
    ///
    /// This context handles characters that should be treated as part of
    /// identifiers during linked editing operations, such as '.' in JSX
    /// component names like `<Animated.View>`.
    LinkedEdit,
}

/// A runnable is a set of data about a region that could be resolved into a task
pub struct Runnable {
    pub tags: SmallVec<[RunnableTag; 1]>,
    pub language: Arc<Language>,
    pub buffer: BufferId,
}

#[derive(Default, Clone, Debug)]
pub struct HighlightedText {
    pub text: SharedString,
    pub highlights: Vec<(Range<usize>, HighlightStyle)>,
}

#[derive(Default, Debug)]
struct HighlightedTextBuilder {
    pub text: String,
    highlights: Vec<(Range<usize>, HighlightStyle)>,
}

impl HighlightedText {
    pub fn from_buffer_range<T: ToOffset>(
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) -> Self {
        let mut highlighted_text = HighlightedTextBuilder::default();
        highlighted_text.add_text_from_buffer_range(
            range,
            snapshot,
            syntax_snapshot,
            override_style,
            syntax_theme,
        );
        highlighted_text.build()
    }

    pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
        gpui::StyledText::new(self.text.clone())
            .with_default_highlights(default_style, self.highlights.iter().cloned())
    }

    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// begins within it, and a boolean indicating whether more lines follow.
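    ///
    /// A minimal sketch of the expected shape of the result (values are illustrative):
    ///
    /// ```ignore
    /// let (preview, has_more) = highlighted_text.first_line_preview();
    /// // `preview.text` holds only the first line; `has_more` is true when the
    /// // original text contained additional lines.
    /// ```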
    pub fn first_line_preview(self) -> (Self, bool) {
        let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
        let first_line = &self.text[..newline_ix];

        // Trim leading whitespace, unless an edit starts prior to it.
        let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
        if let Some((first_highlight_range, _)) = self.highlights.first() {
            preview_start_ix = preview_start_ix.min(first_highlight_range.start);
        }

        let preview_text = &first_line[preview_start_ix..];
        let preview_highlights = self
            .highlights
            .into_iter()
            .skip_while(|(range, _)| range.end <= preview_start_ix)
            .take_while(|(range, _)| range.start < newline_ix)
            .filter_map(|(mut range, highlight)| {
                range.start = range.start.saturating_sub(preview_start_ix);
                range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
                if range.is_empty() {
                    None
                } else {
                    Some((range, highlight))
                }
            });

        let preview = Self {
            text: SharedString::new(preview_text),
            highlights: preview_highlights.collect(),
        };

        (preview, self.text.len() > newline_ix)
    }
}

impl HighlightedTextBuilder {
    pub fn build(self) -> HighlightedText {
        HighlightedText {
            text: self.text.into(),
            highlights: self.highlights,
        }
    }

    pub fn add_text_from_buffer_range<T: ToOffset>(
        &mut self,
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) {
        let range = range.to_offset(snapshot);
        for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
            let start = self.text.len();
            self.text.push_str(chunk.text);
            let end = self.text.len();

            if let Some(highlight_style) = chunk
                .syntax_highlight_id
                .and_then(|id| id.style(syntax_theme))
            {
                let highlight_style = override_style.map_or(highlight_style, |override_style| {
                    highlight_style.highlight(override_style)
                });
                self.highlights.push((start..end, highlight_style));
            } else if let Some(override_style) = override_style {
                self.highlights.push((start..end, override_style));
            }
        }
    }

    fn highlighted_chunks<'a>(
        range: Range<usize>,
        snapshot: &'a text::BufferSnapshot,
        syntax_snapshot: &'a SyntaxSnapshot,
    ) -> BufferChunks<'a> {
        let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
            grammar
                .highlights_config
                .as_ref()
                .map(|config| &config.query)
        });

        let highlight_maps = captures
            .grammars()
            .iter()
            .map(|grammar| grammar.highlight_map())
            .collect();

        BufferChunks::new(
            snapshot.as_rope(),
            range,
            Some((captures, highlight_maps)),
            false,
            None,
        )
    }
}

#[derive(Clone)]
pub struct EditPreview {
    old_snapshot: text::BufferSnapshot,
    applied_edits_snapshot: text::BufferSnapshot,
    syntax_snapshot: SyntaxSnapshot,
}

impl EditPreview {
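    /// Renders the edits as a unified diff against the old snapshot, including a
    /// few rows of surrounding context and, when a file is provided, a `---`/`+++`
    /// header built from its path. Returns `None` if `edits` is empty.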
    pub fn as_unified_diff(
        &self,
        file: Option<&Arc<dyn File>>,
        edits: &[(Range<Anchor>, impl AsRef<str>)],
    ) -> Option<String> {
        let (first, _) = edits.first()?;
        let (last, _) = edits.last()?;

        let start = first.start.to_point(&self.old_snapshot);
        let old_end = last.end.to_point(&self.old_snapshot);
        let new_end = last
            .end
            .bias_right(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);

        let start = Point::new(start.row.saturating_sub(3), 0);
        let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
        let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());

        let diff_body = unified_diff_with_offsets(
            &self
                .old_snapshot
                .text_for_range(start..old_end)
                .collect::<String>(),
            &self
                .applied_edits_snapshot
                .text_for_range(start..new_end)
                .collect::<String>(),
            start.row,
            start.row,
        );

        let path = file.map(|f| f.path().as_unix_str());
        let header = match path {
            Some(p) => format!("--- a/{}\n+++ b/{}\n", p, p),
            None => String::new(),
        };

        Some(format!("{}{}", header, diff_body))
    }

    pub fn highlight_edits(
        &self,
        current_snapshot: &BufferSnapshot,
        edits: &[(Range<Anchor>, impl AsRef<str>)],
        include_deletions: bool,
        cx: &App,
    ) -> HighlightedText {
        let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
            return HighlightedText::default();
        };

        let mut highlighted_text = HighlightedTextBuilder::default();

        let visible_range_in_preview_snapshot =
            visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
        let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;

        let insertion_highlight_style = HighlightStyle {
            background_color: Some(cx.theme().status().created_background),
            ..Default::default()
        };
        let deletion_highlight_style = HighlightStyle {
            background_color: Some(cx.theme().status().deleted_background),
            ..Default::default()
        };
        let syntax_theme = cx.theme().syntax();

        for (range, edit_text) in edits {
            let edit_new_end_in_preview_snapshot = range
                .end
                .bias_right(&self.old_snapshot)
                .to_offset(&self.applied_edits_snapshot);
            let edit_start_in_preview_snapshot =
                edit_new_end_in_preview_snapshot - edit_text.as_ref().len();

            let unchanged_range_in_preview_snapshot =
                offset_in_preview_snapshot..edit_start_in_preview_snapshot;
            if !unchanged_range_in_preview_snapshot.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    unchanged_range_in_preview_snapshot,
                    &self.applied_edits_snapshot,
                    &self.syntax_snapshot,
                    None,
                    syntax_theme,
                );
            }

            let range_in_current_snapshot = range.to_offset(current_snapshot);
            if include_deletions && !range_in_current_snapshot.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
                    Some(deletion_highlight_style),
                    syntax_theme,
                );
            }

            if !edit_text.as_ref().is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
                    &self.applied_edits_snapshot,
                    &self.syntax_snapshot,
                    Some(insertion_highlight_style),
                    syntax_theme,
                );
            }

            offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
        }

        highlighted_text.add_text_from_buffer_range(
            offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
            &self.applied_edits_snapshot,
            &self.syntax_snapshot,
            None,
            syntax_theme,
        );

        highlighted_text.build()
    }

    pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
        cx.new(|cx| {
            let mut buffer = Buffer::local_normalized(
                self.applied_edits_snapshot.as_rope().clone(),
                self.applied_edits_snapshot.line_ending(),
                cx,
            );
            buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
            buffer
        })
    }

    pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
        let (first, _) = edits.first()?;
        let (last, _) = edits.last()?;

        let start = first
            .start
            .bias_left(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);
        let end = last
            .end
            .bias_right(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);

        // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
        let range = Point::new(start.row, 0)
            ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));

        Some(range)
    }
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct BracketMatch<T> {
    pub open_range: Range<T>,
    pub close_range: Range<T>,
    pub newline_only: bool,
    pub syntax_layer_depth: usize,
    pub color_index: Option<usize>,
}

impl<T> BracketMatch<T> {
    pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
        (self.open_range, self.close_range)
    }
}

impl Buffer {
    /// Create a new buffer with the given base text.
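    ///
    /// A minimal usage sketch, assuming a gpui app context is in scope:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```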
    pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
        Self::build(
            TextBuffer::new(
                ReplicaId::LOCAL,
                cx.entity_id().as_non_zero_u64().into(),
                base_text.into(),
            ),
            None,
            Capability::ReadWrite,
        )
    }

    /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
    pub fn local_normalized(
        base_text_normalized: Rope,
        line_ending: LineEnding,
        cx: &Context<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new_normalized(
                ReplicaId::LOCAL,
                cx.entity_id().as_non_zero_u64().into(),
                line_ending,
                base_text_normalized,
            ),
            None,
            Capability::ReadWrite,
        )
    }

    /// Create a new buffer that is a replica of a remote buffer.
    pub fn remote(
        remote_id: BufferId,
        replica_id: ReplicaId,
        capability: Capability,
        base_text: impl Into<String>,
    ) -> Self {
        Self::build(
            TextBuffer::new(replica_id, remote_id, base_text.into()),
            None,
            capability,
        )
    }

    /// Create a new buffer that is a replica of a remote buffer, populating its
    /// state from the given protobuf message.
    pub fn from_proto(
        replica_id: ReplicaId,
        capability: Capability,
        message: proto::BufferState,
        file: Option<Arc<dyn File>>,
    ) -> Result<Self> {
        let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
        let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
        let mut this = Self::build(buffer, file, capability);
        this.text.set_line_ending(proto::deserialize_line_ending(
            rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
        ));
        this.saved_version = proto::deserialize_version(&message.saved_version);
        this.saved_mtime = message.saved_mtime.map(|time| time.into());
        Ok(this)
    }

    /// Serialize the buffer's state to a protobuf message.
    pub fn to_proto(&self, cx: &App) -> proto::BufferState {
        proto::BufferState {
            id: self.remote_id().into(),
            file: self.file.as_ref().map(|f| f.to_proto(cx)),
            base_text: self.base_text().to_string(),
            line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
            saved_version: proto::serialize_version(&self.saved_version),
            saved_mtime: self.saved_mtime.map(|time| time.into()),
        }
    }

    /// Serialize as protobufs all of the changes to the buffer since the given version.
    pub fn serialize_ops(
        &self,
        since: Option<clock::Global>,
        cx: &App,
    ) -> Task<Vec<proto::Operation>> {
        let mut operations = Vec::new();
        operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));

        operations.extend(self.remote_selections.iter().map(|(_, set)| {
            proto::serialize_operation(&Operation::UpdateSelections {
                selections: set.selections.clone(),
                lamport_timestamp: set.lamport_timestamp,
                line_mode: set.line_mode,
                cursor_shape: set.cursor_shape,
            })
        }));

        for (server_id, diagnostics) in &self.diagnostics {
            operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
                lamport_timestamp: self.diagnostics_timestamp,
                server_id: *server_id,
                diagnostics: diagnostics.iter().cloned().collect(),
            }));
        }

        for (server_id, completions) in &self.completion_triggers_per_language_server {
            operations.push(proto::serialize_operation(
                &Operation::UpdateCompletionTriggers {
                    triggers: completions.iter().cloned().collect(),
                    lamport_timestamp: self.completion_triggers_timestamp,
                    server_id: *server_id,
                },
            ));
        }

        let text_operations = self.text.operations().clone();
        cx.background_spawn(async move {
            let since = since.unwrap_or_default();
            operations.extend(
                text_operations
                    .iter()
                    .filter(|(_, op)| !since.observed(op.timestamp()))
                    .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
            );
            operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
            operations
        })
    }

    /// Assign a language to the buffer, returning the buffer.
    pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
        self.set_language_async(Some(language), cx);
        self
    }

    /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
    pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
        self.set_language(Some(language), cx);
        self
    }

    /// Returns the [`Capability`] of this buffer.
    pub fn capability(&self) -> Capability {
        self.capability
    }

    /// Whether this buffer can only be read.
    pub fn read_only(&self) -> bool {
        !self.capability.editable()
    }

    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
    pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
        let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
        let snapshot = buffer.snapshot();
        let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
        let tree_sitter_data = TreeSitterData::new(snapshot);
        Self {
            saved_mtime,
            tree_sitter_data: Arc::new(tree_sitter_data),
            saved_version: buffer.version(),
            preview_version: buffer.version(),
            reload_task: None,
            transaction_depth: 0,
            was_dirty_before_starting_transaction: None,
            has_unsaved_edits: Cell::new((buffer.version(), false)),
            text: buffer,
            branch_state: None,
            file,
            capability,
            syntax_map,
            reparse: None,
            non_text_state_update_count: 0,
            sync_parse_timeout: Duration::from_millis(1),
            parse_status: watch::channel(ParseStatus::Idle),
            autoindent_requests: Default::default(),
            wait_for_autoindent_txs: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            remote_selections: Default::default(),
            diagnostics: Default::default(),
            diagnostics_timestamp: Lamport::MIN,
            completion_triggers: Default::default(),
            completion_triggers_per_language_server: Default::default(),
            completion_triggers_timestamp: Lamport::MIN,
            deferred_ops: OperationQueue::new(),
            has_conflict: false,
            change_bits: Default::default(),
            _subscriptions: Vec::new(),
            encoding: encoding_rs::UTF_8,
            has_bom: false,
        }
    }

    pub fn build_snapshot(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        cx: &mut App,
    ) -> impl Future<Output = BufferSnapshot> + use<> {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        async move {
            let text =
                TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
                    .snapshot();
            let mut syntax = SyntaxMap::new(&text).snapshot();
            if let Some(language) = language.clone() {
                let language_registry = language_registry.clone();
                syntax.reparse(&text, language_registry, language);
            }
            let tree_sitter_data = TreeSitterData::new(text.clone());
            BufferSnapshot {
                text,
                syntax,
                file: None,
                diagnostics: Default::default(),
                remote_selections: Default::default(),
                tree_sitter_data: Arc::new(tree_sitter_data),
                language,
                non_text_state_update_count: 0,
                capability: Capability::ReadOnly,
            }
        }
    }

    pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        let text = TextBuffer::new_normalized(
            ReplicaId::LOCAL,
            buffer_id,
            Default::default(),
            Rope::new(),
        )
        .snapshot();
        let syntax = SyntaxMap::new(&text).snapshot();
        let tree_sitter_data = TreeSitterData::new(text.clone());
        BufferSnapshot {
            text,
            syntax,
            tree_sitter_data: Arc::new(tree_sitter_data),
            file: None,
            diagnostics: Default::default(),
            remote_selections: Default::default(),
            language: None,
            non_text_state_update_count: 0,
            capability: Capability::ReadOnly,
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn build_snapshot_sync(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        cx: &mut App,
    ) -> BufferSnapshot {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        let text =
            TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
                .snapshot();
        let mut syntax = SyntaxMap::new(&text).snapshot();
        if let Some(language) = language.clone() {
            syntax.reparse(&text, language_registry, language);
        }
        let tree_sitter_data = TreeSitterData::new(text.clone());
        BufferSnapshot {
            text,
            syntax,
            tree_sitter_data: Arc::new(tree_sitter_data),
            file: None,
            diagnostics: Default::default(),
            remote_selections: Default::default(),
            language,
            non_text_state_update_count: 0,
            capability: Capability::ReadOnly,
        }
    }

    /// Retrieve a snapshot of the buffer's current state. This is computationally
    /// cheap, and allows reading from the buffer on a background thread.
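    ///
    /// A rough sketch of reading from a snapshot off the main thread, assuming a
    /// gpui app context and a `buffer` entity are in scope:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is a stable view of the text; reading it here does not
    ///     // block or observe later edits to the buffer.
    ///     let line_count = snapshot.max_point().row + 1;
    ///     println!("{line_count} lines");
    /// })
    /// .detach();
    /// ```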
    pub fn snapshot(&self) -> BufferSnapshot {
        let text = self.text.snapshot();
        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let syntax = syntax_map.snapshot();

        let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
            Arc::new(TreeSitterData::new(text.clone()))
        } else {
            self.tree_sitter_data.clone()
        };

        BufferSnapshot {
            text,
            syntax,
            tree_sitter_data,
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
            capability: self.capability,
        }
    }

    pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
        let this = cx.entity();
        cx.new(|cx| {
            let mut branch = Self {
                branch_state: Some(BufferBranchState {
                    base_buffer: this.clone(),
                    merged_operations: Default::default(),
                }),
                language: self.language.clone(),
                has_conflict: self.has_conflict,
                has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
                _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
                ..Self::build(self.text.branch(), self.file.clone(), self.capability())
            };
            if let Some(language_registry) = self.language_registry() {
                branch.set_language_registry(language_registry);
            }

            // Reparse the branch buffer so that we get syntax highlighting immediately.
            branch.reparse(cx, true);

            branch
        })
    }

    pub fn preview_edits(
        &self,
        edits: Arc<[(Range<Anchor>, Arc<str>)]>,
        cx: &App,
    ) -> Task<EditPreview> {
        let registry = self.language_registry();
        let language = self.language().cloned();
        let old_snapshot = self.text.snapshot();
        let mut branch_buffer = self.text.branch();
        let mut syntax_snapshot = self.syntax_map.lock().snapshot();
        cx.background_spawn(async move {
            if !edits.is_empty() {
                if let Some(language) = language.clone() {
                    syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
                }

                branch_buffer.edit(edits.iter().cloned());
                let snapshot = branch_buffer.snapshot();
                syntax_snapshot.interpolate(&snapshot);

                if let Some(language) = language {
                    syntax_snapshot.reparse(&snapshot, registry, language);
                }
            }
            EditPreview {
                old_snapshot,
                applied_edits_snapshot: branch_buffer.snapshot(),
                syntax_snapshot,
            }
        })
    }

    /// Applies all of the changes in this buffer that intersect any of the
    /// given `ranges` to its base buffer.
    ///
    /// If `ranges` is empty, then all changes will be applied. This buffer must
    /// be a branch buffer to call this method.
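    ///
    /// A rough sketch of the intended flow (the entities and `cx` are hypothetical):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// // ...edit the branch...
    /// branch.update(cx, |branch, cx| {
    ///     // An empty `ranges` vector applies every change back to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx)
    /// });
    /// ```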
    pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
        let Some(base_buffer) = self.base_buffer() else {
            debug_panic!("not a branch buffer");
            return;
        };

        let mut ranges = if ranges.is_empty() {
            &[0..usize::MAX]
        } else {
            ranges.as_slice()
        }
        .iter()
        .peekable();

        let mut edits = Vec::new();
        for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
            let mut is_included = false;
            while let Some(range) = ranges.peek() {
                if range.end < edit.new.start {
                    ranges.next().unwrap();
                } else {
                    if range.start <= edit.new.end {
                        is_included = true;
                    }
                    break;
                }
            }

            if is_included {
                edits.push((
                    edit.old.clone(),
                    self.text_for_range(edit.new.clone()).collect::<String>(),
                ));
            }
        }

        let operation = base_buffer.update(cx, |base_buffer, cx| {
            // cx.emit(BufferEvent::DiffBaseChanged);
            base_buffer.edit(edits, None, cx)
        });

        if let Some(operation) = operation
            && let Some(BufferBranchState {
                merged_operations, ..
            }) = &mut self.branch_state
        {
            merged_operations.push(operation);
        }
    }

    fn on_base_buffer_event(
        &mut self,
        _: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        let BufferEvent::Operation { operation, .. } = event else {
            return;
        };
        let Some(BufferBranchState {
            merged_operations, ..
        }) = &mut self.branch_state
        else {
            return;
        };

        let mut operation_to_undo = None;
        if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
            && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
        {
            merged_operations.remove(ix);
            operation_to_undo = Some(operation.timestamp);
        }

        self.apply_ops([operation.clone()], cx);

        if let Some(timestamp) = operation_to_undo {
            let counts = [(timestamp, u32::MAX)].into_iter().collect();
            self.undo_operations(counts, cx);
        }
    }

    #[cfg(test)]
    pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
        &self.text
    }

    /// Retrieve a snapshot of the buffer's raw text, without any
    /// language-related state like the syntax tree or diagnostics.
    pub fn text_snapshot(&self) -> text::BufferSnapshot {
        self.text.snapshot()
    }

    /// The file associated with the buffer, if any.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }

    /// The version of the buffer that was last saved or reloaded from disk.
    pub fn saved_version(&self) -> &clock::Global {
        &self.saved_version
    }

    /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
    pub fn saved_mtime(&self) -> Option<MTime> {
        self.saved_mtime
    }

    /// Returns the character encoding of the buffer's file.
    pub fn encoding(&self) -> &'static Encoding {
        self.encoding
    }

    /// Sets the character encoding of the buffer.
    pub fn set_encoding(&mut self, encoding: &'static Encoding) {
        self.encoding = encoding;
    }

    /// Returns whether the buffer has a Byte Order Mark.
    pub fn has_bom(&self) -> bool {
        self.has_bom
    }

    /// Sets whether the buffer has a Byte Order Mark.
    pub fn set_has_bom(&mut self, has_bom: bool) {
        self.has_bom = has_bom;
    }

    /// Assign a language to the buffer.
    pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
        self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
    }

    /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
    pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
        self.set_language_(language, true, cx);
    }

    fn set_language_(
        &mut self,
        language: Option<Arc<Language>>,
        may_block: bool,
        cx: &mut Context<Self>,
    ) {
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().clear(&self.text);
        let old_language = std::mem::replace(&mut self.language, language);
        self.was_changed();
        self.reparse(cx, may_block);
        let has_fresh_language =
            self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
        cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
    }

    /// Assign a language registry to the buffer. This allows the buffer to retrieve
    /// other languages if parts of the buffer are written in different languages.
    pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
        self.syntax_map
            .lock()
            .set_language_registry(language_registry);
    }

    pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
        self.syntax_map.lock().language_registry()
    }

    /// Assign the line ending type to the buffer.
    pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
        self.text.set_line_ending(line_ending);

        let lamport_timestamp = self.text.lamport_clock.tick();
        self.send_operation(
            Operation::UpdateLineEnding {
                line_ending,
                lamport_timestamp,
            },
            true,
            cx,
        );
    }

    /// Assign the buffer a new [`Capability`].
    pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
        if self.capability != capability {
            self.capability = capability;
            cx.emit(BufferEvent::CapabilityChanged)
        }
    }

    /// This method is called to signal that the buffer has been saved.
    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version.clone();
        self.has_unsaved_edits.set((version, false));
        self.has_conflict = false;
        self.saved_mtime = mtime;
        self.was_changed();
        cx.emit(BufferEvent::Saved);
        cx.notify();
    }

    /// Reloads the contents of the buffer from disk.
    pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
        let (tx, rx) = futures::channel::oneshot::channel();
        let prev_version = self.text.version();
        self.reload_task = Some(cx.spawn(async move |this, cx| {
            let Some((new_mtime, load_bytes_task, encoding)) = this.update(cx, |this, cx| {
                let file = this.file.as_ref()?.as_local()?;
                Some((
                    file.disk_state().mtime(),
                    file.load_bytes(cx),
                    this.encoding,
                ))
            })?
            else {
                return Ok(());
            };

            let bytes = load_bytes_task.await?;
            let (cow, _encoding_used, _has_errors) = encoding.decode(&bytes);
            let new_text = cow.into_owned();

            let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await;
            this.update(cx, |this, cx| {
                if this.version() == diff.base_version {
                    this.finalize_last_transaction();
                    this.apply_diff(diff, cx);
                    tx.send(this.finalize_last_transaction().cloned()).ok();
                    this.has_conflict = false;
                    this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
                } else {
                    if !diff.edits.is_empty()
                        || this
                            .edits_since::<usize>(&diff.base_version)
                            .next()
                            .is_some()
                    {
                        this.has_conflict = true;
                    }

                    this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
                }

                this.reload_task.take();
            })
        }));
        rx
    }

    /// This method is called to signal that the buffer has been reloaded.
    pub fn did_reload(
        &mut self,
        version: clock::Global,
        line_ending: LineEnding,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version;
        self.has_unsaved_edits
            .set((self.saved_version.clone(), false));
        self.text.set_line_ending(line_ending);
        self.saved_mtime = mtime;
        cx.emit(BufferEvent::Reloaded);
        cx.notify();
    }

    /// Updates the [`File`] backing this buffer. This should be called when
    /// the file has changed or has been deleted.
    pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
        let was_dirty = self.is_dirty();
        let mut file_changed = false;

        if let Some(old_file) = self.file.as_ref() {
            if new_file.path() != old_file.path() {
                file_changed = true;
            }

            let old_state = old_file.disk_state();
            let new_state = new_file.disk_state();
            if old_state != new_state {
                file_changed = true;
                if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
                    cx.emit(BufferEvent::ReloadNeeded)
                }
            }
        } else {
            file_changed = true;
        };

        self.file = Some(new_file);
        if file_changed {
            self.was_changed();
            self.non_text_state_update_count += 1;
            if was_dirty != self.is_dirty() {
                cx.emit(BufferEvent::DirtyChanged);
            }
            cx.emit(BufferEvent::FileHandleChanged);
            cx.notify();
        }
    }

    pub fn base_buffer(&self) -> Option<Entity<Self>> {
        Some(self.branch_state.as_ref()?.base_buffer.clone())
    }

    /// Returns the primary [`Language`] assigned to this [`Buffer`].
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    /// Returns the [`Language`] at the given location.
    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
        let offset = position.to_offset(self);
        let mut is_first = true;
        let start_anchor = self.anchor_before(offset);
        let end_anchor = self.anchor_after(offset);
        self.syntax_map
            .lock()
            .layers_for_range(offset..offset, &self.text, false)
            .filter(|layer| {
                if is_first {
                    is_first = false;
                    return true;
                }

                layer
                    .included_sub_ranges
                    .map(|sub_ranges| {
                        sub_ranges.iter().any(|sub_range| {
                            let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
                            let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
                            !is_before_start && !is_after_end
                        })
                    })
                    .unwrap_or(true)
            })
            .last()
            .map(|info| info.language.clone())
            .or_else(|| self.language.clone())
    }

    /// Returns each [`Language`] for the active syntax layers at the given location.
    pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
        let offset = position.to_offset(self);
        let mut languages: Vec<Arc<Language>> = self
            .syntax_map
            .lock()
            .layers_for_range(offset..offset, &self.text, false)
            .map(|info| info.language.clone())
            .collect();

        if languages.is_empty()
            && let Some(buffer_language) = self.language()
        {
            languages.push(buffer_language.clone());
        }

        languages
    }

    /// An integer version number that accounts for all updates besides
    /// the buffer's text itself (which is versioned via a version vector).
    pub fn non_text_state_update_count(&self) -> usize {
        self.non_text_state_update_count
    }

    /// Whether the buffer is being parsed in the background.
    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.reparse.is_some()
    }

    /// Indicates whether the buffer contains any regions that may be
    /// written in a language that hasn't been loaded yet.
    pub fn contains_unknown_injections(&self) -> bool {
        self.syntax_map.lock().contains_unknown_injections()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
        self.sync_parse_timeout = timeout;
    }

    fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
        match Arc::get_mut(&mut self.tree_sitter_data) {
            Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
            None => {
                let tree_sitter_data = TreeSitterData::new(snapshot);
                self.tree_sitter_data = Arc::new(tree_sitter_data)
            }
        }
    }

1721 /// Called after an edit to synchronize the buffer's main parse tree with
1722 /// the buffer's new underlying state.
1723 ///
1724 /// Locks the syntax map and interpolates the edits since the last reparse
1725 /// into the foreground syntax tree.
1726 ///
1727 /// Then takes a stable snapshot of the syntax map before unlocking it.
1728 /// The snapshot with the interpolated edits is sent to a background thread,
1729 /// where we ask Tree-sitter to perform an incremental parse.
1730 ///
    /// Meanwhile, in the foreground, if `may_block` is true, we block the main
    /// thread for up to the buffer's sync parse timeout (1ms by default),
    /// waiting for the parse to complete. If it finishes within that window,
    /// we apply the result synchronously.
    ///
    /// If we time out waiting on the parse, we return with the interpolated
    /// tree still in the foreground and spawn a second task that waits for
    /// the background parse to complete. When it does, it calls back into the
    /// main thread and assigns the result to the foreground parse state.
1739 ///
1740 /// If the buffer or grammar changed since the start of the background parse,
1741 /// initiate an additional reparse recursively. To avoid concurrent parses
1742 /// for the same buffer, we only initiate a new parse if we are not already
1743 /// parsing in the background.
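    ///
    /// A rough usage sketch (illustrative only, not compiled as a doctest;
    /// assumes `buffer: &mut Buffer` and `cx: &mut Context<Buffer>` from a
    /// test harness):
    ///
    /// ```ignore
    /// buffer.reparse(cx, false);          // kick off a background parse
    /// let idle = buffer.parsing_idle();   // resolves once the parse has been applied
    /// cx.background_spawn(idle).detach(); // or `.await` it from an async test
    /// ```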
1744 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1745 if self.text.version() != *self.tree_sitter_data.version() {
1746 self.invalidate_tree_sitter_data(self.text.snapshot());
1747 }
1748 if self.reparse.is_some() {
1749 return;
1750 }
1751 let language = if let Some(language) = self.language.clone() {
1752 language
1753 } else {
1754 return;
1755 };
1756
1757 let text = self.text_snapshot();
1758 let parsed_version = self.version();
1759
1760 let mut syntax_map = self.syntax_map.lock();
1761 syntax_map.interpolate(&text);
1762 let language_registry = syntax_map.language_registry();
1763 let mut syntax_snapshot = syntax_map.snapshot();
1764 drop(syntax_map);
1765
1766 let parse_task = cx.background_spawn({
1767 let language = language.clone();
1768 let language_registry = language_registry.clone();
1769 async move {
1770 syntax_snapshot.reparse(&text, language_registry, language);
1771 syntax_snapshot
1772 }
1773 });
1774
1775 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1776 if may_block {
1777 match cx
1778 .background_executor()
1779 .block_with_timeout(self.sync_parse_timeout, parse_task)
1780 {
1781 Ok(new_syntax_snapshot) => {
1782 self.did_finish_parsing(new_syntax_snapshot, cx);
1783 self.reparse = None;
1784 }
1785 Err(parse_task) => {
1786 self.reparse = Some(cx.spawn(async move |this, cx| {
1787 let new_syntax_map = cx.background_spawn(parse_task).await;
1788 this.update(cx, move |this, cx| {
1789 let grammar_changed = || {
1790 this.language.as_ref().is_none_or(|current_language| {
1791 !Arc::ptr_eq(&language, current_language)
1792 })
1793 };
1794 let language_registry_changed = || {
1795 new_syntax_map.contains_unknown_injections()
1796 && language_registry.is_some_and(|registry| {
1797 registry.version()
1798 != new_syntax_map.language_registry_version()
1799 })
1800 };
1801 let parse_again = this.version.changed_since(&parsed_version)
1802 || language_registry_changed()
1803 || grammar_changed();
1804 this.did_finish_parsing(new_syntax_map, cx);
1805 this.reparse = None;
1806 if parse_again {
1807 this.reparse(cx, false);
1808 }
1809 })
1810 .ok();
1811 }));
1812 }
1813 }
1814 } else {
1815 self.reparse = Some(cx.spawn(async move |this, cx| {
1816 let new_syntax_map = cx.background_spawn(parse_task).await;
1817 this.update(cx, move |this, cx| {
1818 let grammar_changed = || {
1819 this.language.as_ref().is_none_or(|current_language| {
1820 !Arc::ptr_eq(&language, current_language)
1821 })
1822 };
1823 let language_registry_changed = || {
1824 new_syntax_map.contains_unknown_injections()
1825 && language_registry.is_some_and(|registry| {
1826 registry.version() != new_syntax_map.language_registry_version()
1827 })
1828 };
1829 let parse_again = this.version.changed_since(&parsed_version)
1830 || language_registry_changed()
1831 || grammar_changed();
1832 this.did_finish_parsing(new_syntax_map, cx);
1833 this.reparse = None;
1834 if parse_again {
1835 this.reparse(cx, false);
1836 }
1837 })
1838 .ok();
1839 }));
1840 }
1841 }
1842
1843 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1844 self.was_changed();
1845 self.non_text_state_update_count += 1;
1846 self.syntax_map.lock().did_parse(syntax_snapshot);
1847 self.request_autoindent(cx);
1848 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1849 self.invalidate_tree_sitter_data(self.text.snapshot());
1850 cx.emit(BufferEvent::Reparsed);
1851 cx.notify();
1852 }
1853
1854 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1855 self.parse_status.1.clone()
1856 }
1857
    /// Waits until the buffer is no longer parsing.
1859 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1860 let mut parse_status = self.parse_status();
1861 async move {
1862 while *parse_status.borrow() != ParseStatus::Idle {
1863 if parse_status.changed().await.is_err() {
1864 break;
1865 }
1866 }
1867 }
1868 }
1869
1870 /// Assign to the buffer a set of diagnostics created by a given language server.
1871 pub fn update_diagnostics(
1872 &mut self,
1873 server_id: LanguageServerId,
1874 diagnostics: DiagnosticSet,
1875 cx: &mut Context<Self>,
1876 ) {
1877 let lamport_timestamp = self.text.lamport_clock.tick();
1878 let op = Operation::UpdateDiagnostics {
1879 server_id,
1880 diagnostics: diagnostics.iter().cloned().collect(),
1881 lamport_timestamp,
1882 };
1883
1884 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1885 self.send_operation(op, true, cx);
1886 }
1887
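    /// Returns the diagnostics stored in this buffer, either for a single
    /// language server or, when `for_server` is `None`, for all servers.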
1888 pub fn buffer_diagnostics(
1889 &self,
1890 for_server: Option<LanguageServerId>,
1891 ) -> Vec<&DiagnosticEntry<Anchor>> {
1892 match for_server {
1893 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1894 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1895 Err(_) => Vec::new(),
1896 },
1897 None => self
1898 .diagnostics
1899 .iter()
1900 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1901 .collect(),
1902 }
1903 }
1904
1905 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1906 if let Some(indent_sizes) = self.compute_autoindents() {
1907 let indent_sizes = cx.background_spawn(indent_sizes);
1908 match cx
1909 .background_executor()
1910 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1911 {
1912 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1913 Err(indent_sizes) => {
1914 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1915 let indent_sizes = indent_sizes.await;
1916 this.update(cx, |this, cx| {
1917 this.apply_autoindents(indent_sizes, cx);
1918 })
1919 .ok();
1920 }));
1921 }
1922 }
1923 } else {
1924 self.autoindent_requests.clear();
1925 for tx in self.wait_for_autoindent_txs.drain(..) {
1926 tx.send(()).ok();
1927 }
1928 }
1929 }
1930
1931 fn compute_autoindents(
1932 &self,
1933 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1934 let max_rows_between_yields = 100;
1935 let snapshot = self.snapshot();
1936 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1937 return None;
1938 }
1939
1940 let autoindent_requests = self.autoindent_requests.clone();
1941 Some(async move {
1942 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1943 for request in autoindent_requests {
1944 // Resolve each edited range to its row in the current buffer and in the
1945 // buffer before this batch of edits.
1946 let mut row_ranges = Vec::new();
1947 let mut old_to_new_rows = BTreeMap::new();
1948 let mut language_indent_sizes_by_new_row = Vec::new();
1949 for entry in &request.entries {
1950 let position = entry.range.start;
1951 let new_row = position.to_point(&snapshot).row;
1952 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1953 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1954
1955 if let Some(old_row) = entry.old_row {
1956 old_to_new_rows.insert(old_row, new_row);
1957 }
1958 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1959 }
1960
1961 // Build a map containing the suggested indentation for each of the edited lines
1962 // with respect to the state of the buffer before these edits. This map is keyed
1963 // by the rows for these lines in the current state of the buffer.
1964 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1965 let old_edited_ranges =
1966 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1967 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1968 let mut language_indent_size = IndentSize::default();
1969 for old_edited_range in old_edited_ranges {
1970 let suggestions = request
1971 .before_edit
1972 .suggest_autoindents(old_edited_range.clone())
1973 .into_iter()
1974 .flatten();
1975 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1976 if let Some(suggestion) = suggestion {
1977 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1978
1979 // Find the indent size based on the language for this row.
1980 while let Some((row, size)) = language_indent_sizes.peek() {
1981 if *row > new_row {
1982 break;
1983 }
1984 language_indent_size = *size;
1985 language_indent_sizes.next();
1986 }
1987
1988 let suggested_indent = old_to_new_rows
1989 .get(&suggestion.basis_row)
1990 .and_then(|from_row| {
1991 Some(old_suggestions.get(from_row).copied()?.0)
1992 })
1993 .unwrap_or_else(|| {
1994 request
1995 .before_edit
1996 .indent_size_for_line(suggestion.basis_row)
1997 })
1998 .with_delta(suggestion.delta, language_indent_size);
1999 old_suggestions
2000 .insert(new_row, (suggested_indent, suggestion.within_error));
2001 }
2002 }
2003 yield_now().await;
2004 }
2005
2006 // Compute new suggestions for each line, but only include them in the result
2007 // if they differ from the old suggestion for that line.
2008 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
2009 let mut language_indent_size = IndentSize::default();
2010 for (row_range, original_indent_column) in row_ranges {
2011 let new_edited_row_range = if request.is_block_mode {
2012 row_range.start..row_range.start + 1
2013 } else {
2014 row_range.clone()
2015 };
2016
2017 let suggestions = snapshot
2018 .suggest_autoindents(new_edited_row_range.clone())
2019 .into_iter()
2020 .flatten();
2021 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
2022 if let Some(suggestion) = suggestion {
2023 // Find the indent size based on the language for this row.
2024 while let Some((row, size)) = language_indent_sizes.peek() {
2025 if *row > new_row {
2026 break;
2027 }
2028 language_indent_size = *size;
2029 language_indent_sizes.next();
2030 }
2031
2032 let suggested_indent = indent_sizes
2033 .get(&suggestion.basis_row)
2034 .copied()
2035 .map(|e| e.0)
2036 .unwrap_or_else(|| {
2037 snapshot.indent_size_for_line(suggestion.basis_row)
2038 })
2039 .with_delta(suggestion.delta, language_indent_size);
2040
2041 if old_suggestions.get(&new_row).is_none_or(
2042 |(old_indentation, was_within_error)| {
2043 suggested_indent != *old_indentation
2044 && (!suggestion.within_error || *was_within_error)
2045 },
2046 ) {
2047 indent_sizes.insert(
2048 new_row,
2049 (suggested_indent, request.ignore_empty_lines),
2050 );
2051 }
2052 }
2053 }
2054
2055 if let (true, Some(original_indent_column)) =
2056 (request.is_block_mode, original_indent_column)
2057 {
2058 let new_indent =
2059 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2060 *indent
2061 } else {
2062 snapshot.indent_size_for_line(row_range.start)
2063 };
2064 let delta = new_indent.len as i64 - original_indent_column as i64;
2065 if delta != 0 {
2066 for row in row_range.skip(1) {
2067 indent_sizes.entry(row).or_insert_with(|| {
2068 let mut size = snapshot.indent_size_for_line(row);
2069 if size.kind == new_indent.kind {
2070 match delta.cmp(&0) {
2071 Ordering::Greater => size.len += delta as u32,
2072 Ordering::Less => {
2073 size.len = size.len.saturating_sub(-delta as u32)
2074 }
2075 Ordering::Equal => {}
2076 }
2077 }
2078 (size, request.ignore_empty_lines)
2079 });
2080 }
2081 }
2082 }
2083
2084 yield_now().await;
2085 }
2086 }
2087
2088 indent_sizes
2089 .into_iter()
2090 .filter_map(|(row, (indent, ignore_empty_lines))| {
2091 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2092 None
2093 } else {
2094 Some((row, indent))
2095 }
2096 })
2097 .collect()
2098 })
2099 }
2100
2101 fn apply_autoindents(
2102 &mut self,
2103 indent_sizes: BTreeMap<u32, IndentSize>,
2104 cx: &mut Context<Self>,
2105 ) {
2106 self.autoindent_requests.clear();
2107 for tx in self.wait_for_autoindent_txs.drain(..) {
2108 tx.send(()).ok();
2109 }
2110
2111 let edits: Vec<_> = indent_sizes
2112 .into_iter()
2113 .filter_map(|(row, indent_size)| {
2114 let current_size = indent_size_for_line(self, row);
2115 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2116 })
2117 .collect();
2118
2119 let preserve_preview = self.preserve_preview();
2120 self.edit(edits, None, cx);
2121 if preserve_preview {
2122 self.refresh_preview();
2123 }
2124 }
2125
2126 /// Create a minimal edit that will cause the given row to be indented
2127 /// with the given size. After applying this edit, the length of the line
2128 /// will always be at least `new_size.len`.
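    ///
    /// A minimal sketch (illustrative only, not compiled as a doctest):
    ///
    /// ```ignore
    /// use text::Point;
    ///
    /// // Growing a 2-space indent on row 0 to 4 spaces inserts two spaces at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     0,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(0, 0)..Point::new(0, 0), "  ".to_string())));
    /// ```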
2129 pub fn edit_for_indent_size_adjustment(
2130 row: u32,
2131 current_size: IndentSize,
2132 new_size: IndentSize,
2133 ) -> Option<(Range<Point>, String)> {
2134 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
2136 Ordering::Greater => {
2137 let point = Point::new(row, 0);
2138 Some((
2139 point..point,
2140 iter::repeat(new_size.char())
2141 .take((new_size.len - current_size.len) as usize)
2142 .collect::<String>(),
2143 ))
2144 }
2145
2146 Ordering::Less => Some((
2147 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2148 String::new(),
2149 )),
2150
2151 Ordering::Equal => None,
2152 }
2153 } else {
2154 Some((
2155 Point::new(row, 0)..Point::new(row, current_size.len),
2156 iter::repeat(new_size.char())
2157 .take(new_size.len as usize)
2158 .collect::<String>(),
2159 ))
2160 }
2161 }
2162
2163 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2164 /// and the given new text.
2165 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2166 let old_text = self.as_rope().clone();
2167 let base_version = self.version();
2168 cx.background_executor()
2169 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2170 let old_text = old_text.to_string();
2171 let line_ending = LineEnding::detect(&new_text);
2172 LineEnding::normalize(&mut new_text);
2173 let edits = text_diff(&old_text, &new_text);
2174 Diff {
2175 base_version,
2176 line_ending,
2177 edits,
2178 }
2179 })
2180 }
2181
2182 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2184 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2185 let old_text = self.as_rope().clone();
2186 let line_ending = self.line_ending();
2187 let base_version = self.version();
2188 cx.background_spawn(async move {
2189 let ranges = trailing_whitespace_ranges(&old_text);
2190 let empty = Arc::<str>::from("");
2191 Diff {
2192 base_version,
2193 line_ending,
2194 edits: ranges
2195 .into_iter()
2196 .map(|range| (range, empty.clone()))
2197 .collect(),
2198 }
2199 })
2200 }
2201
2202 /// Ensures that the buffer ends with a single newline character, and
2203 /// no other whitespace. Skips if the buffer is empty.
2204 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2205 let len = self.len();
2206 if len == 0 {
2207 return;
2208 }
2209 let mut offset = len;
2210 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2211 let non_whitespace_len = chunk
2212 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2213 .len();
2214 offset -= chunk.len();
2215 offset += non_whitespace_len;
2216 if non_whitespace_len != 0 {
2217 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2218 return;
2219 }
2220 break;
2221 }
2222 }
2223 self.edit([(offset..len, "\n")], None, cx);
2224 }
2225
2226 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2227 /// calculated, then adjust the diff to account for those changes, and discard any
2228 /// parts of the diff that conflict with those changes.
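    ///
    /// A minimal sketch of the intended flow (illustrative only; assumes an
    /// `Entity<Buffer>` named `buffer` and an async gpui context):
    ///
    /// ```ignore
    /// let task = buffer.read(cx).diff(new_text, cx); // diff computed off the main thread
    /// let diff = task.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx); // adjusted for any edits made in the meantime
    /// });
    /// ```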
2229 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2230 let snapshot = self.snapshot();
2231 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2232 let mut delta = 0;
2233 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2234 while let Some(edit_since) = edits_since.peek() {
2235 // If the edit occurs after a diff hunk, then it does not
2236 // affect that hunk.
2237 if edit_since.old.start > range.end {
2238 break;
2239 }
2240 // If the edit precedes the diff hunk, then adjust the hunk
2241 // to reflect the edit.
2242 else if edit_since.old.end < range.start {
2243 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2244 edits_since.next();
2245 }
2246 // If the edit intersects a diff hunk, then discard that hunk.
2247 else {
2248 return None;
2249 }
2250 }
2251
2252 let start = (range.start as i64 + delta) as usize;
2253 let end = (range.end as i64 + delta) as usize;
2254 Some((start..end, new_text))
2255 });
2256
2257 self.start_transaction();
2258 self.text.set_line_ending(diff.line_ending);
2259 self.edit(adjusted_edits, None, cx);
2260 self.end_transaction(cx)
2261 }
2262
2263 pub fn has_unsaved_edits(&self) -> bool {
2264 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2265
2266 if last_version == self.version {
2267 self.has_unsaved_edits
2268 .set((last_version, has_unsaved_edits));
2269 return has_unsaved_edits;
2270 }
2271
2272 let has_edits = self.has_edits_since(&self.saved_version);
2273 self.has_unsaved_edits
2274 .set((self.version.clone(), has_edits));
2275 has_edits
2276 }
2277
2278 /// Checks if the buffer has unsaved changes.
2279 pub fn is_dirty(&self) -> bool {
2280 if self.capability == Capability::ReadOnly {
2281 return false;
2282 }
2283 if self.has_conflict {
2284 return true;
2285 }
2286 match self.file.as_ref().map(|f| f.disk_state()) {
2287 Some(DiskState::New) | Some(DiskState::Deleted) => {
2288 !self.is_empty() && self.has_unsaved_edits()
2289 }
2290 _ => self.has_unsaved_edits(),
2291 }
2292 }
2293
2294 /// Marks the buffer as having a conflict regardless of current buffer state.
2295 pub fn set_conflict(&mut self) {
2296 self.has_conflict = true;
2297 }
2298
2299 /// Checks if the buffer and its file have both changed since the buffer
2300 /// was last saved or reloaded.
2301 pub fn has_conflict(&self) -> bool {
2302 if self.has_conflict {
2303 return true;
2304 }
2305 let Some(file) = self.file.as_ref() else {
2306 return false;
2307 };
2308 match file.disk_state() {
2309 DiskState::New => false,
2310 DiskState::Present { mtime } => match self.saved_mtime {
2311 Some(saved_mtime) => {
2312 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2313 }
2314 None => true,
2315 },
2316 DiskState::Deleted => false,
2317 DiskState::Historic { .. } => false,
2318 }
2319 }
2320
2321 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2322 pub fn subscribe(&mut self) -> Subscription<usize> {
2323 self.text.subscribe()
2324 }
2325
2326 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2327 ///
2328 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
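    ///
    /// A small sketch (illustrative only):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...after any subsequent change to the buffer...
    /// assert!(changed.get());
    /// ```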
2330 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2331 if let Err(ix) = self
2332 .change_bits
2333 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2334 {
2335 self.change_bits.insert(ix, bit);
2336 }
2337 }
2338
2339 /// Set the change bit for all "listeners".
2340 fn was_changed(&mut self) {
2341 self.change_bits.retain(|change_bit| {
2342 change_bit
2343 .upgrade()
2344 .inspect(|bit| {
2345 _ = bit.replace(true);
2346 })
2347 .is_some()
2348 });
2349 }
2350
2351 /// Starts a transaction, if one is not already in-progress. When undoing or
2352 /// redoing edits, all of the edits performed within a transaction are undone
2353 /// or redone together.
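    ///
    /// A sketch of grouping two edits into a single undo step (illustrative
    /// only; assumes a gpui `Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(12..12, "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// // A single `undo` now reverts both edits.
    /// ```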
2354 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2355 self.start_transaction_at(Instant::now())
2356 }
2357
2358 /// Starts a transaction, providing the current time. Subsequent transactions
2359 /// that occur within a short period of time will be grouped together. This
2360 /// is controlled by the buffer's undo grouping duration.
2361 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2362 self.transaction_depth += 1;
2363 if self.was_dirty_before_starting_transaction.is_none() {
2364 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2365 }
2366 self.text.start_transaction_at(now)
2367 }
2368
2369 /// Terminates the current transaction, if this is the outermost transaction.
2370 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2371 self.end_transaction_at(Instant::now(), cx)
2372 }
2373
2374 /// Terminates the current transaction, providing the current time. Subsequent transactions
2375 /// that occur within a short period of time will be grouped together. This
2376 /// is controlled by the buffer's undo grouping duration.
2377 pub fn end_transaction_at(
2378 &mut self,
2379 now: Instant,
2380 cx: &mut Context<Self>,
2381 ) -> Option<TransactionId> {
2382 assert!(self.transaction_depth > 0);
2383 self.transaction_depth -= 1;
2384 let was_dirty = if self.transaction_depth == 0 {
2385 self.was_dirty_before_starting_transaction.take().unwrap()
2386 } else {
2387 false
2388 };
2389 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2390 self.did_edit(&start_version, was_dirty, cx);
2391 Some(transaction_id)
2392 } else {
2393 None
2394 }
2395 }
2396
2397 /// Manually add a transaction to the buffer's undo history.
2398 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2399 self.text.push_transaction(transaction, now);
2400 }
2401
2402 /// Differs from `push_transaction` in that it does not clear the redo
2403 /// stack. Intended to be used to create a parent transaction to merge
2404 /// potential child transactions into.
2405 ///
2406 /// The caller is responsible for removing it from the undo history using
2407 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2408 /// are merged into this transaction, the caller is responsible for ensuring
2409 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2410 /// cleared is to create transactions with the usual `start_transaction` and
2411 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
2413 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2414 self.text.push_empty_transaction(now)
2415 }
2416
2417 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2419 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2420 self.text.finalize_last_transaction()
2421 }
2422
2423 /// Manually group all changes since a given transaction.
2424 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2425 self.text.group_until_transaction(transaction_id);
2426 }
2427
2428 /// Manually remove a transaction from the buffer's undo history
2429 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2430 self.text.forget_transaction(transaction_id)
2431 }
2432
2433 /// Retrieve a transaction from the buffer's undo history
2434 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2435 self.text.get_transaction(transaction_id)
2436 }
2437
2438 /// Manually merge two transactions in the buffer's undo history.
2439 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2440 self.text.merge_transactions(transaction, destination);
2441 }
2442
2443 /// Waits for the buffer to receive operations with the given timestamps.
2444 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2445 &mut self,
2446 edit_ids: It,
2447 ) -> impl Future<Output = Result<()>> + use<It> {
2448 self.text.wait_for_edits(edit_ids)
2449 }
2450
2451 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2452 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2453 &mut self,
2454 anchors: It,
2455 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2456 self.text.wait_for_anchors(anchors)
2457 }
2458
2459 /// Waits for the buffer to receive operations up to the given version.
2460 pub fn wait_for_version(
2461 &mut self,
2462 version: clock::Global,
2463 ) -> impl Future<Output = Result<()>> + use<> {
2464 self.text.wait_for_version(version)
2465 }
2466
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`],
    /// or [`Buffer::wait_for_anchors`] to resolve with an error.
2469 pub fn give_up_waiting(&mut self) {
2470 self.text.give_up_waiting();
2471 }
2472
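    /// Returns a receiver that resolves once all pending autoindent requests
    /// have been applied, or `None` if no autoindent requests are pending.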
2473 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2474 let mut rx = None;
2475 if !self.autoindent_requests.is_empty() {
2476 let channel = oneshot::channel();
2477 self.wait_for_autoindent_txs.push(channel.0);
2478 rx = Some(channel.1);
2479 }
2480 rx
2481 }
2482
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2484 pub fn set_active_selections(
2485 &mut self,
2486 selections: Arc<[Selection<Anchor>]>,
2487 line_mode: bool,
2488 cursor_shape: CursorShape,
2489 cx: &mut Context<Self>,
2490 ) {
2491 let lamport_timestamp = self.text.lamport_clock.tick();
2492 self.remote_selections.insert(
2493 self.text.replica_id(),
2494 SelectionSet {
2495 selections: selections.clone(),
2496 lamport_timestamp,
2497 line_mode,
2498 cursor_shape,
2499 },
2500 );
2501 self.send_operation(
2502 Operation::UpdateSelections {
2503 selections,
2504 line_mode,
2505 lamport_timestamp,
2506 cursor_shape,
2507 },
2508 true,
2509 cx,
2510 );
2511 self.non_text_state_update_count += 1;
2512 cx.notify();
2513 }
2514
2515 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2516 /// this replica.
2517 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2518 if self
2519 .remote_selections
2520 .get(&self.text.replica_id())
2521 .is_none_or(|set| !set.selections.is_empty())
2522 {
2523 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2524 }
2525 }
2526
2527 pub fn set_agent_selections(
2528 &mut self,
2529 selections: Arc<[Selection<Anchor>]>,
2530 line_mode: bool,
2531 cursor_shape: CursorShape,
2532 cx: &mut Context<Self>,
2533 ) {
2534 let lamport_timestamp = self.text.lamport_clock.tick();
2535 self.remote_selections.insert(
2536 ReplicaId::AGENT,
2537 SelectionSet {
2538 selections,
2539 lamport_timestamp,
2540 line_mode,
2541 cursor_shape,
2542 },
2543 );
2544 self.non_text_state_update_count += 1;
2545 cx.notify();
2546 }
2547
2548 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2549 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2550 }
2551
2552 /// Replaces the buffer's entire text.
2553 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2554 where
2555 T: Into<Arc<str>>,
2556 {
2557 self.autoindent_requests.clear();
2558 self.edit([(0..self.len(), text)], None, cx)
2559 }
2560
2561 /// Appends the given text to the end of the buffer.
2562 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2563 where
2564 T: Into<Arc<str>>,
2565 {
2566 self.edit([(self.len()..self.len(), text)], None, cx)
2567 }
2568
2569 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2570 /// delete, and a string of text to insert at that location. Adjacent edits are coalesced.
2571 ///
2572 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2573 /// request for the edited ranges, which will be processed when the buffer finishes
2574 /// parsing.
2575 ///
2576 /// Parsing takes place at the end of a transaction, and may compute synchronously
2577 /// or asynchronously, depending on the changes.
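    ///
    /// A hedged sketch (illustrative only; `position` is a hypothetical offset):
    ///
    /// ```ignore
    /// // Insert a new line and let the language's indentation rules indent it.
    /// buffer.edit(
    ///     [(position..position, "\nlet x = 1;")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```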
2578 pub fn edit<I, S, T>(
2579 &mut self,
2580 edits_iter: I,
2581 autoindent_mode: Option<AutoindentMode>,
2582 cx: &mut Context<Self>,
2583 ) -> Option<clock::Lamport>
2584 where
2585 I: IntoIterator<Item = (Range<S>, T)>,
2586 S: ToOffset,
2587 T: Into<Arc<str>>,
2588 {
2589 self.edit_internal(edits_iter, autoindent_mode, true, cx)
2590 }
2591
2592 /// Like [`edit`](Self::edit), but does not coalesce adjacent edits.
2593 pub fn edit_non_coalesce<I, S, T>(
2594 &mut self,
2595 edits_iter: I,
2596 autoindent_mode: Option<AutoindentMode>,
2597 cx: &mut Context<Self>,
2598 ) -> Option<clock::Lamport>
2599 where
2600 I: IntoIterator<Item = (Range<S>, T)>,
2601 S: ToOffset,
2602 T: Into<Arc<str>>,
2603 {
2604 self.edit_internal(edits_iter, autoindent_mode, false, cx)
2605 }
2606
2607 fn edit_internal<I, S, T>(
2608 &mut self,
2609 edits_iter: I,
2610 autoindent_mode: Option<AutoindentMode>,
2611 coalesce_adjacent: bool,
2612 cx: &mut Context<Self>,
2613 ) -> Option<clock::Lamport>
2614 where
2615 I: IntoIterator<Item = (Range<S>, T)>,
2616 S: ToOffset,
2617 T: Into<Arc<str>>,
2618 {
2619 // Skip invalid edits and coalesce contiguous ones.
2620 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2621
2622 for (range, new_text) in edits_iter {
2623 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2624
2625 if range.start > range.end {
2626 mem::swap(&mut range.start, &mut range.end);
2627 }
2628 let new_text = new_text.into();
2629 if !new_text.is_empty() || !range.is_empty() {
2630 let prev_edit = edits.last_mut();
2631 let should_coalesce = prev_edit.as_ref().is_some_and(|(prev_range, _)| {
2632 if coalesce_adjacent {
2633 prev_range.end >= range.start
2634 } else {
2635 prev_range.end > range.start
2636 }
2637 });
2638
2639 if let Some((prev_range, prev_text)) = prev_edit
2640 && should_coalesce
2641 {
2642 prev_range.end = cmp::max(prev_range.end, range.end);
2643 *prev_text = format!("{prev_text}{new_text}").into();
2644 } else {
2645 edits.push((range, new_text));
2646 }
2647 }
2648 }
2649 if edits.is_empty() {
2650 return None;
2651 }
2652
2653 self.start_transaction();
2654 self.pending_autoindent.take();
2655 let autoindent_request = autoindent_mode
2656 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2657
2658 let edit_operation = self.text.edit(edits.iter().cloned());
2659 let edit_id = edit_operation.timestamp();
2660
2661 if let Some((before_edit, mode)) = autoindent_request {
2662 let mut delta = 0isize;
2663 let mut previous_setting = None;
2664 let entries: Vec<_> = edits
2665 .into_iter()
2666 .enumerate()
2667 .zip(&edit_operation.as_edit().unwrap().new_text)
2668 .filter(|((_, (range, _)), _)| {
2669 let language = before_edit.language_at(range.start);
2670 let language_id = language.map(|l| l.id());
2671 if let Some((cached_language_id, auto_indent)) = previous_setting
2672 && cached_language_id == language_id
2673 {
2674 auto_indent
2675 } else {
2676 // The auto-indent setting is not present in editorconfigs, hence
2677 // we can avoid passing the file here.
2678 let auto_indent =
2679 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2680 previous_setting = Some((language_id, auto_indent));
2681 auto_indent
2682 }
2683 })
2684 .map(|((ix, (range, _)), new_text)| {
2685 let new_text_length = new_text.len();
2686 let old_start = range.start.to_point(&before_edit);
2687 let new_start = (delta + range.start as isize) as usize;
2688 let range_len = range.end - range.start;
2689 delta += new_text_length as isize - range_len as isize;
2690
2691 // Decide what range of the insertion to auto-indent, and whether
2692 // the first line of the insertion should be considered a newly-inserted line
2693 // or an edit to an existing line.
2694 let mut range_of_insertion_to_indent = 0..new_text_length;
2695 let mut first_line_is_new = true;
2696
2697 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2698 let old_line_end = before_edit.line_len(old_start.row);
2699
2700 if old_start.column > old_line_start {
2701 first_line_is_new = false;
2702 }
2703
2704 if !new_text.contains('\n')
2705 && (old_start.column + (range_len as u32) < old_line_end
2706 || old_line_end == old_line_start)
2707 {
2708 first_line_is_new = false;
2709 }
2710
2711 // When inserting text starting with a newline, avoid auto-indenting the
2712 // previous line.
2713 if new_text.starts_with('\n') {
2714 range_of_insertion_to_indent.start += 1;
2715 first_line_is_new = true;
2716 }
2717
2718 let mut original_indent_column = None;
2719 if let AutoindentMode::Block {
2720 original_indent_columns,
2721 } = &mode
2722 {
2723 original_indent_column = Some(if new_text.starts_with('\n') {
2724 indent_size_for_text(
2725 new_text[range_of_insertion_to_indent.clone()].chars(),
2726 )
2727 .len
2728 } else {
2729 original_indent_columns
2730 .get(ix)
2731 .copied()
2732 .flatten()
2733 .unwrap_or_else(|| {
2734 indent_size_for_text(
2735 new_text[range_of_insertion_to_indent.clone()].chars(),
2736 )
2737 .len
2738 })
2739 });
2740
2741 // Avoid auto-indenting the line after the edit.
2742 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2743 range_of_insertion_to_indent.end -= 1;
2744 }
2745 }
2746
2747 AutoindentRequestEntry {
2748 original_indent_column,
2749 old_row: if first_line_is_new {
2750 None
2751 } else {
2752 Some(old_start.row)
2753 },
2754 indent_size: before_edit.language_indent_size_at(range.start, cx),
2755 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2756 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2757 }
2758 })
2759 .collect();
2760
2761 if !entries.is_empty() {
2762 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2763 before_edit,
2764 entries,
2765 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2766 ignore_empty_lines: false,
2767 }));
2768 }
2769 }
2770
2771 self.end_transaction(cx);
2772 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2773 Some(edit_id)
2774 }
2775
2776 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2777 self.was_changed();
2778
2779 if self.edits_since::<usize>(old_version).next().is_none() {
2780 return;
2781 }
2782
2783 self.reparse(cx, true);
2784 cx.emit(BufferEvent::Edited);
2785 if was_dirty != self.is_dirty() {
2786 cx.emit(BufferEvent::DirtyChanged);
2787 }
2788 cx.notify();
2789 }
2790
2791 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2792 where
2793 I: IntoIterator<Item = Range<T>>,
2794 T: ToOffset + Copy,
2795 {
2796 let before_edit = self.snapshot();
2797 let entries = ranges
2798 .into_iter()
2799 .map(|range| AutoindentRequestEntry {
2800 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2801 old_row: None,
2802 indent_size: before_edit.language_indent_size_at(range.start, cx),
2803 original_indent_column: None,
2804 })
2805 .collect();
2806 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2807 before_edit,
2808 entries,
2809 is_block_mode: false,
2810 ignore_empty_lines: true,
2811 }));
2812 self.request_autoindent(cx);
2813 }
2814
2815 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2816 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2817 pub fn insert_empty_line(
2818 &mut self,
2819 position: impl ToPoint,
2820 space_above: bool,
2821 space_below: bool,
2822 cx: &mut Context<Self>,
2823 ) -> Point {
2824 let mut position = position.to_point(self);
2825
2826 self.start_transaction();
2827
2828 self.edit(
2829 [(position..position, "\n")],
2830 Some(AutoindentMode::EachLine),
2831 cx,
2832 );
2833
2834 if position.column > 0 {
2835 position += Point::new(1, 0);
2836 }
2837
2838 if !self.is_line_blank(position.row) {
2839 self.edit(
2840 [(position..position, "\n")],
2841 Some(AutoindentMode::EachLine),
2842 cx,
2843 );
2844 }
2845
2846 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2847 self.edit(
2848 [(position..position, "\n")],
2849 Some(AutoindentMode::EachLine),
2850 cx,
2851 );
2852 position.row += 1;
2853 }
2854
2855 if space_below
2856 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2857 {
2858 self.edit(
2859 [(position..position, "\n")],
2860 Some(AutoindentMode::EachLine),
2861 cx,
2862 );
2863 }
2864
2865 self.end_transaction(cx);
2866
2867 position
2868 }
2869
2870 /// Applies the given remote operations to the buffer.
2871 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2872 self.pending_autoindent.take();
2873 let was_dirty = self.is_dirty();
2874 let old_version = self.version.clone();
2875 let mut deferred_ops = Vec::new();
2876 let buffer_ops = ops
2877 .into_iter()
2878 .filter_map(|op| match op {
2879 Operation::Buffer(op) => Some(op),
2880 _ => {
2881 if self.can_apply_op(&op) {
2882 self.apply_op(op, cx);
2883 } else {
2884 deferred_ops.push(op);
2885 }
2886 None
2887 }
2888 })
2889 .collect::<Vec<_>>();
2890 for operation in buffer_ops.iter() {
2891 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2892 }
2893 self.text.apply_ops(buffer_ops);
2894 self.deferred_ops.insert(deferred_ops);
2895 self.flush_deferred_ops(cx);
2896 self.did_edit(&old_version, was_dirty, cx);
2897 // Notify independently of whether the buffer was edited as the operations could include a
2898 // selection update.
2899 cx.notify();
2900 }
2901
2902 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2903 let mut deferred_ops = Vec::new();
2904 for op in self.deferred_ops.drain().iter().cloned() {
2905 if self.can_apply_op(&op) {
2906 self.apply_op(op, cx);
2907 } else {
2908 deferred_ops.push(op);
2909 }
2910 }
2911 self.deferred_ops.insert(deferred_ops);
2912 }
2913
2914 pub fn has_deferred_ops(&self) -> bool {
2915 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2916 }
2917
2918 fn can_apply_op(&self, operation: &Operation) -> bool {
2919 match operation {
2920 Operation::Buffer(_) => {
2921 unreachable!("buffer operations should never be applied at this layer")
2922 }
2923 Operation::UpdateDiagnostics {
2924 diagnostics: diagnostic_set,
2925 ..
2926 } => diagnostic_set.iter().all(|diagnostic| {
2927 self.text.can_resolve(&diagnostic.range.start)
2928 && self.text.can_resolve(&diagnostic.range.end)
2929 }),
2930 Operation::UpdateSelections { selections, .. } => selections
2931 .iter()
2932 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2933 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2934 }
2935 }
2936
2937 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2938 match operation {
2939 Operation::Buffer(_) => {
2940 unreachable!("buffer operations should never be applied at this layer")
2941 }
2942 Operation::UpdateDiagnostics {
2943 server_id,
2944 diagnostics: diagnostic_set,
2945 lamport_timestamp,
2946 } => {
2947 let snapshot = self.snapshot();
2948 self.apply_diagnostic_update(
2949 server_id,
2950 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2951 lamport_timestamp,
2952 cx,
2953 );
2954 }
2955 Operation::UpdateSelections {
2956 selections,
2957 lamport_timestamp,
2958 line_mode,
2959 cursor_shape,
2960 } => {
2961 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2962 && set.lamport_timestamp > lamport_timestamp
2963 {
2964 return;
2965 }
2966
2967 self.remote_selections.insert(
2968 lamport_timestamp.replica_id,
2969 SelectionSet {
2970 selections,
2971 lamport_timestamp,
2972 line_mode,
2973 cursor_shape,
2974 },
2975 );
2976 self.text.lamport_clock.observe(lamport_timestamp);
2977 self.non_text_state_update_count += 1;
2978 }
2979 Operation::UpdateCompletionTriggers {
2980 triggers,
2981 lamport_timestamp,
2982 server_id,
2983 } => {
2984 if triggers.is_empty() {
2985 self.completion_triggers_per_language_server
2986 .remove(&server_id);
2987 self.completion_triggers = self
2988 .completion_triggers_per_language_server
2989 .values()
2990 .flat_map(|triggers| triggers.iter().cloned())
2991 .collect();
2992 } else {
2993 self.completion_triggers_per_language_server
2994 .insert(server_id, triggers.iter().cloned().collect());
2995 self.completion_triggers.extend(triggers);
2996 }
2997 self.text.lamport_clock.observe(lamport_timestamp);
2998 }
2999 Operation::UpdateLineEnding {
3000 line_ending,
3001 lamport_timestamp,
3002 } => {
3003 self.text.set_line_ending(line_ending);
3004 self.text.lamport_clock.observe(lamport_timestamp);
3005 }
3006 }
3007 }
3008
3009 fn apply_diagnostic_update(
3010 &mut self,
3011 server_id: LanguageServerId,
3012 diagnostics: DiagnosticSet,
3013 lamport_timestamp: clock::Lamport,
3014 cx: &mut Context<Self>,
3015 ) {
3016 if lamport_timestamp > self.diagnostics_timestamp {
3017 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
3018 if diagnostics.is_empty() {
3019 if let Ok(ix) = ix {
3020 self.diagnostics.remove(ix);
3021 }
3022 } else {
3023 match ix {
3024 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
3025 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
3026 };
3027 }
3028 self.diagnostics_timestamp = lamport_timestamp;
3029 self.non_text_state_update_count += 1;
3030 self.text.lamport_clock.observe(lamport_timestamp);
3031 cx.notify();
3032 cx.emit(BufferEvent::DiagnosticsUpdated);
3033 }
3034 }
3035
3036 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
3037 self.was_changed();
3038 cx.emit(BufferEvent::Operation {
3039 operation,
3040 is_local,
3041 });
3042 }
3043
3044 /// Removes the selections for a given peer.
3045 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
3046 self.remote_selections.remove(&replica_id);
3047 cx.notify();
3048 }
3049
3050 /// Undoes the most recent transaction.
3051 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3052 let was_dirty = self.is_dirty();
3053 let old_version = self.version.clone();
3054
3055 if let Some((transaction_id, operation)) = self.text.undo() {
3056 self.send_operation(Operation::Buffer(operation), true, cx);
3057 self.did_edit(&old_version, was_dirty, cx);
3058 Some(transaction_id)
3059 } else {
3060 None
3061 }
3062 }
3063
3064 /// Manually undoes a specific transaction in the buffer's undo history.
3065 pub fn undo_transaction(
3066 &mut self,
3067 transaction_id: TransactionId,
3068 cx: &mut Context<Self>,
3069 ) -> bool {
3070 let was_dirty = self.is_dirty();
3071 let old_version = self.version.clone();
3072 if let Some(operation) = self.text.undo_transaction(transaction_id) {
3073 self.send_operation(Operation::Buffer(operation), true, cx);
3074 self.did_edit(&old_version, was_dirty, cx);
3075 true
3076 } else {
3077 false
3078 }
3079 }
3080
3081 /// Manually undoes all changes after a given transaction in the buffer's undo history.
3082 pub fn undo_to_transaction(
3083 &mut self,
3084 transaction_id: TransactionId,
3085 cx: &mut Context<Self>,
3086 ) -> bool {
3087 let was_dirty = self.is_dirty();
3088 let old_version = self.version.clone();
3089
3090 let operations = self.text.undo_to_transaction(transaction_id);
3091 let undone = !operations.is_empty();
3092 for operation in operations {
3093 self.send_operation(Operation::Buffer(operation), true, cx);
3094 }
3095 if undone {
3096 self.did_edit(&old_version, was_dirty, cx)
3097 }
3098 undone
3099 }
3100
3101 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3102 let was_dirty = self.is_dirty();
3103 let operation = self.text.undo_operations(counts);
3104 let old_version = self.version.clone();
3105 self.send_operation(Operation::Buffer(operation), true, cx);
3106 self.did_edit(&old_version, was_dirty, cx);
3107 }
3108
    /// Redoes the most recently undone transaction.
3110 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3111 let was_dirty = self.is_dirty();
3112 let old_version = self.version.clone();
3113
3114 if let Some((transaction_id, operation)) = self.text.redo() {
3115 self.send_operation(Operation::Buffer(operation), true, cx);
3116 self.did_edit(&old_version, was_dirty, cx);
3117 Some(transaction_id)
3118 } else {
3119 None
3120 }
3121 }
3122
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3124 pub fn redo_to_transaction(
3125 &mut self,
3126 transaction_id: TransactionId,
3127 cx: &mut Context<Self>,
3128 ) -> bool {
3129 let was_dirty = self.is_dirty();
3130 let old_version = self.version.clone();
3131
3132 let operations = self.text.redo_to_transaction(transaction_id);
3133 let redone = !operations.is_empty();
3134 for operation in operations {
3135 self.send_operation(Operation::Buffer(operation), true, cx);
3136 }
3137 if redone {
3138 self.did_edit(&old_version, was_dirty, cx)
3139 }
3140 redone
3141 }
3142
3143 /// Override current completion triggers with the user-provided completion triggers.
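    ///
    /// A small sketch (illustrative only; `server_id` is a hypothetical
    /// `LanguageServerId`):
    ///
    /// ```ignore
    /// buffer.set_completion_triggers(
    ///     server_id,
    ///     BTreeSet::from([".".to_string(), "::".to_string()]),
    ///     cx,
    /// );
    /// ```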
3144 pub fn set_completion_triggers(
3145 &mut self,
3146 server_id: LanguageServerId,
3147 triggers: BTreeSet<String>,
3148 cx: &mut Context<Self>,
3149 ) {
3150 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3151 if triggers.is_empty() {
3152 self.completion_triggers_per_language_server
3153 .remove(&server_id);
3154 self.completion_triggers = self
3155 .completion_triggers_per_language_server
3156 .values()
3157 .flat_map(|triggers| triggers.iter().cloned())
3158 .collect();
3159 } else {
3160 self.completion_triggers_per_language_server
3161 .insert(server_id, triggers.clone());
3162 self.completion_triggers.extend(triggers.iter().cloned());
3163 }
3164 self.send_operation(
3165 Operation::UpdateCompletionTriggers {
3166 triggers: triggers.into_iter().collect(),
3167 lamport_timestamp: self.completion_triggers_timestamp,
3168 server_id,
3169 },
3170 true,
3171 cx,
3172 );
3173 cx.notify();
3174 }
3175
3176 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
3178 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3179 &self.completion_triggers
3180 }
3181
3182 /// Call this directly after performing edits to prevent the preview tab
3183 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3184 /// to return false until there are additional edits.
3185 pub fn refresh_preview(&mut self) {
3186 self.preview_version = self.version.clone();
3187 }
3188
3189 /// Whether we should preserve the preview status of a tab containing this buffer.
3190 pub fn preserve_preview(&self) -> bool {
3191 !self.has_edits_since(&self.preview_version)
3192 }
3193}
3194
3195#[doc(hidden)]
3196#[cfg(any(test, feature = "test-support"))]
3197impl Buffer {
3198 pub fn edit_via_marked_text(
3199 &mut self,
3200 marked_string: &str,
3201 autoindent_mode: Option<AutoindentMode>,
3202 cx: &mut Context<Self>,
3203 ) {
3204 let edits = self.edits_for_marked_text(marked_string);
3205 self.edit(edits, autoindent_mode, cx);
3206 }
3207
3208 pub fn set_group_interval(&mut self, group_interval: Duration) {
3209 self.text.set_group_interval(group_interval);
3210 }
3211
3212 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3213 where
3214 T: rand::Rng,
3215 {
3216 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3217 let mut last_end = None;
3218 for _ in 0..old_range_count {
3219 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3220 break;
3221 }
3222
3223 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3224 let mut range = self.random_byte_range(new_start, rng);
3225 if rng.random_bool(0.2) {
3226 mem::swap(&mut range.start, &mut range.end);
3227 }
3228 last_end = Some(range.end);
3229
3230 let new_text_len = rng.random_range(0..10);
3231 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3232 new_text = new_text.to_uppercase();
3233
3234 edits.push((range, new_text));
3235 }
3236 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3237 self.edit(edits, None, cx);
3238 }
3239
3240 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3241 let was_dirty = self.is_dirty();
3242 let old_version = self.version.clone();
3243
3244 let ops = self.text.randomly_undo_redo(rng);
3245 if !ops.is_empty() {
3246 for op in ops {
3247 self.send_operation(Operation::Buffer(op), true, cx);
3248 self.did_edit(&old_version, was_dirty, cx);
3249 }
3250 }
3251 }
3252}
3253
3254impl EventEmitter<BufferEvent> for Buffer {}
3255
3256impl Deref for Buffer {
3257 type Target = TextBuffer;
3258
3259 fn deref(&self) -> &Self::Target {
3260 &self.text
3261 }
3262}
3263
3264impl BufferSnapshot {
3265 /// Returns [`IndentSize`] for a given line that respects user settings and
3266 /// language preferences.
3267 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3268 indent_size_for_line(self, row)
3269 }
3270
3271 /// Returns [`IndentSize`] for a given position that respects user settings
3272 /// and language preferences.
3273 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3274 let settings = language_settings(
3275 self.language_at(position).map(|l| l.name()),
3276 self.file(),
3277 cx,
3278 );
3279 if settings.hard_tabs {
3280 IndentSize::tab()
3281 } else {
3282 IndentSize::spaces(settings.tab_size.get())
3283 }
3284 }
3285
3286 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3287 /// is passed in as `single_indent_size`.
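    ///
    /// A small sketch (illustrative only):
    ///
    /// ```ignore
    /// // Suggest indents for the first ten rows, using four spaces as the indent unit.
    /// let indents = snapshot.suggested_indents(0..10, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row}: {} columns", indent.len);
    /// }
    /// ```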
3288 pub fn suggested_indents(
3289 &self,
3290 rows: impl Iterator<Item = u32>,
3291 single_indent_size: IndentSize,
3292 ) -> BTreeMap<u32, IndentSize> {
3293 let mut result = BTreeMap::new();
3294
3295 for row_range in contiguous_ranges(rows, 10) {
3296 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3297 Some(suggestions) => suggestions,
3298 _ => break,
3299 };
3300
3301 for (row, suggestion) in row_range.zip(suggestions) {
3302 let indent_size = if let Some(suggestion) = suggestion {
3303 result
3304 .get(&suggestion.basis_row)
3305 .copied()
3306 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3307 .with_delta(suggestion.delta, single_indent_size)
3308 } else {
3309 self.indent_size_for_line(row)
3310 };
3311
3312 result.insert(row, indent_size);
3313 }
3314 }
3315
3316 result
3317 }
3318
3319 fn suggest_autoindents(
3320 &self,
3321 row_range: Range<u32>,
3322 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3323 let config = &self.language.as_ref()?.config;
3324 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3325
3326 #[derive(Debug, Clone)]
3327 struct StartPosition {
3328 start: Point,
3329 suffix: SharedString,
3330 language: Arc<Language>,
3331 }
3332
3333 // Find the suggested indentation ranges based on the syntax tree.
3334 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3335 let end = Point::new(row_range.end, 0);
3336 let range = (start..end).to_offset(&self.text);
3337 let mut matches = self.syntax.matches_with_options(
3338 range.clone(),
3339 &self.text,
3340 TreeSitterOptions {
3341 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3342 max_start_depth: None,
3343 },
3344 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3345 );
3346 let indent_configs = matches
3347 .grammars()
3348 .iter()
3349 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3350 .collect::<Vec<_>>();
3351
3352 let mut indent_ranges = Vec::<Range<Point>>::new();
3353 let mut start_positions = Vec::<StartPosition>::new();
3354 let mut outdent_positions = Vec::<Point>::new();
3355 while let Some(mat) = matches.peek() {
3356 let mut start: Option<Point> = None;
3357 let mut end: Option<Point> = None;
3358
3359 let config = indent_configs[mat.grammar_index];
3360 for capture in mat.captures {
3361 if capture.index == config.indent_capture_ix {
3362 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3363 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3364 } else if Some(capture.index) == config.start_capture_ix {
3365 start = Some(Point::from_ts_point(capture.node.end_position()));
3366 } else if Some(capture.index) == config.end_capture_ix {
3367 end = Some(Point::from_ts_point(capture.node.start_position()));
3368 } else if Some(capture.index) == config.outdent_capture_ix {
3369 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3370 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3371 start_positions.push(StartPosition {
3372 start: Point::from_ts_point(capture.node.start_position()),
3373 suffix: suffix.clone(),
3374 language: mat.language.clone(),
3375 });
3376 }
3377 }
3378
3379 matches.advance();
3380 if let Some((start, end)) = start.zip(end) {
3381 if start.row == end.row {
3382 continue;
3383 }
3384 let range = start..end;
3385 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3386 Err(ix) => indent_ranges.insert(ix, range),
3387 Ok(ix) => {
3388 let prev_range = &mut indent_ranges[ix];
3389 prev_range.end = prev_range.end.max(range.end);
3390 }
3391 }
3392 }
3393 }
3394
3395 let mut error_ranges = Vec::<Range<Point>>::new();
3396 let mut matches = self
3397 .syntax
3398 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3399 while let Some(mat) = matches.peek() {
3400 let node = mat.captures[0].node;
3401 let start = Point::from_ts_point(node.start_position());
3402 let end = Point::from_ts_point(node.end_position());
3403 let range = start..end;
3404 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3405 Ok(ix) | Err(ix) => ix,
3406 };
3407 let mut end_ix = ix;
3408 while let Some(existing_range) = error_ranges.get(end_ix) {
3409 if existing_range.end < end {
3410 end_ix += 1;
3411 } else {
3412 break;
3413 }
3414 }
3415 error_ranges.splice(ix..end_ix, [range]);
3416 matches.advance();
3417 }
3418
3419 outdent_positions.sort();
3420 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and truncate that range's end to the outdent position.
3423 if let Some(range_to_truncate) = indent_ranges
3424 .iter_mut()
3425 .rfind(|indent_range| indent_range.contains(&outdent_position))
3426 {
3427 range_to_truncate.end = outdent_position;
3428 }
3429 }
3430
3431 start_positions.sort_by_key(|b| b.start);
3432
        // Find the suggested indentation increases and decreases based on regexes.
3434 let mut regex_outdent_map = HashMap::default();
3435 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3436 let mut start_positions_iter = start_positions.iter().peekable();
3437
3438 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3439 self.for_each_line(
3440 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3441 ..Point::new(row_range.end, 0),
3442 |row, line| {
3443 let indent_len = self.indent_size_for_line(row).len;
3444 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3445 let row_language_config = row_language
3446 .as_ref()
3447 .map(|lang| lang.config())
3448 .unwrap_or(config);
3449
3450 if row_language_config
3451 .decrease_indent_pattern
3452 .as_ref()
3453 .is_some_and(|regex| regex.is_match(line))
3454 {
3455 indent_change_rows.push((row, Ordering::Less));
3456 }
3457 if row_language_config
3458 .increase_indent_pattern
3459 .as_ref()
3460 .is_some_and(|regex| regex.is_match(line))
3461 {
3462 indent_change_rows.push((row + 1, Ordering::Greater));
3463 }
3464 while let Some(pos) = start_positions_iter.peek() {
3465 if pos.start.row < row {
3466 let pos = start_positions_iter.next().unwrap().clone();
3467 last_seen_suffix
3468 .entry(pos.suffix.to_string())
3469 .or_default()
3470 .push(pos);
3471 } else {
3472 break;
3473 }
3474 }
3475 for rule in &row_language_config.decrease_indent_patterns {
3476 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3477 let row_start_column = self.indent_size_for_line(row).len;
3478 let basis_row = rule
3479 .valid_after
3480 .iter()
3481 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3482 .flatten()
3483 .filter(|pos| {
3484 row_language
3485 .as_ref()
3486 .or(self.language.as_ref())
3487 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3488 })
3489 .filter(|pos| pos.start.column <= row_start_column)
3490 .max_by_key(|pos| pos.start.row);
3491 if let Some(outdent_to) = basis_row {
3492 regex_outdent_map.insert(row, outdent_to.start.row);
3493 }
3494 break;
3495 }
3496 }
3497 },
3498 );
3499
3500 let mut indent_changes = indent_change_rows.into_iter().peekable();
3501 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3502 prev_non_blank_row.unwrap_or(0)
3503 } else {
3504 row_range.start.saturating_sub(1)
3505 };
3506
3507 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3508 Some(row_range.map(move |row| {
3509 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3510
3511 let mut indent_from_prev_row = false;
3512 let mut outdent_from_prev_row = false;
3513 let mut outdent_to_row = u32::MAX;
3514 let mut from_regex = false;
3515
3516 while let Some((indent_row, delta)) = indent_changes.peek() {
3517 match indent_row.cmp(&row) {
3518 Ordering::Equal => match delta {
3519 Ordering::Less => {
3520 from_regex = true;
3521 outdent_from_prev_row = true
3522 }
3523 Ordering::Greater => {
3524 indent_from_prev_row = true;
3525 from_regex = true
3526 }
3527 _ => {}
3528 },
3529
3530 Ordering::Greater => break,
3531 Ordering::Less => {}
3532 }
3533
3534 indent_changes.next();
3535 }
3536
3537 for range in &indent_ranges {
3538 if range.start.row >= row {
3539 break;
3540 }
3541 if range.start.row == prev_row && range.end > row_start {
3542 indent_from_prev_row = true;
3543 }
3544 if range.end > prev_row_start && range.end <= row_start {
3545 outdent_to_row = outdent_to_row.min(range.start.row);
3546 }
3547 }
3548
3549 if let Some(basis_row) = regex_outdent_map.get(&row) {
3550 indent_from_prev_row = false;
3551 outdent_to_row = *basis_row;
3552 from_regex = true;
3553 }
3554
3555 let within_error = error_ranges
3556 .iter()
3557 .any(|e| e.start.row < row && e.end > row_start);
3558
3559 let suggestion = if outdent_to_row == prev_row
3560 || (outdent_from_prev_row && indent_from_prev_row)
3561 {
3562 Some(IndentSuggestion {
3563 basis_row: prev_row,
3564 delta: Ordering::Equal,
3565 within_error: within_error && !from_regex,
3566 })
3567 } else if indent_from_prev_row {
3568 Some(IndentSuggestion {
3569 basis_row: prev_row,
3570 delta: Ordering::Greater,
3571 within_error: within_error && !from_regex,
3572 })
3573 } else if outdent_to_row < prev_row {
3574 Some(IndentSuggestion {
3575 basis_row: outdent_to_row,
3576 delta: Ordering::Equal,
3577 within_error: within_error && !from_regex,
3578 })
3579 } else if outdent_from_prev_row {
3580 Some(IndentSuggestion {
3581 basis_row: prev_row,
3582 delta: Ordering::Less,
3583 within_error: within_error && !from_regex,
3584 })
3585 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3586 {
3587 Some(IndentSuggestion {
3588 basis_row: prev_row,
3589 delta: Ordering::Equal,
3590 within_error: within_error && !from_regex,
3591 })
3592 } else {
3593 None
3594 };
3595
3596 prev_row = row;
3597 prev_row_start = row_start;
3598 suggestion
3599 }))
3600 }
3601
3602 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3603 while row > 0 {
3604 row -= 1;
3605 if !self.is_line_blank(row) {
3606 return Some(row);
3607 }
3608 }
3609 None
3610 }
3611
3612 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3613 let captures = self.syntax.captures(range, &self.text, |grammar| {
3614 grammar
3615 .highlights_config
3616 .as_ref()
3617 .map(|config| &config.query)
3618 });
3619 let highlight_maps = captures
3620 .grammars()
3621 .iter()
3622 .map(|grammar| grammar.highlight_map())
3623 .collect();
3624 (captures, highlight_maps)
3625 }
3626
3627 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3628 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3629 /// returned in chunks where each chunk has a single syntax highlighting style and
3630 /// diagnostic status.
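///
/// A minimal illustrative sketch, assuming a populated `snapshot: &BufferSnapshot`:
///
/// ```ignore
/// let mut text = String::new();
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // Each chunk carries one `syntax_highlight_id` and one diagnostic status.
///     text.push_str(chunk.text);
/// }
/// ```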
3631 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3632 let range = range.start.to_offset(self)..range.end.to_offset(self);
3633
3634 let mut syntax = None;
3635 if language_aware {
3636 syntax = Some(self.get_highlights(range.clone()));
3637 }
3638 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3639 let diagnostics = language_aware;
3640 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3641 }
3642
3643 pub fn highlighted_text_for_range<T: ToOffset>(
3644 &self,
3645 range: Range<T>,
3646 override_style: Option<HighlightStyle>,
3647 syntax_theme: &SyntaxTheme,
3648 ) -> HighlightedText {
3649 HighlightedText::from_buffer_range(
3650 range,
3651 &self.text,
3652 &self.syntax,
3653 override_style,
3654 syntax_theme,
3655 )
3656 }
3657
3658 /// Invokes the given callback for each line of text in the given range of the buffer.
3659 /// A callback is used so that a single string buffer can be reused, avoiding an allocation per line.
3660 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3661 let mut line = String::new();
3662 let mut row = range.start.row;
3663 for chunk in self
3664 .as_rope()
3665 .chunks_in_range(range.to_offset(self))
3666 .chain(["\n"])
3667 {
3668 for (newline_ix, text) in chunk.split('\n').enumerate() {
3669 if newline_ix > 0 {
3670 callback(row, &line);
3671 row += 1;
3672 line.clear();
3673 }
3674 line.push_str(text);
3675 }
3676 }
3677 }
3678
3679 /// Iterates over every [`SyntaxLayer`] in the buffer.
3680 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3681 self.syntax_layers_for_range(0..self.len(), true)
3682 }
3683
3684 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3685 let offset = position.to_offset(self);
3686 self.syntax_layers_for_range(offset..offset, false)
3687 .filter(|l| {
3688 if let Some(ranges) = l.included_sub_ranges {
3689 ranges.iter().any(|range| {
3690 let start = range.start.to_offset(self);
3691 start <= offset && {
3692 let end = range.end.to_offset(self);
3693 offset < end
3694 }
3695 })
3696 } else {
3697 l.node().start_byte() <= offset && l.node().end_byte() > offset
3698 }
3699 })
3700 .last()
3701 }
3702
3703 pub fn syntax_layers_for_range<D: ToOffset>(
3704 &self,
3705 range: Range<D>,
3706 include_hidden: bool,
3707 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3708 self.syntax
3709 .layers_for_range(range, &self.text, include_hidden)
3710 }
3711
3712 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3713 &self,
3714 range: Range<D>,
3715 ) -> Option<SyntaxLayer<'_>> {
3716 let range = range.to_offset(self);
3717 self.syntax
3718 .layers_for_range(range, &self.text, false)
3719 .max_by(|a, b| {
3720 if a.depth != b.depth {
3721 a.depth.cmp(&b.depth)
3722 } else if a.offset.0 != b.offset.0 {
3723 a.offset.0.cmp(&b.offset.0)
3724 } else {
3725 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3726 }
3727 })
3728 }
3729
3730 /// Returns the main [`Language`].
3731 pub fn language(&self) -> Option<&Arc<Language>> {
3732 self.language.as_ref()
3733 }
3734
3735 /// Returns the [`Language`] at the given location.
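///
/// A brief sketch, assuming `snapshot: &BufferSnapshot` and a byte `offset`:
///
/// ```ignore
/// // Falls back to the buffer's primary language when no syntax layer covers `offset`.
/// let language_name = snapshot.language_at(offset).map(|language| language.name());
/// ```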
3736 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3737 self.syntax_layer_at(position)
3738 .map(|info| info.language)
3739 .or(self.language.as_ref())
3740 }
3741
3742 /// Returns the settings for the language at the given location.
3743 pub fn settings_at<'a, D: ToOffset>(
3744 &'a self,
3745 position: D,
3746 cx: &'a App,
3747 ) -> Cow<'a, LanguageSettings> {
3748 language_settings(
3749 self.language_at(position).map(|l| l.name()),
3750 self.file.as_ref(),
3751 cx,
3752 )
3753 }
3754
3755 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3756 CharClassifier::new(self.language_scope_at(point))
3757 }
3758
3759 /// Returns the [`LanguageScope`] at the given location.
3760 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3761 let offset = position.to_offset(self);
3762 let mut scope = None;
3763 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3764
3765 // Use the layer that has the smallest node intersecting the given point.
3766 for layer in self
3767 .syntax
3768 .layers_for_range(offset..offset, &self.text, false)
3769 {
3770 let mut cursor = layer.node().walk();
3771
3772 let mut range = None;
3773 loop {
3774 let child_range = cursor.node().byte_range();
3775 if !child_range.contains(&offset) {
3776 break;
3777 }
3778
3779 range = Some(child_range);
3780 if cursor.goto_first_child_for_byte(offset).is_none() {
3781 break;
3782 }
3783 }
3784
3785 if let Some(range) = range
3786 && smallest_range_and_depth.as_ref().is_none_or(
3787 |(smallest_range, smallest_range_depth)| {
3788 if layer.depth > *smallest_range_depth {
3789 true
3790 } else if layer.depth == *smallest_range_depth {
3791 range.len() < smallest_range.len()
3792 } else {
3793 false
3794 }
3795 },
3796 )
3797 {
3798 smallest_range_and_depth = Some((range, layer.depth));
3799 scope = Some(LanguageScope {
3800 language: layer.language.clone(),
3801 override_id: layer.override_id(offset, &self.text),
3802 });
3803 }
3804 }
3805
3806 scope.or_else(|| {
3807 self.language.clone().map(|language| LanguageScope {
3808 language,
3809 override_id: None,
3810 })
3811 })
3812 }
3813
3814 /// Returns a tuple of the range and character kind of the word
3815 /// surrounding the given position.
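///
/// Illustrative sketch, assuming `snapshot: &BufferSnapshot` and a cursor `offset`:
///
/// ```ignore
/// let (range, _kind) = snapshot.surrounding_word(offset, None);
/// let word: String = snapshot.text_for_range(range).collect();
/// ```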
3816 pub fn surrounding_word<T: ToOffset>(
3817 &self,
3818 start: T,
3819 scope_context: Option<CharScopeContext>,
3820 ) -> (Range<usize>, Option<CharKind>) {
3821 let mut start = start.to_offset(self);
3822 let mut end = start;
3823 let mut next_chars = self.chars_at(start).take(128).peekable();
3824 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3825
3826 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3827 let word_kind = cmp::max(
3828 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3829 next_chars.peek().copied().map(|c| classifier.kind(c)),
3830 );
3831
3832 for ch in prev_chars {
3833 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3834 start -= ch.len_utf8();
3835 } else {
3836 break;
3837 }
3838 }
3839
3840 for ch in next_chars {
3841 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3842 end += ch.len_utf8();
3843 } else {
3844 break;
3845 }
3846 }
3847
3848 (start..end, word_kind)
3849 }
3850
3851 /// Moves the `TreeCursor` to the smallest descendant or ancestor syntax node enclosing the given
3852 /// range. When `require_larger` is `true`, the node found must be larger than the query range.
3853 ///
3854 /// Returns `true` if a node was found, and `false` otherwise. In the `false` case the cursor will
3855 /// be moved to the root of the tree.
3856 fn goto_node_enclosing_range(
3857 cursor: &mut tree_sitter::TreeCursor,
3858 query_range: &Range<usize>,
3859 require_larger: bool,
3860 ) -> bool {
3861 let mut ascending = false;
3862 loop {
3863 let mut range = cursor.node().byte_range();
3864 if query_range.is_empty() {
3865 // When the query range is empty and the current node starts after it, move to the
3866 // previous sibling to find the containing node.
3867 if range.start > query_range.start {
3868 cursor.goto_previous_sibling();
3869 range = cursor.node().byte_range();
3870 }
3871 } else {
3872 // When the query range is non-empty and the current node ends exactly at the start,
3873 // move to the next sibling to find a node that extends beyond the start.
3874 if range.end == query_range.start {
3875 cursor.goto_next_sibling();
3876 range = cursor.node().byte_range();
3877 }
3878 }
3879
3880 let encloses = range.contains_inclusive(query_range)
3881 && (!require_larger || range.len() > query_range.len());
3882 if !encloses {
3883 ascending = true;
3884 if !cursor.goto_parent() {
3885 return false;
3886 }
3887 continue;
3888 } else if ascending {
3889 return true;
3890 }
3891
3892 // Descend into the current node.
3893 if cursor
3894 .goto_first_child_for_byte(query_range.start)
3895 .is_none()
3896 {
3897 return true;
3898 }
3899 }
3900 }
3901
3902 pub fn syntax_ancestor<'a, T: ToOffset>(
3903 &'a self,
3904 range: Range<T>,
3905 ) -> Option<tree_sitter::Node<'a>> {
3906 let range = range.start.to_offset(self)..range.end.to_offset(self);
3907 let mut result: Option<tree_sitter::Node<'a>> = None;
3908 for layer in self
3909 .syntax
3910 .layers_for_range(range.clone(), &self.text, true)
3911 {
3912 let mut cursor = layer.node().walk();
3913
3914 // Find the node that both contains the range and is larger than it.
3915 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3916 continue;
3917 }
3918
3919 let left_node = cursor.node();
3920 let mut layer_result = left_node;
3921
3922 // For an empty range, try to find another node immediately to the right of the range.
3923 if left_node.end_byte() == range.start {
3924 let mut right_node = None;
3925 while !cursor.goto_next_sibling() {
3926 if !cursor.goto_parent() {
3927 break;
3928 }
3929 }
3930
3931 while cursor.node().start_byte() == range.start {
3932 right_node = Some(cursor.node());
3933 if !cursor.goto_first_child() {
3934 break;
3935 }
3936 }
3937
3938 // If there is a candidate node on both sides of the (empty) range, then
3939 // decide between the two by favoring a named node over an anonymous token.
3940 // If both nodes are the same in that regard, favor the right one.
3941 if let Some(right_node) = right_node
3942 && (right_node.is_named() || !left_node.is_named())
3943 {
3944 layer_result = right_node;
3945 }
3946 }
3947
3948 if let Some(previous_result) = &result
3949 && previous_result.byte_range().len() < layer_result.byte_range().len()
3950 {
3951 continue;
3952 }
3953 result = Some(layer_result);
3954 }
3955
3956 result
3957 }
3958
3959 /// Find the previous sibling syntax node at the given range.
3960 ///
3961 /// This function locates the syntax node that precedes the node containing
3962 /// the given range. It searches hierarchically by:
3963 /// 1. Finding the node that contains the given range
3964 /// 2. Looking for the previous sibling at the same tree level
3965 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3966 ///
3967 /// Returns `None` if there is no previous sibling at any ancestor level.
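///
/// Illustrative sketch, assuming `snapshot: &BufferSnapshot` and a byte range `selection`:
///
/// ```ignore
/// if let Some(node) = snapshot.syntax_prev_sibling(selection.clone()) {
///     // The node's byte range can be used to move a selection backwards.
///     let _previous_range = node.byte_range();
/// }
/// ```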
3968 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3969 &'a self,
3970 range: Range<T>,
3971 ) -> Option<tree_sitter::Node<'a>> {
3972 let range = range.start.to_offset(self)..range.end.to_offset(self);
3973 let mut result: Option<tree_sitter::Node<'a>> = None;
3974
3975 for layer in self
3976 .syntax
3977 .layers_for_range(range.clone(), &self.text, true)
3978 {
3979 let mut cursor = layer.node().walk();
3980
3981 // Find the node that contains the range
3982 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3983 continue;
3984 }
3985
3986 // Look for the previous sibling, moving up ancestor levels if needed
3987 loop {
3988 if cursor.goto_previous_sibling() {
3989 let layer_result = cursor.node();
3990
3991 if let Some(previous_result) = &result {
3992 if previous_result.byte_range().end < layer_result.byte_range().end {
3993 continue;
3994 }
3995 }
3996 result = Some(layer_result);
3997 break;
3998 }
3999
4000 // No sibling found at this level, try moving up to parent
4001 if !cursor.goto_parent() {
4002 break;
4003 }
4004 }
4005 }
4006
4007 result
4008 }
4009
4010 /// Find the next sibling syntax node at the given range.
4011 ///
4012 /// This function locates the syntax node that follows the node containing
4013 /// the given range. It searches hierarchically by:
4014 /// 1. Finding the node that contains the given range
4015 /// 2. Looking for the next sibling at the same tree level
4016 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
4017 ///
4018 /// Returns `None` if there is no next sibling at any ancestor level.
4019 pub fn syntax_next_sibling<'a, T: ToOffset>(
4020 &'a self,
4021 range: Range<T>,
4022 ) -> Option<tree_sitter::Node<'a>> {
4023 let range = range.start.to_offset(self)..range.end.to_offset(self);
4024 let mut result: Option<tree_sitter::Node<'a>> = None;
4025
4026 for layer in self
4027 .syntax
4028 .layers_for_range(range.clone(), &self.text, true)
4029 {
4030 let mut cursor = layer.node().walk();
4031
4032 // Find the node that contains the range
4033 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
4034 continue;
4035 }
4036
4037 // Look for the next sibling, moving up ancestor levels if needed
4038 loop {
4039 if cursor.goto_next_sibling() {
4040 let layer_result = cursor.node();
4041
4042 if let Some(previous_result) = &result {
4043 if previous_result.byte_range().start > layer_result.byte_range().start {
4044 continue;
4045 }
4046 }
4047 result = Some(layer_result);
4048 break;
4049 }
4050
4051 // No sibling found at this level, try moving up to parent
4052 if !cursor.goto_parent() {
4053 break;
4054 }
4055 }
4056 }
4057
4058 result
4059 }
4060
4061 /// Returns the root syntax node within the given row.
4062 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
4063 let start_offset = position.to_offset(self);
4064
4065 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
4066
4067 let layer = self
4068 .syntax
4069 .layers_for_range(start_offset..start_offset, &self.text, true)
4070 .next()?;
4071
4072 let mut cursor = layer.node().walk();
4073
4074 // Descend to the first leaf that touches the start of the range.
4075 while cursor.goto_first_child_for_byte(start_offset).is_some() {
4076 if cursor.node().end_byte() == start_offset {
4077 cursor.goto_next_sibling();
4078 }
4079 }
4080
4081 // Ascend to the root node within the same row.
4082 while cursor.goto_parent() {
4083 if cursor.node().start_position().row != row {
4084 break;
4085 }
4086 }
4087
4088 Some(cursor.node())
4089 }
4090
4091 /// Returns the outline for the buffer.
4092 ///
4093 /// This method allows passing an optional [`SyntaxTheme`] to
4094 /// syntax-highlight the returned symbols.
4095 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4096 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4097 }
4098
4099 /// Returns all the symbols that contain the given position.
4100 ///
4101 /// This method allows passing an optional [`SyntaxTheme`] to
4102 /// syntax-highlight the returned symbols.
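///
/// Illustrative sketch, assuming `snapshot: &BufferSnapshot` and a cursor `offset`:
/// render the chain of symbols that enclose the cursor.
///
/// ```ignore
/// for item in snapshot.symbols_containing(offset, None) {
///     // `depth` reflects nesting; `text` is the rendered symbol label.
///     println!("{}{}", "  ".repeat(item.depth), item.text);
/// }
/// ```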
4103 pub fn symbols_containing<T: ToOffset>(
4104 &self,
4105 position: T,
4106 theme: Option<&SyntaxTheme>,
4107 ) -> Vec<OutlineItem<Anchor>> {
4108 let position = position.to_offset(self);
4109 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4110 let end = self.clip_offset(position + 1, Bias::Right);
4111 let mut items = self.outline_items_containing(start..end, false, theme);
4112 let mut prev_depth = None;
4113 items.retain(|item| {
4114 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4115 prev_depth = Some(item.depth);
4116 result
4117 });
4118 items
4119 }
4120
4121 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4122 let range = range.to_offset(self);
4123 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4124 grammar.outline_config.as_ref().map(|c| &c.query)
4125 });
4126 let configs = matches
4127 .grammars()
4128 .iter()
4129 .map(|g| g.outline_config.as_ref().unwrap())
4130 .collect::<Vec<_>>();
4131
4132 while let Some(mat) = matches.peek() {
4133 let config = &configs[mat.grammar_index];
4134 let containing_item_node = maybe!({
4135 let item_node = mat.captures.iter().find_map(|cap| {
4136 if cap.index == config.item_capture_ix {
4137 Some(cap.node)
4138 } else {
4139 None
4140 }
4141 })?;
4142
4143 let item_byte_range = item_node.byte_range();
4144 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4145 None
4146 } else {
4147 Some(item_node)
4148 }
4149 });
4150
4151 if let Some(item_node) = containing_item_node {
4152 return Some(
4153 Point::from_ts_point(item_node.start_position())
4154 ..Point::from_ts_point(item_node.end_position()),
4155 );
4156 }
4157
4158 matches.advance();
4159 }
4160 None
4161 }
4162
4163 pub fn outline_items_containing<T: ToOffset>(
4164 &self,
4165 range: Range<T>,
4166 include_extra_context: bool,
4167 theme: Option<&SyntaxTheme>,
4168 ) -> Vec<OutlineItem<Anchor>> {
4169 self.outline_items_containing_internal(
4170 range,
4171 include_extra_context,
4172 theme,
4173 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4174 )
4175 }
4176
4177 pub fn outline_items_as_points_containing<T: ToOffset>(
4178 &self,
4179 range: Range<T>,
4180 include_extra_context: bool,
4181 theme: Option<&SyntaxTheme>,
4182 ) -> Vec<OutlineItem<Point>> {
4183 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4184 range
4185 })
4186 }
4187
4188 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4189 &self,
4190 range: Range<T>,
4191 include_extra_context: bool,
4192 theme: Option<&SyntaxTheme>,
4193 ) -> Vec<OutlineItem<usize>> {
4194 self.outline_items_containing_internal(
4195 range,
4196 include_extra_context,
4197 theme,
4198 |buffer, range| range.to_offset(buffer),
4199 )
4200 }
4201
4202 fn outline_items_containing_internal<T: ToOffset, U>(
4203 &self,
4204 range: Range<T>,
4205 include_extra_context: bool,
4206 theme: Option<&SyntaxTheme>,
4207 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4208 ) -> Vec<OutlineItem<U>> {
4209 let range = range.to_offset(self);
4210 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4211 grammar.outline_config.as_ref().map(|c| &c.query)
4212 });
4213
4214 let mut items = Vec::new();
4215 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4216 while let Some(mat) = matches.peek() {
4217 let config = matches.grammars()[mat.grammar_index]
4218 .outline_config
4219 .as_ref()
4220 .unwrap();
4221 if let Some(item) =
4222 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4223 {
4224 items.push(item);
4225 } else if let Some(capture) = mat
4226 .captures
4227 .iter()
4228 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4229 {
4230 let capture_range = capture.node.start_position()..capture.node.end_position();
4231 let mut capture_row_range =
4232 capture_range.start.row as u32..capture_range.end.row as u32;
4233 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4234 {
4235 capture_row_range.end -= 1;
4236 }
4237 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4238 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4239 last_row_range.end = capture_row_range.end;
4240 } else {
4241 annotation_row_ranges.push(capture_row_range);
4242 }
4243 } else {
4244 annotation_row_ranges.push(capture_row_range);
4245 }
4246 }
4247 matches.advance();
4248 }
4249
4250 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4251
4252 // Assign depths based on containment relationships and convert to anchors.
4253 let mut item_ends_stack = Vec::<Point>::new();
4254 let mut anchor_items = Vec::new();
4255 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4256 for item in items {
4257 while let Some(last_end) = item_ends_stack.last().copied() {
4258 if last_end < item.range.end {
4259 item_ends_stack.pop();
4260 } else {
4261 break;
4262 }
4263 }
4264
4265 let mut annotation_row_range = None;
4266 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4267 let row_preceding_item = item.range.start.row.saturating_sub(1);
4268 if next_annotation_row_range.end < row_preceding_item {
4269 annotation_row_ranges.next();
4270 } else {
4271 if next_annotation_row_range.end == row_preceding_item {
4272 annotation_row_range = Some(next_annotation_row_range.clone());
4273 annotation_row_ranges.next();
4274 }
4275 break;
4276 }
4277 }
4278
4279 anchor_items.push(OutlineItem {
4280 depth: item_ends_stack.len(),
4281 range: range_callback(self, item.range.clone()),
4282 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4283 text: item.text,
4284 highlight_ranges: item.highlight_ranges,
4285 name_ranges: item.name_ranges,
4286 body_range: item.body_range.map(|r| range_callback(self, r)),
4287 annotation_range: annotation_row_range.map(|annotation_range| {
4288 let point_range = Point::new(annotation_range.start, 0)
4289 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4290 range_callback(self, point_range)
4291 }),
4292 });
4293 item_ends_stack.push(item.range.end);
4294 }
4295
4296 anchor_items
4297 }
4298
4299 fn next_outline_item(
4300 &self,
4301 config: &OutlineConfig,
4302 mat: &SyntaxMapMatch,
4303 range: &Range<usize>,
4304 include_extra_context: bool,
4305 theme: Option<&SyntaxTheme>,
4306 ) -> Option<OutlineItem<Point>> {
4307 let item_node = mat.captures.iter().find_map(|cap| {
4308 if cap.index == config.item_capture_ix {
4309 Some(cap.node)
4310 } else {
4311 None
4312 }
4313 })?;
4314
4315 let item_byte_range = item_node.byte_range();
4316 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4317 return None;
4318 }
4319 let item_point_range = Point::from_ts_point(item_node.start_position())
4320 ..Point::from_ts_point(item_node.end_position());
4321
4322 let mut open_point = None;
4323 let mut close_point = None;
4324
4325 let mut buffer_ranges = Vec::new();
4326 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4327 let mut range = node.start_byte()..node.end_byte();
4328 let start = node.start_position();
4329 if node.end_position().row > start.row {
4330 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4331 }
4332
4333 if !range.is_empty() {
4334 buffer_ranges.push((range, node_is_name));
4335 }
4336 };
4337
4338 for capture in mat.captures {
4339 if capture.index == config.name_capture_ix {
4340 add_to_buffer_ranges(capture.node, true);
4341 } else if Some(capture.index) == config.context_capture_ix
4342 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4343 {
4344 add_to_buffer_ranges(capture.node, false);
4345 } else {
4346 if Some(capture.index) == config.open_capture_ix {
4347 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4348 } else if Some(capture.index) == config.close_capture_ix {
4349 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4350 }
4351 }
4352 }
4353
4354 if buffer_ranges.is_empty() {
4355 return None;
4356 }
4357 let source_range_for_text =
4358 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4359
4360 let mut text = String::new();
4361 let mut highlight_ranges = Vec::new();
4362 let mut name_ranges = Vec::new();
4363 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4364 let mut last_buffer_range_end = 0;
4365 for (buffer_range, is_name) in buffer_ranges {
4366 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4367 if space_added {
4368 text.push(' ');
4369 }
4370 let before_append_len = text.len();
4371 let mut offset = buffer_range.start;
4372 chunks.seek(buffer_range.clone());
4373 for mut chunk in chunks.by_ref() {
4374 if chunk.text.len() > buffer_range.end - offset {
4375 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4376 offset = buffer_range.end;
4377 } else {
4378 offset += chunk.text.len();
4379 }
4380 let style = chunk
4381 .syntax_highlight_id
4382 .zip(theme)
4383 .and_then(|(highlight, theme)| highlight.style(theme));
4384 if let Some(style) = style {
4385 let start = text.len();
4386 let end = start + chunk.text.len();
4387 highlight_ranges.push((start..end, style));
4388 }
4389 text.push_str(chunk.text);
4390 if offset >= buffer_range.end {
4391 break;
4392 }
4393 }
4394 if is_name {
4395 let after_append_len = text.len();
4396 let start = if space_added && !name_ranges.is_empty() {
4397 before_append_len - 1
4398 } else {
4399 before_append_len
4400 };
4401 name_ranges.push(start..after_append_len);
4402 }
4403 last_buffer_range_end = buffer_range.end;
4404 }
4405
4406 Some(OutlineItem {
4407 depth: 0, // We'll calculate the depth later
4408 range: item_point_range,
4409 source_range_for_text: source_range_for_text.to_point(self),
4410 text,
4411 highlight_ranges,
4412 name_ranges,
4413 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4414 annotation_range: None,
4415 })
4416 }
4417
4418 pub fn function_body_fold_ranges<T: ToOffset>(
4419 &self,
4420 within: Range<T>,
4421 ) -> impl Iterator<Item = Range<usize>> + '_ {
4422 self.text_object_ranges(within, TreeSitterOptions::default())
4423 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4424 }
4425
4426 /// For each grammar in the language, runs the provided
4427 /// [`tree_sitter::Query`] against the given range.
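///
/// Illustrative sketch, assuming `snapshot: &BufferSnapshot`: run the outline
/// query over the whole buffer and walk the resulting matches.
///
/// ```ignore
/// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
///     grammar.outline_config.as_ref().map(|config| &config.query)
/// });
/// while let Some(mat) = matches.peek() {
///     for capture in mat.captures {
///         let _captured_range = capture.node.byte_range();
///     }
///     matches.advance();
/// }
/// ```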
4428 pub fn matches(
4429 &self,
4430 range: Range<usize>,
4431 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4432 ) -> SyntaxMapMatches<'_> {
4433 self.syntax.matches(range, self, query)
4434 }
4435
4436 /// Finds all [`RowChunks`] applicable to the given range, then returns every bracket pair that intersects those chunks.
4437 /// Hence, it may return more bracket pairs than the range itself contains.
4438 ///
4439 /// Chunks present in `known_chunks` are skipped.
4440 /// The resulting bracket match collections are not ordered.
4441 pub fn fetch_bracket_ranges(
4442 &self,
4443 range: Range<usize>,
4444 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4445 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4446 let mut all_bracket_matches = HashMap::default();
4447
4448 for chunk in self
4449 .tree_sitter_data
4450 .chunks
4451 .applicable_chunks(&[range.to_point(self)])
4452 {
4453 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4454 continue;
4455 }
4456 let chunk_range = chunk.anchor_range();
4457 let chunk_range = chunk_range.to_offset(&self);
4458
4459 if let Some(cached_brackets) =
4460 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4461 {
4462 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4463 continue;
4464 }
4465
4466 let mut all_brackets = Vec::new();
4467 let mut opens = Vec::new();
4468 let mut color_pairs = Vec::new();
4469
4470 let mut matches = self.syntax.matches_with_options(
4471 chunk_range.clone(),
4472 &self.text,
4473 TreeSitterOptions {
4474 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4475 max_start_depth: None,
4476 },
4477 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4478 );
4479 let configs = matches
4480 .grammars()
4481 .iter()
4482 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4483 .collect::<Vec<_>>();
4484
4485 while let Some(mat) = matches.peek() {
4486 let mut open = None;
4487 let mut close = None;
4488 let syntax_layer_depth = mat.depth;
4489 let config = configs[mat.grammar_index];
4490 let pattern = &config.patterns[mat.pattern_index];
4491 for capture in mat.captures {
4492 if capture.index == config.open_capture_ix {
4493 open = Some(capture.node.byte_range());
4494 } else if capture.index == config.close_capture_ix {
4495 close = Some(capture.node.byte_range());
4496 }
4497 }
4498
4499 matches.advance();
4500
4501 let Some((open_range, close_range)) = open.zip(close) else {
4502 continue;
4503 };
4504
4505 let bracket_range = open_range.start..=close_range.end;
4506 if !bracket_range.overlaps(&chunk_range) {
4507 continue;
4508 }
4509
4510 let index = all_brackets.len();
4511 all_brackets.push(BracketMatch {
4512 open_range: open_range.clone(),
4513 close_range: close_range.clone(),
4514 newline_only: pattern.newline_only,
4515 syntax_layer_depth,
4516 color_index: None,
4517 });
4518
4519 // Certain languages have "brackets" that are not really brackets, e.g. tags, and such
4520 // a bracket pair will match the entire tag with all of the text inside it.
4521 // For now, avoid colorizing any pair where both brackets are longer than a single char.
4522 // We need to colorize `<Element/>` bracket pairs, so we cannot make this check stricter.
4523 let should_color =
4524 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4525 if should_color {
4526 opens.push(open_range.clone());
4527 color_pairs.push((open_range, close_range, index));
4528 }
4529 }
4530
4531 opens.sort_by_key(|r| (r.start, r.end));
4532 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4533 color_pairs.sort_by_key(|(_, close, _)| close.end);
4534
4535 let mut open_stack = Vec::new();
4536 let mut open_index = 0;
4537 for (open, close, index) in color_pairs {
4538 while open_index < opens.len() && opens[open_index].start < close.start {
4539 open_stack.push(opens[open_index].clone());
4540 open_index += 1;
4541 }
4542
4543 if open_stack.last() == Some(&open) {
4544 let depth_index = open_stack.len() - 1;
4545 all_brackets[index].color_index = Some(depth_index);
4546 open_stack.pop();
4547 }
4548 }
4549
4550 all_brackets.sort_by_key(|bracket_match| {
4551 (bracket_match.open_range.start, bracket_match.open_range.end)
4552 });
4553
4554 if let empty_slot @ None =
4555 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4556 {
4557 *empty_slot = Some(all_brackets.clone());
4558 }
4559 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4560 }
4561
4562 all_bracket_matches
4563 }
4564
4565 pub fn all_bracket_ranges(
4566 &self,
4567 range: Range<usize>,
4568 ) -> impl Iterator<Item = BracketMatch<usize>> {
4569 self.fetch_bracket_ranges(range.clone(), None)
4570 .into_values()
4571 .flatten()
4572 .filter(move |bracket_match| {
4573 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4574 bracket_range.overlaps(&range)
4575 })
4576 }
4577
4578 /// Returns bracket range pairs overlapping or adjacent to `range`.
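///
/// Illustrative sketch, assuming `snapshot: &BufferSnapshot` and a cursor `offset`:
///
/// ```ignore
/// for pair in snapshot.bracket_ranges(offset..offset) {
///     // Byte ranges of the opening and closing delimiters.
///     let (_open, _close) = (pair.open_range.clone(), pair.close_range.clone());
/// }
/// ```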
4579 pub fn bracket_ranges<T: ToOffset>(
4580 &self,
4581 range: Range<T>,
4582 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4583 // Find bracket pairs that *inclusively* contain the given range.
4584 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4585 self.all_bracket_ranges(range)
4586 .filter(|pair| !pair.newline_only)
4587 }
4588
4589 pub fn debug_variables_query<T: ToOffset>(
4590 &self,
4591 range: Range<T>,
4592 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4593 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4594
4595 let mut matches = self.syntax.matches_with_options(
4596 range.clone(),
4597 &self.text,
4598 TreeSitterOptions::default(),
4599 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4600 );
4601
4602 let configs = matches
4603 .grammars()
4604 .iter()
4605 .map(|grammar| grammar.debug_variables_config.as_ref())
4606 .collect::<Vec<_>>();
4607
4608 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4609
4610 iter::from_fn(move || {
4611 loop {
4612 while let Some(capture) = captures.pop() {
4613 if capture.0.overlaps(&range) {
4614 return Some(capture);
4615 }
4616 }
4617
4618 let mat = matches.peek()?;
4619
4620 let Some(config) = configs[mat.grammar_index].as_ref() else {
4621 matches.advance();
4622 continue;
4623 };
4624
4625 for capture in mat.captures {
4626 let Some(ix) = config
4627 .objects_by_capture_ix
4628 .binary_search_by_key(&capture.index, |e| e.0)
4629 .ok()
4630 else {
4631 continue;
4632 };
4633 let text_object = config.objects_by_capture_ix[ix].1;
4634 let byte_range = capture.node.byte_range();
4635
4636 let mut found = false;
4637 for (range, existing) in captures.iter_mut() {
4638 if existing == &text_object {
4639 range.start = range.start.min(byte_range.start);
4640 range.end = range.end.max(byte_range.end);
4641 found = true;
4642 break;
4643 }
4644 }
4645
4646 if !found {
4647 captures.push((byte_range, text_object));
4648 }
4649 }
4650
4651 matches.advance();
4652 }
4653 })
4654 }
4655
4656 pub fn text_object_ranges<T: ToOffset>(
4657 &self,
4658 range: Range<T>,
4659 options: TreeSitterOptions,
4660 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4661 let range =
4662 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4663
4664 let mut matches =
4665 self.syntax
4666 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4667 grammar.text_object_config.as_ref().map(|c| &c.query)
4668 });
4669
4670 let configs = matches
4671 .grammars()
4672 .iter()
4673 .map(|grammar| grammar.text_object_config.as_ref())
4674 .collect::<Vec<_>>();
4675
4676 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4677
4678 iter::from_fn(move || {
4679 loop {
4680 while let Some(capture) = captures.pop() {
4681 if capture.0.overlaps(&range) {
4682 return Some(capture);
4683 }
4684 }
4685
4686 let mat = matches.peek()?;
4687
4688 let Some(config) = configs[mat.grammar_index].as_ref() else {
4689 matches.advance();
4690 continue;
4691 };
4692
4693 for capture in mat.captures {
4694 let Some(ix) = config
4695 .text_objects_by_capture_ix
4696 .binary_search_by_key(&capture.index, |e| e.0)
4697 .ok()
4698 else {
4699 continue;
4700 };
4701 let text_object = config.text_objects_by_capture_ix[ix].1;
4702 let byte_range = capture.node.byte_range();
4703
4704 let mut found = false;
4705 for (range, existing) in captures.iter_mut() {
4706 if existing == &text_object {
4707 range.start = range.start.min(byte_range.start);
4708 range.end = range.end.max(byte_range.end);
4709 found = true;
4710 break;
4711 }
4712 }
4713
4714 if !found {
4715 captures.push((byte_range, text_object));
4716 }
4717 }
4718
4719 matches.advance();
4720 }
4721 })
4722 }
4723
4724 /// Returns the enclosing bracket ranges that contain the given range.
4725 pub fn enclosing_bracket_ranges<T: ToOffset>(
4726 &self,
4727 range: Range<T>,
4728 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4729 let range = range.start.to_offset(self)..range.end.to_offset(self);
4730
4731 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4732 let max_depth = result
4733 .iter()
4734 .map(|mat| mat.syntax_layer_depth)
4735 .max()
4736 .unwrap_or(0);
4737 result.into_iter().filter(move |pair| {
4738 pair.open_range.start <= range.start
4739 && pair.close_range.end >= range.end
4740 && pair.syntax_layer_depth == max_depth
4741 })
4742 }
4743
4744 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no bracket pair contains the range.
4745 ///
4746 /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
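///
/// Illustrative sketch, assuming `snapshot: &BufferSnapshot` and a selected byte range `selection`:
///
/// ```ignore
/// // Pass `None` to consider every enclosing bracket pair.
/// if let Some((open, close)) =
///     snapshot.innermost_enclosing_bracket_ranges(selection.clone(), None)
/// {
///     // The region strictly between the two delimiters.
///     let _inner = open.end..close.start;
/// }
/// ```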
4747 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4748 &self,
4749 range: Range<T>,
4750 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4751 ) -> Option<(Range<usize>, Range<usize>)> {
4752 let range = range.start.to_offset(self)..range.end.to_offset(self);
4753
4754 // Get the ranges of the innermost pair of brackets.
4755 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4756
4757 for pair in self.enclosing_bracket_ranges(range) {
4758 if let Some(range_filter) = range_filter
4759 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4760 {
4761 continue;
4762 }
4763
4764 let len = pair.close_range.end - pair.open_range.start;
4765
4766 if let Some((existing_open, existing_close)) = &result {
4767 let existing_len = existing_close.end - existing_open.start;
4768 if len > existing_len {
4769 continue;
4770 }
4771 }
4772
4773 result = Some((pair.open_range, pair.close_range));
4774 }
4775
4776 result
4777 }
4778
4779 /// Returns the offset ranges of any matches of the redaction query.
4780 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4781 /// will be run on the relevant section of the buffer.
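///
/// Illustrative sketch, assuming `snapshot: &BufferSnapshot`:
///
/// ```ignore
/// // Byte ranges whose contents should be obscured when rendered.
/// let redactions: Vec<_> = snapshot.redacted_ranges(0..snapshot.len()).collect();
/// ```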
4782 pub fn redacted_ranges<T: ToOffset>(
4783 &self,
4784 range: Range<T>,
4785 ) -> impl Iterator<Item = Range<usize>> + '_ {
4786 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4787 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4788 grammar
4789 .redactions_config
4790 .as_ref()
4791 .map(|config| &config.query)
4792 });
4793
4794 let configs = syntax_matches
4795 .grammars()
4796 .iter()
4797 .map(|grammar| grammar.redactions_config.as_ref())
4798 .collect::<Vec<_>>();
4799
4800 iter::from_fn(move || {
4801 let redacted_range = syntax_matches
4802 .peek()
4803 .and_then(|mat| {
4804 configs[mat.grammar_index].and_then(|config| {
4805 mat.captures
4806 .iter()
4807 .find(|capture| capture.index == config.redaction_capture_ix)
4808 })
4809 })
4810 .map(|mat| mat.node.byte_range());
4811 syntax_matches.advance();
4812 redacted_range
4813 })
4814 }
4815
4816 pub fn injections_intersecting_range<T: ToOffset>(
4817 &self,
4818 range: Range<T>,
4819 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4820 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4821
4822 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4823 grammar
4824 .injection_config
4825 .as_ref()
4826 .map(|config| &config.query)
4827 });
4828
4829 let configs = syntax_matches
4830 .grammars()
4831 .iter()
4832 .map(|grammar| grammar.injection_config.as_ref())
4833 .collect::<Vec<_>>();
4834
4835 iter::from_fn(move || {
4836 let ranges = syntax_matches.peek().and_then(|mat| {
4837 let config = &configs[mat.grammar_index]?;
4838 let content_capture_range = mat.captures.iter().find_map(|capture| {
4839 if capture.index == config.content_capture_ix {
4840 Some(capture.node.byte_range())
4841 } else {
4842 None
4843 }
4844 })?;
4845 let language = self.language_at(content_capture_range.start)?;
4846 Some((content_capture_range, language))
4847 });
4848 syntax_matches.advance();
4849 ranges
4850 })
4851 }
4852
4853 pub fn runnable_ranges(
4854 &self,
4855 offset_range: Range<usize>,
4856 ) -> impl Iterator<Item = RunnableRange> + '_ {
4857 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4858 grammar.runnable_config.as_ref().map(|config| &config.query)
4859 });
4860
4861 let test_configs = syntax_matches
4862 .grammars()
4863 .iter()
4864 .map(|grammar| grammar.runnable_config.as_ref())
4865 .collect::<Vec<_>>();
4866
4867 iter::from_fn(move || {
4868 loop {
4869 let mat = syntax_matches.peek()?;
4870
4871 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4872 let mut run_range = None;
4873 let full_range = mat.captures.iter().fold(
4874 Range {
4875 start: usize::MAX,
4876 end: 0,
4877 },
4878 |mut acc, next| {
4879 let byte_range = next.node.byte_range();
4880 if acc.start > byte_range.start {
4881 acc.start = byte_range.start;
4882 }
4883 if acc.end < byte_range.end {
4884 acc.end = byte_range.end;
4885 }
4886 acc
4887 },
4888 );
4889 if full_range.start > full_range.end {
4890 // We did not find a full spanning range of this match.
4891 return None;
4892 }
4893 let extra_captures: SmallVec<[_; 1]> =
4894 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4895 test_configs
4896 .extra_captures
4897 .get(capture.index as usize)
4898 .cloned()
4899 .and_then(|tag_name| match tag_name {
4900 RunnableCapture::Named(name) => {
4901 Some((capture.node.byte_range(), name))
4902 }
4903 RunnableCapture::Run => {
4904 let _ = run_range.insert(capture.node.byte_range());
4905 None
4906 }
4907 })
4908 }));
4909 let run_range = run_range?;
4910 let tags = test_configs
4911 .query
4912 .property_settings(mat.pattern_index)
4913 .iter()
4914 .filter_map(|property| {
4915 if *property.key == *"tag" {
4916 property
4917 .value
4918 .as_ref()
4919 .map(|value| RunnableTag(value.to_string().into()))
4920 } else {
4921 None
4922 }
4923 })
4924 .collect();
4925 let extra_captures = extra_captures
4926 .into_iter()
4927 .map(|(range, name)| {
4928 (
4929 name.to_string(),
4930 self.text_for_range(range).collect::<String>(),
4931 )
4932 })
4933 .collect();
4934 // All tags should have the same range.
4935 Some(RunnableRange {
4936 run_range,
4937 full_range,
4938 runnable: Runnable {
4939 tags,
4940 language: mat.language,
4941 buffer: self.remote_id(),
4942 },
4943 extra_captures,
4944 buffer_id: self.remote_id(),
4945 })
4946 });
4947
4948 syntax_matches.advance();
4949 if test_range.is_some() {
4950 // It's fine for us to short-circuit when `.peek()?` returns `None`. However, we don't want to return `None` from this
4951 // iterator just because a capture did not contain a run marker, so in that case we simply loop around to the next match.
4952 return test_range;
4953 }
4954 }
4955 })
4956 }
4957
4958 /// Returns the selections of remote peers (and, when `include_local` is set, the local replica) that intersect the given range.
4959 #[allow(clippy::type_complexity)]
4960 pub fn selections_in_range(
4961 &self,
4962 range: Range<Anchor>,
4963 include_local: bool,
4964 ) -> impl Iterator<
4965 Item = (
4966 ReplicaId,
4967 bool,
4968 CursorShape,
4969 impl Iterator<Item = &Selection<Anchor>> + '_,
4970 ),
4971 > + '_ {
4972 self.remote_selections
4973 .iter()
4974 .filter(move |(replica_id, set)| {
4975 (include_local || **replica_id != self.text.replica_id())
4976 && !set.selections.is_empty()
4977 })
4978 .map(move |(replica_id, set)| {
4979 let start_ix = match set.selections.binary_search_by(|probe| {
4980 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4981 }) {
4982 Ok(ix) | Err(ix) => ix,
4983 };
4984 let end_ix = match set.selections.binary_search_by(|probe| {
4985 probe.start.cmp(&range.end, self).then(Ordering::Less)
4986 }) {
4987 Ok(ix) | Err(ix) => ix,
4988 };
4989
4990 (
4991 *replica_id,
4992 set.line_mode,
4993 set.cursor_shape,
4994 set.selections[start_ix..end_ix].iter(),
4995 )
4996 })
4997 }
4998
4999 /// Returns whether the buffer contains any diagnostics.
5000 pub fn has_diagnostics(&self) -> bool {
5001 !self.diagnostics.is_empty()
5002 }
5003
5004 /// Returns all the diagnostics intersecting the given range.
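///
/// Illustrative sketch, assuming `snapshot: &BufferSnapshot`: count the errors
/// in the whole buffer, resolving diagnostic ranges to byte offsets.
///
/// ```ignore
/// let error_count = snapshot
///     .diagnostics_in_range::<_, usize>(0..snapshot.len(), false)
///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
///     .count();
/// ```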
5005 pub fn diagnostics_in_range<'a, T, O>(
5006 &'a self,
5007 search_range: Range<T>,
5008 reversed: bool,
5009 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
5010 where
5011 T: 'a + Clone + ToOffset,
5012 O: 'a + FromAnchor,
5013 {
5014 let mut iterators: Vec<_> = self
5015 .diagnostics
5016 .iter()
5017 .map(|(_, collection)| {
5018 collection
5019 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
5020 .peekable()
5021 })
5022 .collect();
5023
5024 std::iter::from_fn(move || {
5025 let (next_ix, _) = iterators
5026 .iter_mut()
5027 .enumerate()
5028 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
5029 .min_by(|(_, a), (_, b)| {
5030 let cmp = a
5031 .range
5032 .start
5033 .cmp(&b.range.start, self)
5034 // When the ranges are equal, sort by diagnostic severity
5035 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
5036 // and stabilize order with group_id
5037 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
5038 if reversed { cmp.reverse() } else { cmp }
5039 })?;
5040 iterators[next_ix]
5041 .next()
5042 .map(
5043 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
5044 diagnostic,
5045 range: FromAnchor::from_anchor(&range.start, self)
5046 ..FromAnchor::from_anchor(&range.end, self),
5047 },
5048 )
5049 })
5050 }
5051
5052 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
5053 /// should be used instead.
5054 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
5055 &self.diagnostics
5056 }
5057
5058 /// Returns all the diagnostic groups associated with the given
5059 /// language server ID. If no language server ID is provided,
5060 /// all diagnostic groups are returned.
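///
/// Illustrative sketch, assuming `snapshot: &BufferSnapshot`: inspect the primary
/// entry of every group, across all language servers.
///
/// ```ignore
/// for (_server_id, group) in snapshot.diagnostic_groups(None) {
///     let _primary_entry = &group.entries[group.primary_ix];
/// }
/// ```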
5061 pub fn diagnostic_groups(
5062 &self,
5063 language_server_id: Option<LanguageServerId>,
5064 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
5065 let mut groups = Vec::new();
5066
5067 if let Some(language_server_id) = language_server_id {
5068 if let Ok(ix) = self
5069 .diagnostics
5070 .binary_search_by_key(&language_server_id, |e| e.0)
5071 {
5072 self.diagnostics[ix]
5073 .1
5074 .groups(language_server_id, &mut groups, self);
5075 }
5076 } else {
5077 for (language_server_id, diagnostics) in self.diagnostics.iter() {
5078 diagnostics.groups(*language_server_id, &mut groups, self);
5079 }
5080 }
5081
5082 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
5083 let a_start = &group_a.entries[group_a.primary_ix].range.start;
5084 let b_start = &group_b.entries[group_b.primary_ix].range.start;
5085 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5086 });
5087
5088 groups
5089 }
5090
5091 /// Returns an iterator over the diagnostics for the given group.
5092 pub fn diagnostic_group<O>(
5093 &self,
5094 group_id: usize,
5095 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5096 where
5097 O: FromAnchor + 'static,
5098 {
5099 self.diagnostics
5100 .iter()
5101 .flat_map(move |(_, set)| set.group(group_id, self))
5102 }
5103
5104 /// An integer version number that accounts for all updates besides
5105 /// the buffer's text itself (which is versioned via a version vector).
5106 pub fn non_text_state_update_count(&self) -> usize {
5107 self.non_text_state_update_count
5108 }
5109
5110 /// An integer version that changes when the buffer's syntax changes.
5111 pub fn syntax_update_count(&self) -> usize {
5112 self.syntax.update_count()
5113 }
5114
5115 /// Returns a snapshot of the underlying file.
5116 pub fn file(&self) -> Option<&Arc<dyn File>> {
5117 self.file.as_ref()
5118 }
5119
5120 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5121 if let Some(file) = self.file() {
5122 if file.path().file_name().is_none() || include_root {
5123 Some(file.full_path(cx).to_string_lossy().into_owned())
5124 } else {
5125 Some(file.path().display(file.path_style(cx)).to_string())
5126 }
5127 } else {
5128 None
5129 }
5130 }
5131
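/// Collects the words in the given offset range, keyed by their text and mapped
/// to their anchor ranges.
///
/// Illustrative sketch, assuming `snapshot: &BufferSnapshot`: gather words that
/// fuzzily contain "cfg", skipping words that start with a digit.
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```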
5132 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5133 let query_str = query.fuzzy_contents;
5134 if query_str.is_some_and(|query| query.is_empty()) {
5135 return BTreeMap::default();
5136 }
5137
5138 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5139 language,
5140 override_id: None,
5141 }));
5142
5143 let mut query_ix = 0;
5144 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5145 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5146
5147 let mut words = BTreeMap::default();
5148 let mut current_word_start_ix = None;
5149 let mut chunk_ix = query.range.start;
5150 for chunk in self.chunks(query.range, false) {
5151 for (i, c) in chunk.text.char_indices() {
5152 let ix = chunk_ix + i;
5153 if classifier.is_word(c) {
5154 if current_word_start_ix.is_none() {
5155 current_word_start_ix = Some(ix);
5156 }
5157
5158 if let Some(query_chars) = &query_chars
5159 && query_ix < query_len
5160 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5161 {
5162 query_ix += 1;
5163 }
5164 continue;
5165 } else if let Some(word_start) = current_word_start_ix.take()
5166 && query_ix == query_len
5167 {
5168 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5169 let mut word_text = self.text_for_range(word_start..ix).peekable();
5170 let first_char = word_text
5171 .peek()
5172 .and_then(|first_chunk| first_chunk.chars().next());
5173 // As a heuristic to reduce useless completions, skip empty strings and "words" that start with a digit.
5174 if !query.skip_digits
5175 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5176 {
5177 words.insert(word_text.collect(), word_range);
5178 }
5179 }
5180 query_ix = 0;
5181 }
5182 chunk_ix += chunk.text.len();
5183 }
5184
5185 words
5186 }
5187}
5188
5189pub struct WordsQuery<'a> {
5190 /// Only return words that contain all of the characters of the fuzzy string, in order.
5191 pub fuzzy_contents: Option<&'a str>,
5192 /// Skips words that start with a digit.
5193 pub skip_digits: bool,
5194 /// The buffer offset range in which to look for words.
5195 pub range: Range<usize>,
5196}
5197
5198fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5199 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5200}
5201
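/// Illustrative sketch: leading indentation is measured until the first
/// non-indent character, and the kind is taken from the first indent character.
///
/// ```ignore
/// let indent = indent_size_for_text("    let x = 1;".chars());
/// // indent.len == 4 and indent.kind == IndentKind::Space
/// ```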
5202fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5203 let mut result = IndentSize::spaces(0);
5204 for c in text {
5205 let kind = match c {
5206 ' ' => IndentKind::Space,
5207 '\t' => IndentKind::Tab,
5208 _ => break,
5209 };
5210 if result.len == 0 {
5211 result.kind = kind;
5212 }
5213 result.len += 1;
5214 }
5215 result
5216}
5217
5218impl Clone for BufferSnapshot {
5219 fn clone(&self) -> Self {
5220 Self {
5221 text: self.text.clone(),
5222 syntax: self.syntax.clone(),
5223 file: self.file.clone(),
5224 remote_selections: self.remote_selections.clone(),
5225 diagnostics: self.diagnostics.clone(),
5226 language: self.language.clone(),
5227 tree_sitter_data: self.tree_sitter_data.clone(),
5228 non_text_state_update_count: self.non_text_state_update_count,
5229 capability: self.capability,
5230 }
5231 }
5232}
5233
5234impl Deref for BufferSnapshot {
5235 type Target = text::BufferSnapshot;
5236
5237 fn deref(&self) -> &Self::Target {
5238 &self.text
5239 }
5240}
5241
5242unsafe impl Send for BufferChunks<'_> {}
5243
5244impl<'a> BufferChunks<'a> {
5245 pub(crate) fn new(
5246 text: &'a Rope,
5247 range: Range<usize>,
5248 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5249 diagnostics: bool,
5250 buffer_snapshot: Option<&'a BufferSnapshot>,
5251 ) -> Self {
5252 let mut highlights = None;
5253 if let Some((captures, highlight_maps)) = syntax {
5254 highlights = Some(BufferChunkHighlights {
5255 captures,
5256 next_capture: None,
5257 stack: Default::default(),
5258 highlight_maps,
5259 })
5260 }
5261
5262 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5263 let chunks = text.chunks_in_range(range.clone());
5264
5265 let mut this = BufferChunks {
5266 range,
5267 buffer_snapshot,
5268 chunks,
5269 diagnostic_endpoints,
5270 error_depth: 0,
5271 warning_depth: 0,
5272 information_depth: 0,
5273 hint_depth: 0,
5274 unnecessary_depth: 0,
5275 underline: true,
5276 highlights,
5277 };
5278 this.initialize_diagnostic_endpoints();
5279 this
5280 }
5281
5282 /// Seeks to the given byte range in the buffer, reusing or re-querying syntax highlights as needed.
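///
/// Illustrative sketch, assuming `chunks` was produced by [`BufferSnapshot::chunks`]
/// and `sub_range` is a byte range to re-read:
///
/// ```ignore
/// chunks.seek(sub_range.clone());
/// let text: String = chunks.by_ref().map(|chunk| chunk.text).collect();
/// ```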
5283 pub fn seek(&mut self, range: Range<usize>) {
        let old_range = std::mem::replace(&mut self.range, range.clone());
        self.chunks.set_range(self.range.clone());
        if let Some(highlights) = self.highlights.as_mut() {
            if old_range.start <= self.range.start && old_range.end >= self.range.end {
                // Reuse existing highlights stack, as the new range is a subrange of the old one.
                highlights
                    .stack
                    .retain(|(end_offset, _)| *end_offset > range.start);
                if let Some(capture) = &highlights.next_capture
                    && range.start >= capture.node.start_byte()
                {
                    let next_capture_end = capture.node.end_byte();
                    if range.start < next_capture_end {
                        highlights.stack.push((
                            next_capture_end,
                            highlights.highlight_maps[capture.grammar_index].get(capture.index),
                        ));
                    }
                    highlights.next_capture.take();
                }
            } else if let Some(snapshot) = self.buffer_snapshot {
                let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
                *highlights = BufferChunkHighlights {
                    captures,
                    next_capture: None,
                    stack: Default::default(),
                    highlight_maps,
                };
            } else {
                // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
                // Seeking such BufferChunks is not supported.
                debug_assert!(
                    false,
                    "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
                );
            }

            highlights.captures.set_byte_range(self.range.clone());
            self.initialize_diagnostic_endpoints();
        }
    }

    fn initialize_diagnostic_endpoints(&mut self) {
        if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
            && let Some(buffer) = self.buffer_snapshot
        {
            let mut diagnostic_endpoints = Vec::new();
            for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start,
                    is_start: true,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end,
                    is_start: false,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
            }
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
            *diagnostics = diagnostic_endpoints.into_iter().peekable();
            self.hint_depth = 0;
            self.error_depth = 0;
            self.warning_depth = 0;
            self.information_depth = 0;
        }
    }

    /// The current byte offset in the buffer.
    pub fn offset(&self) -> usize {
        self.range.start
    }

    /// The byte range that remains to be yielded by this iterator.
    pub fn range(&self) -> Range<usize> {
        self.range.clone()
    }

    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }

        if endpoint.is_unnecessary {
            if endpoint.is_start {
                self.unnecessary_depth += 1;
            } else {
                self.unnecessary_depth -= 1;
            }
        }
    }

    fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }

    fn current_code_is_unnecessary(&self) -> bool {
        self.unnecessary_depth > 0
    }
}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
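            // Drop any highlight scopes that have already ended at or before the current position.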
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

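        // Apply any diagnostic endpoints at or before the current position and record the offset of the next one ahead.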
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

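        // Yield the next chunk of text, clipped to the nearest capture boundary, diagnostic endpoint, or enclosing highlight end.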
        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

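            // Shift and mask the tab and char bitmaps so they line up with the returned slice.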
            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a single tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes this [`IndentSize`] and returns a new one, shrunk
    /// ([`Ordering::Less`]) or enlarged ([`Ordering::Greater`]) by the given
    /// size. The adjustment only applies when the indent kinds match (or when
    /// an empty indent is enlarged), and shrinking is skipped if the indent is
    /// smaller than the given size.
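    ///
    /// Illustrative sketch (marked `ignore`, so it is not compiled as a doctest):
    ///
    /// ```ignore
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(grown.len, 8);
    ///
    /// // Shrinking requires matching kinds and a large enough indent.
    /// let shrunk = grown.with_delta(Ordering::Less, IndentSize::spaces(4));
    /// assert_eq!(shrunk.len, 4);
    /// ```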
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

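    /// The indentation width in columns, with a tab counted as `tab_size` columns.
    ///
    /// Illustrative sketch (marked `ignore`, so it is not compiled as a doctest):
    ///
    /// ```ignore
    /// let tab_size = NonZeroU32::new(4).unwrap();
    /// assert_eq!(IndentSize::tab().len_with_expanded_tabs(tab_size), 4);
    /// assert_eq!(IndentSize::spaces(2).len_with_expanded_tabs(tab_size), 2);
    /// ```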
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

/// A minimal [`File`] implementation used in tests.
#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

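/// Groups an ascending sequence of `u32` values into contiguous ranges, starting
/// a new range whenever a value is not consecutive with the previous one or the
/// current range has reached `max_len`.
///
/// Illustrative sketch (marked `ignore`, so it is not compiled as a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7, 9..10]);
/// ```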
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

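/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally consulting a [`LanguageScope`]'s extra word characters (and a
/// [`CharScopeContext`] such as completions or linked edits) to extend the
/// default word character set.
///
/// Illustrative sketch (marked `ignore`, so it is not compiled as a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
/// ```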
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

    /// Classifies `c`, using the given `ignore_punctuation` value instead of
    /// this classifier's own setting.
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    /// Classifies `c` using this classifier's `ignore_punctuation` setting.
    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
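///
/// Illustrative sketch (marked `ignore`, so it is not compiled as a doctest):
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n    let x = 1;\t\n}\n");
/// // One range for the two trailing spaces, one for the trailing tab.
/// assert_eq!(trailing_whitespace_ranges(&rope).len(), 2);
/// ```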
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

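            // If this chunk starts mid-line and the partial first line is all whitespace,
            // the trailing run may have begun in the previous chunk, so extend it backwards.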
            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}