1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff_with_offsets,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::channel::oneshot;
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TaskLabel, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::{Arc, LazyLock},
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
79/// A label for the background task spawned by the buffer to compute
80/// a diff against the contents of its file.
81pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
82
/// Indicates whether a [`Buffer`] can be edited.
84#[derive(PartialEq, Clone, Copy, Debug)]
85pub enum Capability {
86 /// The buffer is a mutable replica.
87 ReadWrite,
    /// The buffer is a mutable replica, but has been toggled to be read-only.
89 Read,
90 /// The buffer is a read-only replica.
91 ReadOnly,
92}
93
94impl Capability {
95 /// Returns `true` if the capability is `ReadWrite`.
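    ///
    /// A minimal sketch of gating edits on the capability:
    ///
    /// ```ignore
    /// // Only `ReadWrite` buffers report themselves as editable.
    /// assert!(Capability::ReadWrite.editable());
    /// assert!(!Capability::Read.editable());
    /// assert!(!Capability::ReadOnly.editable());
    /// ```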
96 pub fn editable(self) -> bool {
97 matches!(self, Capability::ReadWrite)
98 }
99}
100
101pub type BufferRow = u32;
102
/// An in-memory representation of a source code file, including its text,
/// syntax trees, and diagnostics.
105pub struct Buffer {
106 text: TextBuffer,
107 branch_state: Option<BufferBranchState>,
108 /// Filesystem state, `None` when there is no path.
109 file: Option<Arc<dyn File>>,
110 /// The mtime of the file when this buffer was last loaded from
111 /// or saved to disk.
112 saved_mtime: Option<MTime>,
113 /// The version vector when this buffer was last loaded from
114 /// or saved to disk.
115 saved_version: clock::Global,
116 preview_version: clock::Global,
117 transaction_depth: usize,
118 was_dirty_before_starting_transaction: Option<bool>,
119 reload_task: Option<Task<Result<()>>>,
120 language: Option<Arc<Language>>,
121 autoindent_requests: Vec<Arc<AutoindentRequest>>,
122 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
123 pending_autoindent: Option<Task<()>>,
124 sync_parse_timeout: Option<Duration>,
125 syntax_map: Mutex<SyntaxMap>,
126 reparse: Option<Task<()>>,
127 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
128 non_text_state_update_count: usize,
129 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
130 remote_selections: TreeMap<ReplicaId, SelectionSet>,
131 diagnostics_timestamp: clock::Lamport,
132 completion_triggers: BTreeSet<String>,
133 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
134 completion_triggers_timestamp: clock::Lamport,
135 deferred_ops: OperationQueue<Operation>,
136 capability: Capability,
137 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
140 has_unsaved_edits: Cell<(clock::Global, bool)>,
141 change_bits: Vec<rc::Weak<Cell<bool>>>,
142 _subscriptions: Vec<gpui::Subscription>,
143 tree_sitter_data: Arc<TreeSitterData>,
144 encoding: &'static Encoding,
145 has_bom: bool,
146}
147
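/// A per-snapshot cache of data derived from the syntax tree, grouped into row
/// chunks. Bracket matches are computed lazily per chunk and cached in
/// `brackets_by_chunks`, which always holds one slot per chunk.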
148#[derive(Debug)]
149pub struct TreeSitterData {
150 chunks: RowChunks,
151 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
152}
153
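/// The maximum number of buffer rows grouped into a single chunk of [`TreeSitterData`].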
154const MAX_ROWS_IN_A_CHUNK: u32 = 50;
155
156impl TreeSitterData {
157 fn clear(&mut self, snapshot: text::BufferSnapshot) {
158 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
159 self.brackets_by_chunks.get_mut().clear();
160 self.brackets_by_chunks
161 .get_mut()
162 .resize(self.chunks.len(), None);
163 }
164
165 fn new(snapshot: text::BufferSnapshot) -> Self {
166 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
167 Self {
168 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
169 chunks,
170 }
171 }
172
173 fn version(&self) -> &clock::Global {
174 self.chunks.version()
175 }
176}
177
178#[derive(Copy, Clone, Debug, PartialEq, Eq)]
179pub enum ParseStatus {
180 Idle,
181 Parsing,
182}
183
184struct BufferBranchState {
185 base_buffer: Entity<Buffer>,
186 merged_operations: Vec<Lamport>,
187}
188
189/// An immutable, cheaply cloneable representation of a fixed
190/// state of a buffer.
191pub struct BufferSnapshot {
192 pub text: text::BufferSnapshot,
193 pub syntax: SyntaxSnapshot,
194 file: Option<Arc<dyn File>>,
195 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
196 remote_selections: TreeMap<ReplicaId, SelectionSet>,
197 language: Option<Arc<Language>>,
198 non_text_state_update_count: usize,
199 tree_sitter_data: Arc<TreeSitterData>,
200 pub capability: Capability,
201}
202
203/// The kind and amount of indentation in a particular line. For now,
204/// assumes that indentation is all the same character.
205#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
206pub struct IndentSize {
207 /// The number of bytes that comprise the indentation.
208 pub len: u32,
209 /// The kind of whitespace used for indentation.
210 pub kind: IndentKind,
211}
212
213/// A whitespace character that's used for indentation.
214#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
215pub enum IndentKind {
216 /// An ASCII space character.
217 #[default]
218 Space,
219 /// An ASCII tab character.
220 Tab,
221}
222
223/// The shape of a selection cursor.
224#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
225pub enum CursorShape {
226 /// A vertical bar
227 #[default]
228 Bar,
229 /// A block that surrounds the following character
230 Block,
231 /// An underline that runs along the following character
232 Underline,
233 /// A box drawn around the following character
234 Hollow,
235}
236
237impl From<settings::CursorShape> for CursorShape {
238 fn from(shape: settings::CursorShape) -> Self {
239 match shape {
240 settings::CursorShape::Bar => CursorShape::Bar,
241 settings::CursorShape::Block => CursorShape::Block,
242 settings::CursorShape::Underline => CursorShape::Underline,
243 settings::CursorShape::Hollow => CursorShape::Hollow,
244 }
245 }
246}
247
248#[derive(Clone, Debug)]
249struct SelectionSet {
250 line_mode: bool,
251 cursor_shape: CursorShape,
252 selections: Arc<[Selection<Anchor>]>,
253 lamport_timestamp: clock::Lamport,
254}
255
256/// A diagnostic associated with a certain range of a buffer.
257#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
258pub struct Diagnostic {
259 /// The name of the service that produced this diagnostic.
260 pub source: Option<String>,
261 /// The ID provided by the dynamic registration that produced this diagnostic.
262 pub registration_id: Option<SharedString>,
263 /// A machine-readable code that identifies this diagnostic.
264 pub code: Option<NumberOrString>,
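    /// A URI with more information about this diagnostic's code, if provided by the server.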
265 pub code_description: Option<lsp::Uri>,
266 /// Whether this diagnostic is a hint, warning, or error.
267 pub severity: DiagnosticSeverity,
268 /// The human-readable message associated with this diagnostic.
269 pub message: String,
    /// The human-readable message, in Markdown format, if available.
271 pub markdown: Option<String>,
272 /// An id that identifies the group to which this diagnostic belongs.
273 ///
274 /// When a language server produces a diagnostic with
275 /// one or more associated diagnostics, those diagnostics are all
276 /// assigned a single group ID.
277 pub group_id: usize,
278 /// Whether this diagnostic is the primary diagnostic for its group.
279 ///
280 /// In a given group, the primary diagnostic is the top-level diagnostic
281 /// returned by the language server. The non-primary diagnostics are the
282 /// associated diagnostics.
283 pub is_primary: bool,
284 /// Whether this diagnostic is considered to originate from an analysis of
285 /// files on disk, as opposed to any unsaved buffer contents. This is a
286 /// property of a given diagnostic source, and is configured for a given
287 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
288 /// for the language server.
289 pub is_disk_based: bool,
290 /// Whether this diagnostic marks unnecessary code.
291 pub is_unnecessary: bool,
    /// A quick way to separate diagnostic groups based on their source.
293 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back
    /// to the server when code actions are requested for this diagnostic.
295 pub data: Option<Value>,
296 /// Whether to underline the corresponding text range in the editor.
297 pub underline: bool,
298}
299
300#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
301pub enum DiagnosticSourceKind {
    /// Diagnostics pulled by the client, e.g. via `textDocument/diagnostic` requests.
    Pulled,
    /// Diagnostics pushed by the server, e.g. via `textDocument/publishDiagnostics` notifications.
    Pushed,
    /// Diagnostics from any other origin.
    Other,
305}
306
307/// An operation used to synchronize this buffer with its other replicas.
308#[derive(Clone, Debug, PartialEq)]
309pub enum Operation {
310 /// A text operation.
311 Buffer(text::Operation),
312
313 /// An update to the buffer's diagnostics.
314 UpdateDiagnostics {
315 /// The id of the language server that produced the new diagnostics.
316 server_id: LanguageServerId,
317 /// The diagnostics.
318 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
319 /// The buffer's lamport timestamp.
320 lamport_timestamp: clock::Lamport,
321 },
322
323 /// An update to the most recent selections in this buffer.
324 UpdateSelections {
325 /// The selections.
326 selections: Arc<[Selection<Anchor>]>,
327 /// The buffer's lamport timestamp.
328 lamport_timestamp: clock::Lamport,
329 /// Whether the selections are in 'line mode'.
330 line_mode: bool,
331 /// The [`CursorShape`] associated with these selections.
332 cursor_shape: CursorShape,
333 },
334
335 /// An update to the characters that should trigger autocompletion
336 /// for this buffer.
337 UpdateCompletionTriggers {
338 /// The characters that trigger autocompletion.
339 triggers: Vec<String>,
340 /// The buffer's lamport timestamp.
341 lamport_timestamp: clock::Lamport,
342 /// The language server ID.
343 server_id: LanguageServerId,
344 },
345
346 /// An update to the line ending type of this buffer.
347 UpdateLineEnding {
348 /// The line ending type.
349 line_ending: LineEnding,
350 /// The buffer's lamport timestamp.
351 lamport_timestamp: clock::Lamport,
352 },
353}
354
355/// An event that occurs in a buffer.
356#[derive(Clone, Debug, PartialEq)]
357pub enum BufferEvent {
358 /// The buffer was changed in a way that must be
359 /// propagated to its other replicas.
360 Operation {
361 operation: Operation,
362 is_local: bool,
363 },
364 /// The buffer was edited.
365 Edited,
366 /// The buffer's `dirty` bit changed.
367 DirtyChanged,
368 /// The buffer was saved.
369 Saved,
370 /// The buffer's file was changed on disk.
371 FileHandleChanged,
372 /// The buffer was reloaded.
373 Reloaded,
    /// The buffer needs to be reloaded.
375 ReloadNeeded,
376 /// The buffer's language was changed.
    /// The boolean is `true` if the buffer previously had no effective language
    /// (none, or plain text), but now has one.
378 LanguageChanged(bool),
379 /// The buffer's syntax trees were updated.
380 Reparsed,
381 /// The buffer's diagnostics were updated.
382 DiagnosticsUpdated,
383 /// The buffer gained or lost editing capabilities.
384 CapabilityChanged,
385}
386
387/// The file associated with a buffer.
388pub trait File: Send + Sync + Any {
389 /// Returns the [`LocalFile`] associated with this file, if the
390 /// file is local.
391 fn as_local(&self) -> Option<&dyn LocalFile>;
392
393 /// Returns whether this file is local.
394 fn is_local(&self) -> bool {
395 self.as_local().is_some()
396 }
397
398 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
399 /// only available in some states, such as modification time.
400 fn disk_state(&self) -> DiskState;
401
402 /// Returns the path of this file relative to the worktree's root directory.
403 fn path(&self) -> &Arc<RelPath>;
404
405 /// Returns the path of this file relative to the worktree's parent directory (this means it
406 /// includes the name of the worktree's root folder).
407 fn full_path(&self, cx: &App) -> PathBuf;
408
409 /// Returns the path style of this file.
410 fn path_style(&self, cx: &App) -> PathStyle;
411
412 /// Returns the last component of this handle's absolute path. If this handle refers to the root
413 /// of its worktree, then this method will return the name of the worktree itself.
414 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
415
416 /// Returns the id of the worktree to which this file belongs.
417 ///
418 /// This is needed for looking up project-specific settings.
419 fn worktree_id(&self, cx: &App) -> WorktreeId;
420
421 /// Converts this file into a protobuf message.
422 fn to_proto(&self, cx: &App) -> rpc::proto::File;
423
    /// Returns whether Zed considers this to be a private file.
425 fn is_private(&self) -> bool;
426}
427
428/// The file's storage status - whether it's stored (`Present`), and if so when it was last
429/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
430/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
431/// indicator for new files.
432#[derive(Copy, Clone, Debug, PartialEq)]
433pub enum DiskState {
434 /// File created in Zed that has not been saved.
435 New,
436 /// File present on the filesystem.
437 Present { mtime: MTime },
438 /// Deleted file that was previously present.
439 Deleted,
    /// An old version of a file that was previously present, usually obtained
    /// from a version control system (e.g. a Git blob).
442 Historic { was_deleted: bool },
443}
444
445impl DiskState {
446 /// Returns the file's last known modification time on disk.
447 pub fn mtime(self) -> Option<MTime> {
448 match self {
449 DiskState::New => None,
450 DiskState::Present { mtime } => Some(mtime),
451 DiskState::Deleted => None,
452 DiskState::Historic { .. } => None,
453 }
454 }
455
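    /// Returns whether the file currently exists on disk.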
456 pub fn exists(&self) -> bool {
457 match self {
458 DiskState::New => false,
459 DiskState::Present { .. } => true,
460 DiskState::Deleted => false,
461 DiskState::Historic { .. } => false,
462 }
463 }
464
465 /// Returns true if this state represents a deleted file.
466 pub fn is_deleted(&self) -> bool {
467 match self {
468 DiskState::Deleted => true,
469 DiskState::Historic { was_deleted } => *was_deleted,
470 _ => false,
471 }
472 }
473}
474
475/// The file associated with a buffer, in the case where the file is on the local disk.
476pub trait LocalFile: File {
    /// Returns the absolute path of this file.
478 fn abs_path(&self, cx: &App) -> PathBuf;
479
480 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
481 fn load(&self, cx: &App) -> Task<Result<String>>;
482
483 /// Loads the file's contents from disk.
484 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
485}
486
487/// The auto-indent behavior associated with an editing operation.
488/// For some editing operations, each affected line of text has its
489/// indentation recomputed. For other operations, the entire block
490/// of edited text is adjusted uniformly.
491#[derive(Clone, Debug)]
492pub enum AutoindentMode {
493 /// Indent each line of inserted text.
494 EachLine,
495 /// Apply the same indentation adjustment to all of the lines
496 /// in a given insertion.
497 Block {
498 /// The original indentation column of the first line of each
499 /// insertion, if it has been copied.
500 ///
501 /// Knowing this makes it possible to preserve the relative indentation
502 /// of every line in the insertion from when it was copied.
503 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion has its indentation adjusted by `b - a`, preserving
        /// its indentation relative to the first line.
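        ///
        /// For example, if a copied block's first line originally started at
        /// column 4 (`a = 4`) and auto-indent places that first line at column
        /// 8 (`b = 8`), then every subsequent line of the insertion is shifted
        /// right by `b - a = 4` columns.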
507 original_indent_columns: Vec<Option<u32>>,
508 },
509}
510
511#[derive(Clone)]
512struct AutoindentRequest {
513 before_edit: BufferSnapshot,
514 entries: Vec<AutoindentRequestEntry>,
515 is_block_mode: bool,
516 ignore_empty_lines: bool,
517}
518
519#[derive(Debug, Clone)]
520struct AutoindentRequestEntry {
521 /// A range of the buffer whose indentation should be adjusted.
522 range: Range<Anchor>,
523 /// The row of the edit start in the buffer before the edit was applied.
524 /// This is stored here because the anchor in range is created after
525 /// the edit, so it cannot be used with the before_edit snapshot.
526 old_row: Option<u32>,
527 indent_size: IndentSize,
528 original_indent_column: Option<u32>,
529}
530
531#[derive(Debug)]
532struct IndentSuggestion {
533 basis_row: u32,
534 delta: Ordering,
535 within_error: bool,
536}
537
538struct BufferChunkHighlights<'a> {
539 captures: SyntaxMapCaptures<'a>,
540 next_capture: Option<SyntaxMapCapture<'a>>,
541 stack: Vec<(usize, HighlightId)>,
542 highlight_maps: Vec<HighlightMap>,
543}
544
545/// An iterator that yields chunks of a buffer's text, along with their
546/// syntax highlights and diagnostic status.
547pub struct BufferChunks<'a> {
548 buffer_snapshot: Option<&'a BufferSnapshot>,
549 range: Range<usize>,
550 chunks: text::Chunks<'a>,
551 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
552 error_depth: usize,
553 warning_depth: usize,
554 information_depth: usize,
555 hint_depth: usize,
556 unnecessary_depth: usize,
557 underline: bool,
558 highlights: Option<BufferChunkHighlights<'a>>,
559}
560
561/// A chunk of a buffer's text, along with its syntax highlight and
562/// diagnostic status.
563#[derive(Clone, Debug, Default)]
564pub struct Chunk<'a> {
565 /// The text of the chunk.
566 pub text: &'a str,
567 /// The syntax highlighting style of the chunk.
568 pub syntax_highlight_id: Option<HighlightId>,
569 /// The highlight style that has been applied to this chunk in
570 /// the editor.
571 pub highlight_style: Option<HighlightStyle>,
572 /// The severity of diagnostic associated with this chunk, if any.
573 pub diagnostic_severity: Option<DiagnosticSeverity>,
574 /// A bitset of which characters are tabs in this string.
575 pub tabs: u128,
    /// A bitset marking the positions in this chunk where characters begin.
    pub chars: u128,
578 /// Whether this chunk of text is marked as unnecessary.
579 pub is_unnecessary: bool,
580 /// Whether this chunk of text was originally a tab character.
581 pub is_tab: bool,
582 /// Whether this chunk of text was originally an inlay.
583 pub is_inlay: bool,
584 /// Whether to underline the corresponding text range in the editor.
585 pub underline: bool,
586}
587
588/// A set of edits to a given version of a buffer, computed asynchronously.
589#[derive(Debug)]
590pub struct Diff {
591 pub base_version: clock::Global,
592 pub line_ending: LineEnding,
593 pub edits: Vec<(Range<usize>, Arc<str>)>,
594}
595
596#[derive(Debug, Clone, Copy)]
597pub(crate) struct DiagnosticEndpoint {
598 offset: usize,
599 is_start: bool,
600 underline: bool,
601 severity: DiagnosticSeverity,
602 is_unnecessary: bool,
603}
604
605/// A class of characters, used for characterizing a run of text.
606#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
607pub enum CharKind {
608 /// Whitespace.
609 Whitespace,
610 /// Punctuation.
611 Punctuation,
612 /// Word.
613 Word,
614}
615
616/// Context for character classification within a specific scope.
617#[derive(Copy, Clone, Eq, PartialEq, Debug)]
618pub enum CharScopeContext {
619 /// Character classification for completion queries.
620 ///
621 /// This context treats certain characters as word constituents that would
622 /// normally be considered punctuation, such as '-' in Tailwind classes
623 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
624 Completion,
625 /// Character classification for linked edits.
626 ///
627 /// This context handles characters that should be treated as part of
628 /// identifiers during linked editing operations, such as '.' in JSX
629 /// component names like `<Animated.View>`.
630 LinkedEdit,
631}
632
/// A runnable is the set of data about a buffer region that can be resolved into a task.
634pub struct Runnable {
635 pub tags: SmallVec<[RunnableTag; 1]>,
636 pub language: Arc<Language>,
637 pub buffer: BufferId,
638}
639
640#[derive(Default, Clone, Debug)]
641pub struct HighlightedText {
642 pub text: SharedString,
643 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
644}
645
646#[derive(Default, Debug)]
647struct HighlightedTextBuilder {
648 pub text: String,
649 highlights: Vec<(Range<usize>, HighlightStyle)>,
650}
651
652impl HighlightedText {
653 pub fn from_buffer_range<T: ToOffset>(
654 range: Range<T>,
655 snapshot: &text::BufferSnapshot,
656 syntax_snapshot: &SyntaxSnapshot,
657 override_style: Option<HighlightStyle>,
658 syntax_theme: &SyntaxTheme,
659 ) -> Self {
660 let mut highlighted_text = HighlightedTextBuilder::default();
661 highlighted_text.add_text_from_buffer_range(
662 range,
663 snapshot,
664 syntax_snapshot,
665 override_style,
666 syntax_theme,
667 );
668 highlighted_text.build()
669 }
670
671 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
672 gpui::StyledText::new(self.text.clone())
673 .with_default_highlights(default_style, self.highlights.iter().cloned())
674 }
675
    /// Returns the first line of the text, with leading whitespace trimmed
    /// unless a highlight begins within that whitespace, along with a boolean
    /// indicating whether more lines follow.
678 pub fn first_line_preview(self) -> (Self, bool) {
679 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
680 let first_line = &self.text[..newline_ix];
681
682 // Trim leading whitespace, unless an edit starts prior to it.
683 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
684 if let Some((first_highlight_range, _)) = self.highlights.first() {
685 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
686 }
687
688 let preview_text = &first_line[preview_start_ix..];
689 let preview_highlights = self
690 .highlights
691 .into_iter()
692 .skip_while(|(range, _)| range.end <= preview_start_ix)
693 .take_while(|(range, _)| range.start < newline_ix)
694 .filter_map(|(mut range, highlight)| {
695 range.start = range.start.saturating_sub(preview_start_ix);
696 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
697 if range.is_empty() {
698 None
699 } else {
700 Some((range, highlight))
701 }
702 });
703
704 let preview = Self {
705 text: SharedString::new(preview_text),
706 highlights: preview_highlights.collect(),
707 };
708
709 (preview, self.text.len() > newline_ix)
710 }
711}
712
713impl HighlightedTextBuilder {
714 pub fn build(self) -> HighlightedText {
715 HighlightedText {
716 text: self.text.into(),
717 highlights: self.highlights,
718 }
719 }
720
721 pub fn add_text_from_buffer_range<T: ToOffset>(
722 &mut self,
723 range: Range<T>,
724 snapshot: &text::BufferSnapshot,
725 syntax_snapshot: &SyntaxSnapshot,
726 override_style: Option<HighlightStyle>,
727 syntax_theme: &SyntaxTheme,
728 ) {
729 let range = range.to_offset(snapshot);
730 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
731 let start = self.text.len();
732 self.text.push_str(chunk.text);
733 let end = self.text.len();
734
735 if let Some(highlight_style) = chunk
736 .syntax_highlight_id
737 .and_then(|id| id.style(syntax_theme))
738 {
739 let highlight_style = override_style.map_or(highlight_style, |override_style| {
740 highlight_style.highlight(override_style)
741 });
742 self.highlights.push((start..end, highlight_style));
743 } else if let Some(override_style) = override_style {
744 self.highlights.push((start..end, override_style));
745 }
746 }
747 }
748
749 fn highlighted_chunks<'a>(
750 range: Range<usize>,
751 snapshot: &'a text::BufferSnapshot,
752 syntax_snapshot: &'a SyntaxSnapshot,
753 ) -> BufferChunks<'a> {
754 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
755 grammar
756 .highlights_config
757 .as_ref()
758 .map(|config| &config.query)
759 });
760
761 let highlight_maps = captures
762 .grammars()
763 .iter()
764 .map(|grammar| grammar.highlight_map())
765 .collect();
766
767 BufferChunks::new(
768 snapshot.as_rope(),
769 range,
770 Some((captures, highlight_maps)),
771 false,
772 None,
773 )
774 }
775}
776
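/// A preview of a set of proposed edits: a snapshot of the text before the
/// edits, a snapshot with the edits applied, and the syntax snapshot used to
/// diff and highlight the result without mutating the original buffer.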
777#[derive(Clone)]
778pub struct EditPreview {
779 old_snapshot: text::BufferSnapshot,
780 applied_edits_snapshot: text::BufferSnapshot,
781 syntax_snapshot: SyntaxSnapshot,
782}
783
784impl EditPreview {
785 pub fn as_unified_diff(
786 &self,
787 file: Option<&Arc<dyn File>>,
788 edits: &[(Range<Anchor>, impl AsRef<str>)],
789 ) -> Option<String> {
790 let (first, _) = edits.first()?;
791 let (last, _) = edits.last()?;
792
793 let start = first.start.to_point(&self.old_snapshot);
794 let old_end = last.end.to_point(&self.old_snapshot);
795 let new_end = last
796 .end
797 .bias_right(&self.old_snapshot)
798 .to_point(&self.applied_edits_snapshot);
799
800 let start = Point::new(start.row.saturating_sub(3), 0);
801 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
802 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
803
804 let diff_body = unified_diff_with_offsets(
805 &self
806 .old_snapshot
807 .text_for_range(start..old_end)
808 .collect::<String>(),
809 &self
810 .applied_edits_snapshot
811 .text_for_range(start..new_end)
812 .collect::<String>(),
813 start.row,
814 start.row,
815 );
816
817 let path = file.map(|f| f.path().as_unix_str());
818 let header = match path {
819 Some(p) => format!("--- a/{}\n+++ b/{}\n", p, p),
820 None => String::new(),
821 };
822
823 Some(format!("{}{}", header, diff_body))
824 }
825
826 pub fn highlight_edits(
827 &self,
828 current_snapshot: &BufferSnapshot,
829 edits: &[(Range<Anchor>, impl AsRef<str>)],
830 include_deletions: bool,
831 cx: &App,
832 ) -> HighlightedText {
833 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
834 return HighlightedText::default();
835 };
836
837 let mut highlighted_text = HighlightedTextBuilder::default();
838
839 let visible_range_in_preview_snapshot =
840 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
841 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
842
843 let insertion_highlight_style = HighlightStyle {
844 background_color: Some(cx.theme().status().created_background),
845 ..Default::default()
846 };
847 let deletion_highlight_style = HighlightStyle {
848 background_color: Some(cx.theme().status().deleted_background),
849 ..Default::default()
850 };
851 let syntax_theme = cx.theme().syntax();
852
853 for (range, edit_text) in edits {
854 let edit_new_end_in_preview_snapshot = range
855 .end
856 .bias_right(&self.old_snapshot)
857 .to_offset(&self.applied_edits_snapshot);
858 let edit_start_in_preview_snapshot =
859 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
860
861 let unchanged_range_in_preview_snapshot =
862 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
863 if !unchanged_range_in_preview_snapshot.is_empty() {
864 highlighted_text.add_text_from_buffer_range(
865 unchanged_range_in_preview_snapshot,
866 &self.applied_edits_snapshot,
867 &self.syntax_snapshot,
868 None,
869 syntax_theme,
870 );
871 }
872
873 let range_in_current_snapshot = range.to_offset(current_snapshot);
874 if include_deletions && !range_in_current_snapshot.is_empty() {
875 highlighted_text.add_text_from_buffer_range(
876 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
879 Some(deletion_highlight_style),
880 syntax_theme,
881 );
882 }
883
884 if !edit_text.as_ref().is_empty() {
885 highlighted_text.add_text_from_buffer_range(
886 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
887 &self.applied_edits_snapshot,
888 &self.syntax_snapshot,
889 Some(insertion_highlight_style),
890 syntax_theme,
891 );
892 }
893
894 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
895 }
896
897 highlighted_text.add_text_from_buffer_range(
898 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
899 &self.applied_edits_snapshot,
900 &self.syntax_snapshot,
901 None,
902 syntax_theme,
903 );
904
905 highlighted_text.build()
906 }
907
908 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
909 cx.new(|cx| {
910 let mut buffer = Buffer::local_normalized(
911 self.applied_edits_snapshot.as_rope().clone(),
912 self.applied_edits_snapshot.line_ending(),
913 cx,
914 );
915 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
916 buffer
917 })
918 }
919
920 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
921 let (first, _) = edits.first()?;
922 let (last, _) = edits.last()?;
923
924 let start = first
925 .start
926 .bias_left(&self.old_snapshot)
927 .to_point(&self.applied_edits_snapshot);
928 let end = last
929 .end
930 .bias_right(&self.old_snapshot)
931 .to_point(&self.applied_edits_snapshot);
932
933 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
934 let range = Point::new(start.row, 0)
935 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
936
937 Some(range)
938 }
939}
940
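/// A pair of matching open and close bracket ranges produced by a bracket
/// query, along with the depth of the syntax layer that matched them.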
941#[derive(Clone, Debug, PartialEq, Eq)]
942pub struct BracketMatch<T> {
943 pub open_range: Range<T>,
944 pub close_range: Range<T>,
945 pub newline_only: bool,
946 pub syntax_layer_depth: usize,
947 pub color_index: Option<usize>,
948}
949
950impl<T> BracketMatch<T> {
951 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
952 (self.open_range, self.close_range)
953 }
954}
955
956impl Buffer {
957 /// Create a new buffer with the given base text.
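    ///
    /// A minimal sketch of constructing a local buffer inside a GPUI entity
    /// (the surrounding `cx` is assumed to be an `&mut App` or a test context):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```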
958 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
959 Self::build(
960 TextBuffer::new(
961 ReplicaId::LOCAL,
962 cx.entity_id().as_non_zero_u64().into(),
963 base_text.into(),
964 ),
965 None,
966 Capability::ReadWrite,
967 )
968 }
969
970 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
971 pub fn local_normalized(
972 base_text_normalized: Rope,
973 line_ending: LineEnding,
974 cx: &Context<Self>,
975 ) -> Self {
976 Self::build(
977 TextBuffer::new_normalized(
978 ReplicaId::LOCAL,
979 cx.entity_id().as_non_zero_u64().into(),
980 line_ending,
981 base_text_normalized,
982 ),
983 None,
984 Capability::ReadWrite,
985 )
986 }
987
988 /// Create a new buffer that is a replica of a remote buffer.
989 pub fn remote(
990 remote_id: BufferId,
991 replica_id: ReplicaId,
992 capability: Capability,
993 base_text: impl Into<String>,
994 ) -> Self {
995 Self::build(
996 TextBuffer::new(replica_id, remote_id, base_text.into()),
997 None,
998 capability,
999 )
1000 }
1001
1002 /// Create a new buffer that is a replica of a remote buffer, populating its
1003 /// state from the given protobuf message.
1004 pub fn from_proto(
1005 replica_id: ReplicaId,
1006 capability: Capability,
1007 message: proto::BufferState,
1008 file: Option<Arc<dyn File>>,
1009 ) -> Result<Self> {
1010 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
1011 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
1012 let mut this = Self::build(buffer, file, capability);
1013 this.text.set_line_ending(proto::deserialize_line_ending(
1014 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
1015 ));
1016 this.saved_version = proto::deserialize_version(&message.saved_version);
1017 this.saved_mtime = message.saved_mtime.map(|time| time.into());
1018 Ok(this)
1019 }
1020
1021 /// Serialize the buffer's state to a protobuf message.
1022 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
1023 proto::BufferState {
1024 id: self.remote_id().into(),
1025 file: self.file.as_ref().map(|f| f.to_proto(cx)),
1026 base_text: self.base_text().to_string(),
1027 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
1028 saved_version: proto::serialize_version(&self.saved_version),
1029 saved_mtime: self.saved_mtime.map(|time| time.into()),
1030 }
1031 }
1032
    /// Serializes, as protobuf operations, all of the changes to the buffer since the given version.
1034 pub fn serialize_ops(
1035 &self,
1036 since: Option<clock::Global>,
1037 cx: &App,
1038 ) -> Task<Vec<proto::Operation>> {
1039 let mut operations = Vec::new();
1040 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1041
1042 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1043 proto::serialize_operation(&Operation::UpdateSelections {
1044 selections: set.selections.clone(),
1045 lamport_timestamp: set.lamport_timestamp,
1046 line_mode: set.line_mode,
1047 cursor_shape: set.cursor_shape,
1048 })
1049 }));
1050
1051 for (server_id, diagnostics) in &self.diagnostics {
1052 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1053 lamport_timestamp: self.diagnostics_timestamp,
1054 server_id: *server_id,
1055 diagnostics: diagnostics.iter().cloned().collect(),
1056 }));
1057 }
1058
1059 for (server_id, completions) in &self.completion_triggers_per_language_server {
1060 operations.push(proto::serialize_operation(
1061 &Operation::UpdateCompletionTriggers {
1062 triggers: completions.iter().cloned().collect(),
1063 lamport_timestamp: self.completion_triggers_timestamp,
1064 server_id: *server_id,
1065 },
1066 ));
1067 }
1068
1069 let text_operations = self.text.operations().clone();
1070 cx.background_spawn(async move {
1071 let since = since.unwrap_or_default();
1072 operations.extend(
1073 text_operations
1074 .iter()
1075 .filter(|(_, op)| !since.observed(op.timestamp()))
1076 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1077 );
1078 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1079 operations
1080 })
1081 }
1082
1083 /// Assign a language to the buffer, returning the buffer.
1084 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1085 self.set_language_async(Some(language), cx);
1086 self
1087 }
1088
    /// Assign a language to the buffer, blocking for up to 1ms to reparse it, and return the buffer.
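    ///
    /// A sketch of the builder-style usage (assuming `rust_language` is an
    /// `Arc<Language>` obtained elsewhere):
    ///
    /// ```ignore
    /// let buffer =
    ///     cx.new(|cx| Buffer::local("fn main() {}", cx).with_language(rust_language, cx));
    /// ```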
1090 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1091 self.set_language(Some(language), cx);
1092 self
1093 }
1094
1095 /// Returns the [`Capability`] of this buffer.
1096 pub fn capability(&self) -> Capability {
1097 self.capability
1098 }
1099
1100 /// Whether this buffer can only be read.
1101 pub fn read_only(&self) -> bool {
1102 !self.capability.editable()
1103 }
1104
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1106 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1107 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1108 let snapshot = buffer.snapshot();
1109 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1110 let tree_sitter_data = TreeSitterData::new(snapshot);
1111 Self {
1112 saved_mtime,
1113 tree_sitter_data: Arc::new(tree_sitter_data),
1114 saved_version: buffer.version(),
1115 preview_version: buffer.version(),
1116 reload_task: None,
1117 transaction_depth: 0,
1118 was_dirty_before_starting_transaction: None,
1119 has_unsaved_edits: Cell::new((buffer.version(), false)),
1120 text: buffer,
1121 branch_state: None,
1122 file,
1123 capability,
1124 syntax_map,
1125 reparse: None,
1126 non_text_state_update_count: 0,
1127 sync_parse_timeout: Some(Duration::from_millis(1)),
1128 parse_status: watch::channel(ParseStatus::Idle),
1129 autoindent_requests: Default::default(),
1130 wait_for_autoindent_txs: Default::default(),
1131 pending_autoindent: Default::default(),
1132 language: None,
1133 remote_selections: Default::default(),
1134 diagnostics: Default::default(),
1135 diagnostics_timestamp: Lamport::MIN,
1136 completion_triggers: Default::default(),
1137 completion_triggers_per_language_server: Default::default(),
1138 completion_triggers_timestamp: Lamport::MIN,
1139 deferred_ops: OperationQueue::new(),
1140 has_conflict: false,
1141 change_bits: Default::default(),
1142 _subscriptions: Vec::new(),
1143 encoding: encoding_rs::UTF_8,
1144 has_bom: false,
1145 }
1146 }
1147
1148 pub fn build_snapshot(
1149 text: Rope,
1150 language: Option<Arc<Language>>,
1151 language_registry: Option<Arc<LanguageRegistry>>,
1152 cx: &mut App,
1153 ) -> impl Future<Output = BufferSnapshot> + use<> {
1154 let entity_id = cx.reserve_entity::<Self>().entity_id();
1155 let buffer_id = entity_id.as_non_zero_u64().into();
1156 async move {
1157 let text =
1158 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1159 .snapshot();
1160 let mut syntax = SyntaxMap::new(&text).snapshot();
1161 if let Some(language) = language.clone() {
1162 let language_registry = language_registry.clone();
1163 syntax.reparse(&text, language_registry, language);
1164 }
1165 let tree_sitter_data = TreeSitterData::new(text.clone());
1166 BufferSnapshot {
1167 text,
1168 syntax,
1169 file: None,
1170 diagnostics: Default::default(),
1171 remote_selections: Default::default(),
1172 tree_sitter_data: Arc::new(tree_sitter_data),
1173 language,
1174 non_text_state_update_count: 0,
1175 capability: Capability::ReadOnly,
1176 }
1177 }
1178 }
1179
1180 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1181 let entity_id = cx.reserve_entity::<Self>().entity_id();
1182 let buffer_id = entity_id.as_non_zero_u64().into();
1183 let text = TextBuffer::new_normalized(
1184 ReplicaId::LOCAL,
1185 buffer_id,
1186 Default::default(),
1187 Rope::new(),
1188 )
1189 .snapshot();
1190 let syntax = SyntaxMap::new(&text).snapshot();
1191 let tree_sitter_data = TreeSitterData::new(text.clone());
1192 BufferSnapshot {
1193 text,
1194 syntax,
1195 tree_sitter_data: Arc::new(tree_sitter_data),
1196 file: None,
1197 diagnostics: Default::default(),
1198 remote_selections: Default::default(),
1199 language: None,
1200 non_text_state_update_count: 0,
1201 capability: Capability::ReadOnly,
1202 }
1203 }
1204
1205 #[cfg(any(test, feature = "test-support"))]
1206 pub fn build_snapshot_sync(
1207 text: Rope,
1208 language: Option<Arc<Language>>,
1209 language_registry: Option<Arc<LanguageRegistry>>,
1210 cx: &mut App,
1211 ) -> BufferSnapshot {
1212 let entity_id = cx.reserve_entity::<Self>().entity_id();
1213 let buffer_id = entity_id.as_non_zero_u64().into();
1214 let text =
1215 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1216 .snapshot();
1217 let mut syntax = SyntaxMap::new(&text).snapshot();
1218 if let Some(language) = language.clone() {
1219 syntax.reparse(&text, language_registry, language);
1220 }
1221 let tree_sitter_data = TreeSitterData::new(text.clone());
1222 BufferSnapshot {
1223 text,
1224 syntax,
1225 tree_sitter_data: Arc::new(tree_sitter_data),
1226 file: None,
1227 diagnostics: Default::default(),
1228 remote_selections: Default::default(),
1229 language,
1230 non_text_state_update_count: 0,
1231 capability: Capability::ReadOnly,
1232 }
1233 }
1234
1235 /// Retrieve a snapshot of the buffer's current state. This is computationally
1236 /// cheap, and allows reading from the buffer on a background thread.
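    ///
    /// A sketch of reading a snapshot off the main thread (assuming `buffer`
    /// is an `Entity<Buffer>` and `cx` can spawn background tasks):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is immutable, so it can be inspected freely here.
    ///     let line_count = snapshot.text.max_point().row + 1;
    /// })
    /// .detach();
    /// ```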
1237 pub fn snapshot(&self) -> BufferSnapshot {
1238 let text = self.text.snapshot();
1239 let mut syntax_map = self.syntax_map.lock();
1240 syntax_map.interpolate(&text);
1241 let syntax = syntax_map.snapshot();
1242
1243 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1244 Arc::new(TreeSitterData::new(text.clone()))
1245 } else {
1246 self.tree_sitter_data.clone()
1247 };
1248
1249 BufferSnapshot {
1250 text,
1251 syntax,
1252 tree_sitter_data,
1253 file: self.file.clone(),
1254 remote_selections: self.remote_selections.clone(),
1255 diagnostics: self.diagnostics.clone(),
1256 language: self.language.clone(),
1257 non_text_state_update_count: self.non_text_state_update_count,
1258 capability: self.capability,
1259 }
1260 }
1261
1262 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1263 let this = cx.entity();
1264 cx.new(|cx| {
1265 let mut branch = Self {
1266 branch_state: Some(BufferBranchState {
1267 base_buffer: this.clone(),
1268 merged_operations: Default::default(),
1269 }),
1270 language: self.language.clone(),
1271 has_conflict: self.has_conflict,
1272 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1273 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1274 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1275 };
1276 if let Some(language_registry) = self.language_registry() {
1277 branch.set_language_registry(language_registry);
1278 }
1279
1280 // Reparse the branch buffer so that we get syntax highlighting immediately.
1281 branch.reparse(cx, true);
1282
1283 branch
1284 })
1285 }
1286
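    /// Computes an [`EditPreview`] for the given edits on a background thread,
    /// by applying them to a branch of this buffer's text and reparsing the
    /// result, without modifying this buffer.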
1287 pub fn preview_edits(
1288 &self,
1289 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1290 cx: &App,
1291 ) -> Task<EditPreview> {
1292 let registry = self.language_registry();
1293 let language = self.language().cloned();
1294 let old_snapshot = self.text.snapshot();
1295 let mut branch_buffer = self.text.branch();
1296 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1297 cx.background_spawn(async move {
1298 if !edits.is_empty() {
1299 if let Some(language) = language.clone() {
1300 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1301 }
1302
1303 branch_buffer.edit(edits.iter().cloned());
1304 let snapshot = branch_buffer.snapshot();
1305 syntax_snapshot.interpolate(&snapshot);
1306
1307 if let Some(language) = language {
1308 syntax_snapshot.reparse(&snapshot, registry, language);
1309 }
1310 }
1311 EditPreview {
1312 old_snapshot,
1313 applied_edits_snapshot: branch_buffer.snapshot(),
1314 syntax_snapshot,
1315 }
1316 })
1317 }
1318
1319 /// Applies all of the changes in this buffer that intersect any of the
1320 /// given `ranges` to its base buffer.
1321 ///
1322 /// If `ranges` is empty, then all changes will be applied. This buffer must
1323 /// be a branch buffer to call this method.
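    ///
    /// A sketch of merging every change from a branch back into its base
    /// (assuming `base` is an `Entity<Buffer>`):
    ///
    /// ```ignore
    /// let branch = base.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "hello ")], None, cx);
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```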
1324 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1325 let Some(base_buffer) = self.base_buffer() else {
1326 debug_panic!("not a branch buffer");
1327 return;
1328 };
1329
1330 let mut ranges = if ranges.is_empty() {
1331 &[0..usize::MAX]
1332 } else {
1333 ranges.as_slice()
1334 }
1335 .iter()
1336 .peekable();
1337
1338 let mut edits = Vec::new();
1339 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1340 let mut is_included = false;
1341 while let Some(range) = ranges.peek() {
1342 if range.end < edit.new.start {
1343 ranges.next().unwrap();
1344 } else {
1345 if range.start <= edit.new.end {
1346 is_included = true;
1347 }
1348 break;
1349 }
1350 }
1351
1352 if is_included {
1353 edits.push((
1354 edit.old.clone(),
1355 self.text_for_range(edit.new.clone()).collect::<String>(),
1356 ));
1357 }
1358 }
1359
1360 let operation = base_buffer.update(cx, |base_buffer, cx| {
1361 // cx.emit(BufferEvent::DiffBaseChanged);
1362 base_buffer.edit(edits, None, cx)
1363 });
1364
1365 if let Some(operation) = operation
1366 && let Some(BufferBranchState {
1367 merged_operations, ..
1368 }) = &mut self.branch_state
1369 {
1370 merged_operations.push(operation);
1371 }
1372 }
1373
1374 fn on_base_buffer_event(
1375 &mut self,
1376 _: Entity<Buffer>,
1377 event: &BufferEvent,
1378 cx: &mut Context<Self>,
1379 ) {
1380 let BufferEvent::Operation { operation, .. } = event else {
1381 return;
1382 };
1383 let Some(BufferBranchState {
1384 merged_operations, ..
1385 }) = &mut self.branch_state
1386 else {
1387 return;
1388 };
1389
1390 let mut operation_to_undo = None;
1391 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1392 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1393 {
1394 merged_operations.remove(ix);
1395 operation_to_undo = Some(operation.timestamp);
1396 }
1397
1398 self.apply_ops([operation.clone()], cx);
1399
1400 if let Some(timestamp) = operation_to_undo {
1401 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1402 self.undo_operations(counts, cx);
1403 }
1404 }
1405
1406 #[cfg(test)]
1407 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1408 &self.text
1409 }
1410
1411 /// Retrieve a snapshot of the buffer's raw text, without any
1412 /// language-related state like the syntax tree or diagnostics.
1413 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1414 self.text.snapshot()
1415 }
1416
1417 /// The file associated with the buffer, if any.
1418 pub fn file(&self) -> Option<&Arc<dyn File>> {
1419 self.file.as_ref()
1420 }
1421
1422 /// The version of the buffer that was last saved or reloaded from disk.
1423 pub fn saved_version(&self) -> &clock::Global {
1424 &self.saved_version
1425 }
1426
1427 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1428 pub fn saved_mtime(&self) -> Option<MTime> {
1429 self.saved_mtime
1430 }
1431
1432 /// Returns the character encoding of the buffer's file.
1433 pub fn encoding(&self) -> &'static Encoding {
1434 self.encoding
1435 }
1436
1437 /// Sets the character encoding of the buffer.
1438 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1439 self.encoding = encoding;
1440 }
1441
1442 /// Returns whether the buffer has a Byte Order Mark.
1443 pub fn has_bom(&self) -> bool {
1444 self.has_bom
1445 }
1446
1447 /// Sets whether the buffer has a Byte Order Mark.
1448 pub fn set_has_bom(&mut self, has_bom: bool) {
1449 self.has_bom = has_bom;
1450 }
1451
1452 /// Assign a language to the buffer.
1453 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1454 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1455 }
1456
1457 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1458 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1459 self.set_language_(language, true, cx);
1460 }
1461
1462 fn set_language_(
1463 &mut self,
1464 language: Option<Arc<Language>>,
1465 may_block: bool,
1466 cx: &mut Context<Self>,
1467 ) {
1468 self.non_text_state_update_count += 1;
1469 self.syntax_map.lock().clear(&self.text);
1470 let old_language = std::mem::replace(&mut self.language, language);
1471 self.was_changed();
1472 self.reparse(cx, may_block);
1473 let has_fresh_language =
1474 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1475 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1476 }
1477
1478 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1479 /// other languages if parts of the buffer are written in different languages.
1480 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1481 self.syntax_map
1482 .lock()
1483 .set_language_registry(language_registry);
1484 }
1485
1486 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1487 self.syntax_map.lock().language_registry()
1488 }
1489
1490 /// Assign the line ending type to the buffer.
1491 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1492 self.text.set_line_ending(line_ending);
1493
1494 let lamport_timestamp = self.text.lamport_clock.tick();
1495 self.send_operation(
1496 Operation::UpdateLineEnding {
1497 line_ending,
1498 lamport_timestamp,
1499 },
1500 true,
1501 cx,
1502 );
1503 }
1504
1505 /// Assign the buffer a new [`Capability`].
1506 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1507 if self.capability != capability {
1508 self.capability = capability;
1509 cx.emit(BufferEvent::CapabilityChanged)
1510 }
1511 }
1512
1513 /// This method is called to signal that the buffer has been saved.
1514 pub fn did_save(
1515 &mut self,
1516 version: clock::Global,
1517 mtime: Option<MTime>,
1518 cx: &mut Context<Self>,
1519 ) {
1520 self.saved_version = version.clone();
1521 self.has_unsaved_edits.set((version, false));
1522 self.has_conflict = false;
1523 self.saved_mtime = mtime;
1524 self.was_changed();
1525 cx.emit(BufferEvent::Saved);
1526 cx.notify();
1527 }
1528
1529 /// Reloads the contents of the buffer from disk.
1530 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1531 let (tx, rx) = futures::channel::oneshot::channel();
1532 let prev_version = self.text.version();
1533 self.reload_task = Some(cx.spawn(async move |this, cx| {
1534 let Some((new_mtime, load_bytes_task, encoding)) = this.update(cx, |this, cx| {
1535 let file = this.file.as_ref()?.as_local()?;
1536 Some((
1537 file.disk_state().mtime(),
1538 file.load_bytes(cx),
1539 this.encoding,
1540 ))
1541 })?
1542 else {
1543 return Ok(());
1544 };
1545
1546 let bytes = load_bytes_task.await?;
1547 let (cow, _encoding_used, _has_errors) = encoding.decode(&bytes);
1548 let new_text = cow.into_owned();
1549
1550 let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await;
1551 this.update(cx, |this, cx| {
1552 if this.version() == diff.base_version {
1553 this.finalize_last_transaction();
1554 this.apply_diff(diff, cx);
1555 tx.send(this.finalize_last_transaction().cloned()).ok();
1556 this.has_conflict = false;
1557 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1558 } else {
1559 if !diff.edits.is_empty()
1560 || this
1561 .edits_since::<usize>(&diff.base_version)
1562 .next()
1563 .is_some()
1564 {
1565 this.has_conflict = true;
1566 }
1567
1568 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1569 }
1570
1571 this.reload_task.take();
1572 })
1573 }));
1574 rx
1575 }
1576
1577 /// This method is called to signal that the buffer has been reloaded.
1578 pub fn did_reload(
1579 &mut self,
1580 version: clock::Global,
1581 line_ending: LineEnding,
1582 mtime: Option<MTime>,
1583 cx: &mut Context<Self>,
1584 ) {
1585 self.saved_version = version;
1586 self.has_unsaved_edits
1587 .set((self.saved_version.clone(), false));
1588 self.text.set_line_ending(line_ending);
1589 self.saved_mtime = mtime;
1590 cx.emit(BufferEvent::Reloaded);
1591 cx.notify();
1592 }
1593
1594 /// Updates the [`File`] backing this buffer. This should be called when
1595 /// the file has changed or has been deleted.
1596 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1597 let was_dirty = self.is_dirty();
1598 let mut file_changed = false;
1599
1600 if let Some(old_file) = self.file.as_ref() {
1601 if new_file.path() != old_file.path() {
1602 file_changed = true;
1603 }
1604
1605 let old_state = old_file.disk_state();
1606 let new_state = new_file.disk_state();
1607 if old_state != new_state {
1608 file_changed = true;
1609 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1610 cx.emit(BufferEvent::ReloadNeeded)
1611 }
1612 }
1613 } else {
1614 file_changed = true;
1615 };
1616
1617 self.file = Some(new_file);
1618 if file_changed {
1619 self.was_changed();
1620 self.non_text_state_update_count += 1;
1621 if was_dirty != self.is_dirty() {
1622 cx.emit(BufferEvent::DirtyChanged);
1623 }
1624 cx.emit(BufferEvent::FileHandleChanged);
1625 cx.notify();
1626 }
1627 }
1628
1629 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1630 Some(self.branch_state.as_ref()?.base_buffer.clone())
1631 }
1632
1633 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1634 pub fn language(&self) -> Option<&Arc<Language>> {
1635 self.language.as_ref()
1636 }
1637
1638 /// Returns the [`Language`] at the given location.
1639 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1640 let offset = position.to_offset(self);
1641 let mut is_first = true;
1642 let start_anchor = self.anchor_before(offset);
1643 let end_anchor = self.anchor_after(offset);
1644 self.syntax_map
1645 .lock()
1646 .layers_for_range(offset..offset, &self.text, false)
1647 .filter(|layer| {
1648 if is_first {
1649 is_first = false;
1650 return true;
1651 }
1652
1653 layer
1654 .included_sub_ranges
1655 .map(|sub_ranges| {
1656 sub_ranges.iter().any(|sub_range| {
1657 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1658 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1659 !is_before_start && !is_after_end
1660 })
1661 })
1662 .unwrap_or(true)
1663 })
1664 .last()
1665 .map(|info| info.language.clone())
1666 .or_else(|| self.language.clone())
1667 }
1668
1669 /// Returns each [`Language`] for the active syntax layers at the given location.
1670 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1671 let offset = position.to_offset(self);
1672 let mut languages: Vec<Arc<Language>> = self
1673 .syntax_map
1674 .lock()
1675 .layers_for_range(offset..offset, &self.text, false)
1676 .map(|info| info.language.clone())
1677 .collect();
1678
1679 if languages.is_empty()
1680 && let Some(buffer_language) = self.language()
1681 {
1682 languages.push(buffer_language.clone());
1683 }
1684
1685 languages
1686 }
1687
1688 /// An integer version number that accounts for all updates besides
1689 /// the buffer's text itself (which is versioned via a version vector).
1690 pub fn non_text_state_update_count(&self) -> usize {
1691 self.non_text_state_update_count
1692 }
1693
1694 /// Whether the buffer is being parsed in the background.
1695 #[cfg(any(test, feature = "test-support"))]
1696 pub fn is_parsing(&self) -> bool {
1697 self.reparse.is_some()
1698 }
1699
1700 /// Indicates whether the buffer contains any regions that may be
1701 /// written in a language that hasn't been loaded yet.
1702 pub fn contains_unknown_injections(&self) -> bool {
1703 self.syntax_map.lock().contains_unknown_injections()
1704 }
1705
1706 #[cfg(any(test, feature = "test-support"))]
1707 pub fn set_sync_parse_timeout(&mut self, timeout: Option<Duration>) {
1708 self.sync_parse_timeout = timeout;
1709 }
1710
1711 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1712 match Arc::get_mut(&mut self.tree_sitter_data) {
1713 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1714 None => {
1715 let tree_sitter_data = TreeSitterData::new(snapshot);
1716 self.tree_sitter_data = Arc::new(tree_sitter_data)
1717 }
1718 }
1719 }
1720
1721 /// Called after an edit to synchronize the buffer's main parse tree with
1722 /// the buffer's new underlying state.
1723 ///
1724 /// Locks the syntax map and interpolates the edits since the last reparse
1725 /// into the foreground syntax tree.
1726 ///
1727 /// Then takes a stable snapshot of the syntax map before unlocking it.
1728 /// The snapshot with the interpolated edits is sent to a background thread,
1729 /// where we ask Tree-sitter to perform an incremental parse.
1730 ///
    /// Meanwhile, in the foreground, if `may_block` is true and a sync parse
    /// timeout is configured, we block the main thread for up to that timeout
    /// waiting on the parse to complete. If it finishes within that budget, we
    /// proceed synchronously; otherwise the timeout elapses and we fall back to
    /// the asynchronous path below.
    ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, it calls
    /// back into the main thread and assigns the result to the foreground parse
    /// state.
1739 ///
1740 /// If the buffer or grammar changed since the start of the background parse,
1741 /// initiate an additional reparse recursively. To avoid concurrent parses
1742 /// for the same buffer, we only initiate a new parse if we are not already
1743 /// parsing in the background.
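    ///
    /// A minimal sketch of how this might be driven (test-style usage; the
    /// surrounding async context and `cx` plumbing are assumed, not part of this
    /// method's contract):
    ///
    /// ```ignore
    /// buffer.reparse(cx, false);    // interpolate edits, parse in the background
    /// buffer.parsing_idle().await;  // resolves once the parse status returns to Idle
    /// ```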
1744 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1745 if self.text.version() != *self.tree_sitter_data.version() {
1746 self.invalidate_tree_sitter_data(self.text.snapshot());
1747 }
1748 if self.reparse.is_some() {
1749 return;
1750 }
1751 let language = if let Some(language) = self.language.clone() {
1752 language
1753 } else {
1754 return;
1755 };
1756
1757 let text = self.text_snapshot();
1758 let parsed_version = self.version();
1759
1760 let mut syntax_map = self.syntax_map.lock();
1761 syntax_map.interpolate(&text);
1762 let language_registry = syntax_map.language_registry();
1763 let mut syntax_snapshot = syntax_map.snapshot();
1764 drop(syntax_map);
1765
1766 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1767 if may_block && let Some(sync_parse_timeout) = self.sync_parse_timeout {
1768 if let Ok(()) = syntax_snapshot.reparse_with_timeout(
1769 &text,
1770 language_registry.clone(),
1771 language.clone(),
1772 sync_parse_timeout,
1773 ) {
1774 self.did_finish_parsing(syntax_snapshot, Duration::from_millis(300), cx);
1775 self.reparse = None;
1776 return;
1777 }
1778 }
1779
1780 let parse_task = cx.background_spawn({
1781 let language = language.clone();
1782 let language_registry = language_registry.clone();
1783 async move {
1784 syntax_snapshot.reparse(&text, language_registry, language);
1785 syntax_snapshot
1786 }
1787 });
1788
1789 self.reparse = Some(cx.spawn(async move |this, cx| {
1790 let new_syntax_map = parse_task.await;
1791 this.update(cx, move |this, cx| {
1792 let grammar_changed = || {
1793 this.language
1794 .as_ref()
1795 .is_none_or(|current_language| !Arc::ptr_eq(&language, current_language))
1796 };
1797 let language_registry_changed = || {
1798 new_syntax_map.contains_unknown_injections()
1799 && language_registry.is_some_and(|registry| {
1800 registry.version() != new_syntax_map.language_registry_version()
1801 })
1802 };
1803 let parse_again = this.version.changed_since(&parsed_version)
1804 || language_registry_changed()
1805 || grammar_changed();
1806 this.did_finish_parsing(new_syntax_map, Duration::ZERO, cx);
1807 this.reparse = None;
1808 if parse_again {
1809 this.reparse(cx, false);
1810 }
1811 })
1812 .ok();
1813 }));
1814 }
1815
1816 fn did_finish_parsing(
1817 &mut self,
1818 syntax_snapshot: SyntaxSnapshot,
1819 block_budget: Duration,
1820 cx: &mut Context<Self>,
1821 ) {
1822 self.non_text_state_update_count += 1;
1823 self.syntax_map.lock().did_parse(syntax_snapshot);
1824 self.was_changed();
1825 self.request_autoindent(cx, block_budget);
1826 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1827 self.invalidate_tree_sitter_data(self.text.snapshot());
1828 cx.emit(BufferEvent::Reparsed);
1829 cx.notify();
1830 }
1831
1832 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1833 self.parse_status.1.clone()
1834 }
1835
    /// Waits until the buffer is no longer parsing.
1837 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1838 let mut parse_status = self.parse_status();
1839 async move {
1840 while *parse_status.borrow() != ParseStatus::Idle {
1841 if parse_status.changed().await.is_err() {
1842 break;
1843 }
1844 }
1845 }
1846 }
1847
1848 /// Assign to the buffer a set of diagnostics created by a given language server.
1849 pub fn update_diagnostics(
1850 &mut self,
1851 server_id: LanguageServerId,
1852 diagnostics: DiagnosticSet,
1853 cx: &mut Context<Self>,
1854 ) {
1855 let lamport_timestamp = self.text.lamport_clock.tick();
1856 let op = Operation::UpdateDiagnostics {
1857 server_id,
1858 diagnostics: diagnostics.iter().cloned().collect(),
1859 lamport_timestamp,
1860 };
1861
1862 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1863 self.send_operation(op, true, cx);
1864 }
1865
1866 pub fn buffer_diagnostics(
1867 &self,
1868 for_server: Option<LanguageServerId>,
1869 ) -> Vec<&DiagnosticEntry<Anchor>> {
1870 match for_server {
1871 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1872 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1873 Err(_) => Vec::new(),
1874 },
1875 None => self
1876 .diagnostics
1877 .iter()
1878 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1879 .collect(),
1880 }
1881 }
1882
1883 fn request_autoindent(&mut self, cx: &mut Context<Self>, block_budget: Duration) {
1884 if let Some(indent_sizes) = self.compute_autoindents() {
1885 let indent_sizes = cx.background_spawn(indent_sizes);
1886 match cx
1887 .background_executor()
1888 .block_with_timeout(block_budget, indent_sizes)
1889 {
1890 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1891 Err(indent_sizes) => {
1892 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1893 let indent_sizes = indent_sizes.await;
1894 this.update(cx, |this, cx| {
1895 this.apply_autoindents(indent_sizes, cx);
1896 })
1897 .ok();
1898 }));
1899 }
1900 }
1901 } else {
1902 self.autoindent_requests.clear();
1903 for tx in self.wait_for_autoindent_txs.drain(..) {
1904 tx.send(()).ok();
1905 }
1906 }
1907 }
1908
1909 fn compute_autoindents(
1910 &self,
1911 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1912 let max_rows_between_yields = 100;
1913 let snapshot = self.snapshot();
1914 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1915 return None;
1916 }
1917
1918 let autoindent_requests = self.autoindent_requests.clone();
1919 Some(async move {
1920 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1921 for request in autoindent_requests {
1922 // Resolve each edited range to its row in the current buffer and in the
1923 // buffer before this batch of edits.
1924 let mut row_ranges = Vec::new();
1925 let mut old_to_new_rows = BTreeMap::new();
1926 let mut language_indent_sizes_by_new_row = Vec::new();
1927 for entry in &request.entries {
1928 let position = entry.range.start;
1929 let new_row = position.to_point(&snapshot).row;
1930 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1931 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1932
1933 if let Some(old_row) = entry.old_row {
1934 old_to_new_rows.insert(old_row, new_row);
1935 }
1936 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1937 }
1938
1939 // Build a map containing the suggested indentation for each of the edited lines
1940 // with respect to the state of the buffer before these edits. This map is keyed
1941 // by the rows for these lines in the current state of the buffer.
1942 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1943 let old_edited_ranges =
1944 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1945 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1946 let mut language_indent_size = IndentSize::default();
1947 for old_edited_range in old_edited_ranges {
1948 let suggestions = request
1949 .before_edit
1950 .suggest_autoindents(old_edited_range.clone())
1951 .into_iter()
1952 .flatten();
1953 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1954 if let Some(suggestion) = suggestion {
1955 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1956
1957 // Find the indent size based on the language for this row.
1958 while let Some((row, size)) = language_indent_sizes.peek() {
1959 if *row > new_row {
1960 break;
1961 }
1962 language_indent_size = *size;
1963 language_indent_sizes.next();
1964 }
1965
1966 let suggested_indent = old_to_new_rows
1967 .get(&suggestion.basis_row)
1968 .and_then(|from_row| {
1969 Some(old_suggestions.get(from_row).copied()?.0)
1970 })
1971 .unwrap_or_else(|| {
1972 request
1973 .before_edit
1974 .indent_size_for_line(suggestion.basis_row)
1975 })
1976 .with_delta(suggestion.delta, language_indent_size);
1977 old_suggestions
1978 .insert(new_row, (suggested_indent, suggestion.within_error));
1979 }
1980 }
1981 yield_now().await;
1982 }
1983
1984 // Compute new suggestions for each line, but only include them in the result
1985 // if they differ from the old suggestion for that line.
1986 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1987 let mut language_indent_size = IndentSize::default();
1988 for (row_range, original_indent_column) in row_ranges {
1989 let new_edited_row_range = if request.is_block_mode {
1990 row_range.start..row_range.start + 1
1991 } else {
1992 row_range.clone()
1993 };
1994
1995 let suggestions = snapshot
1996 .suggest_autoindents(new_edited_row_range.clone())
1997 .into_iter()
1998 .flatten();
1999 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
2000 if let Some(suggestion) = suggestion {
2001 // Find the indent size based on the language for this row.
2002 while let Some((row, size)) = language_indent_sizes.peek() {
2003 if *row > new_row {
2004 break;
2005 }
2006 language_indent_size = *size;
2007 language_indent_sizes.next();
2008 }
2009
2010 let suggested_indent = indent_sizes
2011 .get(&suggestion.basis_row)
2012 .copied()
2013 .map(|e| e.0)
2014 .unwrap_or_else(|| {
2015 snapshot.indent_size_for_line(suggestion.basis_row)
2016 })
2017 .with_delta(suggestion.delta, language_indent_size);
2018
2019 if old_suggestions.get(&new_row).is_none_or(
2020 |(old_indentation, was_within_error)| {
2021 suggested_indent != *old_indentation
2022 && (!suggestion.within_error || *was_within_error)
2023 },
2024 ) {
2025 indent_sizes.insert(
2026 new_row,
2027 (suggested_indent, request.ignore_empty_lines),
2028 );
2029 }
2030 }
2031 }
2032
2033 if let (true, Some(original_indent_column)) =
2034 (request.is_block_mode, original_indent_column)
2035 {
2036 let new_indent =
2037 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2038 *indent
2039 } else {
2040 snapshot.indent_size_for_line(row_range.start)
2041 };
2042 let delta = new_indent.len as i64 - original_indent_column as i64;
2043 if delta != 0 {
2044 for row in row_range.skip(1) {
2045 indent_sizes.entry(row).or_insert_with(|| {
2046 let mut size = snapshot.indent_size_for_line(row);
2047 if size.kind == new_indent.kind {
2048 match delta.cmp(&0) {
2049 Ordering::Greater => size.len += delta as u32,
2050 Ordering::Less => {
2051 size.len = size.len.saturating_sub(-delta as u32)
2052 }
2053 Ordering::Equal => {}
2054 }
2055 }
2056 (size, request.ignore_empty_lines)
2057 });
2058 }
2059 }
2060 }
2061
2062 yield_now().await;
2063 }
2064 }
2065
2066 indent_sizes
2067 .into_iter()
2068 .filter_map(|(row, (indent, ignore_empty_lines))| {
2069 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2070 None
2071 } else {
2072 Some((row, indent))
2073 }
2074 })
2075 .collect()
2076 })
2077 }
2078
2079 fn apply_autoindents(
2080 &mut self,
2081 indent_sizes: BTreeMap<u32, IndentSize>,
2082 cx: &mut Context<Self>,
2083 ) {
2084 self.autoindent_requests.clear();
2085 for tx in self.wait_for_autoindent_txs.drain(..) {
2086 tx.send(()).ok();
2087 }
2088
2089 let edits: Vec<_> = indent_sizes
2090 .into_iter()
2091 .filter_map(|(row, indent_size)| {
2092 let current_size = indent_size_for_line(self, row);
2093 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2094 })
2095 .collect();
2096
2097 let preserve_preview = self.preserve_preview();
2098 self.edit(edits, None, cx);
2099 if preserve_preview {
2100 self.refresh_preview();
2101 }
2102 }
2103
2104 /// Create a minimal edit that will cause the given row to be indented
2105 /// with the given size. After applying this edit, the length of the line
2106 /// will always be at least `new_size.len`.
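    ///
    /// A small illustrative sketch (the row and sizes are arbitrary; `IndentSize::spaces`
    /// is the helper used elsewhere in this crate). Growing a 2-space indent to 4 spaces
    /// inserts two spaces at column 0:
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     0,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// // => Some((Point::new(0, 0)..Point::new(0, 0), "  ".to_string()))
    /// ```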
2107 pub fn edit_for_indent_size_adjustment(
2108 row: u32,
2109 current_size: IndentSize,
2110 new_size: IndentSize,
2111 ) -> Option<(Range<Point>, String)> {
2112 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
2114 Ordering::Greater => {
2115 let point = Point::new(row, 0);
2116 Some((
2117 point..point,
2118 iter::repeat(new_size.char())
2119 .take((new_size.len - current_size.len) as usize)
2120 .collect::<String>(),
2121 ))
2122 }
2123
2124 Ordering::Less => Some((
2125 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2126 String::new(),
2127 )),
2128
2129 Ordering::Equal => None,
2130 }
2131 } else {
2132 Some((
2133 Point::new(row, 0)..Point::new(row, current_size.len),
2134 iter::repeat(new_size.char())
2135 .take(new_size.len as usize)
2136 .collect::<String>(),
2137 ))
2138 }
2139 }
2140
2141 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2142 /// and the given new text.
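    ///
    /// A hedged sketch of the intended round trip with [`Buffer::apply_diff`]
    /// (assumes `buffer` is an `Entity<Buffer>` and that the async context and
    /// `cx` plumbing already exist):
    ///
    /// ```ignore
    /// let task = buffer.read(cx).diff(new_text, cx);
    /// let diff = task.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// });
    /// ```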
2143 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2144 let old_text = self.as_rope().clone();
2145 let base_version = self.version();
2146 cx.background_executor()
2147 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2148 let old_text = old_text.to_string();
2149 let line_ending = LineEnding::detect(&new_text);
2150 LineEnding::normalize(&mut new_text);
2151 let edits = text_diff(&old_text, &new_text);
2152 Diff {
2153 base_version,
2154 line_ending,
2155 edits,
2156 }
2157 })
2158 }
2159
2160 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2162 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2163 let old_text = self.as_rope().clone();
2164 let line_ending = self.line_ending();
2165 let base_version = self.version();
2166 cx.background_spawn(async move {
2167 let ranges = trailing_whitespace_ranges(&old_text);
2168 let empty = Arc::<str>::from("");
2169 Diff {
2170 base_version,
2171 line_ending,
2172 edits: ranges
2173 .into_iter()
2174 .map(|range| (range, empty.clone()))
2175 .collect(),
2176 }
2177 })
2178 }
2179
2180 /// Ensures that the buffer ends with a single newline character, and
2181 /// no other whitespace. Skips if the buffer is empty.
2182 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2183 let len = self.len();
2184 if len == 0 {
2185 return;
2186 }
2187 let mut offset = len;
2188 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2189 let non_whitespace_len = chunk
2190 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2191 .len();
2192 offset -= chunk.len();
2193 offset += non_whitespace_len;
2194 if non_whitespace_len != 0 {
2195 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2196 return;
2197 }
2198 break;
2199 }
2200 }
2201 self.edit([(offset..len, "\n")], None, cx);
2202 }
2203
2204 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any
    /// parts of the diff that conflict with those changes are discarded.
2207 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2208 let snapshot = self.snapshot();
2209 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2210 let mut delta = 0;
2211 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2212 while let Some(edit_since) = edits_since.peek() {
2213 // If the edit occurs after a diff hunk, then it does not
2214 // affect that hunk.
2215 if edit_since.old.start > range.end {
2216 break;
2217 }
2218 // If the edit precedes the diff hunk, then adjust the hunk
2219 // to reflect the edit.
2220 else if edit_since.old.end < range.start {
2221 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2222 edits_since.next();
2223 }
2224 // If the edit intersects a diff hunk, then discard that hunk.
2225 else {
2226 return None;
2227 }
2228 }
2229
2230 let start = (range.start as i64 + delta) as usize;
2231 let end = (range.end as i64 + delta) as usize;
2232 Some((start..end, new_text))
2233 });
2234
2235 self.start_transaction();
2236 self.text.set_line_ending(diff.line_ending);
2237 self.edit(adjusted_edits, None, cx);
2238 self.end_transaction(cx)
2239 }
2240
2241 pub fn has_unsaved_edits(&self) -> bool {
2242 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2243
2244 if last_version == self.version {
2245 self.has_unsaved_edits
2246 .set((last_version, has_unsaved_edits));
2247 return has_unsaved_edits;
2248 }
2249
2250 let has_edits = self.has_edits_since(&self.saved_version);
2251 self.has_unsaved_edits
2252 .set((self.version.clone(), has_edits));
2253 has_edits
2254 }
2255
2256 /// Checks if the buffer has unsaved changes.
2257 pub fn is_dirty(&self) -> bool {
2258 if self.capability == Capability::ReadOnly {
2259 return false;
2260 }
2261 if self.has_conflict {
2262 return true;
2263 }
2264 match self.file.as_ref().map(|f| f.disk_state()) {
2265 Some(DiskState::New) | Some(DiskState::Deleted) => {
2266 !self.is_empty() && self.has_unsaved_edits()
2267 }
2268 _ => self.has_unsaved_edits(),
2269 }
2270 }
2271
2272 /// Marks the buffer as having a conflict regardless of current buffer state.
2273 pub fn set_conflict(&mut self) {
2274 self.has_conflict = true;
2275 }
2276
2277 /// Checks if the buffer and its file have both changed since the buffer
2278 /// was last saved or reloaded.
2279 pub fn has_conflict(&self) -> bool {
2280 if self.has_conflict {
2281 return true;
2282 }
2283 let Some(file) = self.file.as_ref() else {
2284 return false;
2285 };
2286 match file.disk_state() {
2287 DiskState::New => false,
2288 DiskState::Present { mtime } => match self.saved_mtime {
2289 Some(saved_mtime) => {
2290 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2291 }
2292 None => true,
2293 },
2294 DiskState::Deleted => false,
2295 DiskState::Historic { .. } => false,
2296 }
2297 }
2298
2299 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2300 pub fn subscribe(&mut self) -> Subscription<usize> {
2301 self.text.subscribe()
2302 }
2303
2304 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2305 ///
2306 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
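    ///
    /// A minimal sketch of the intended usage (the edit and `cx` are illustrative):
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// buffer.edit([(0..0, "x")], None, cx);
    /// assert!(changed.get()); // the bit is set as soon as the text changes
    /// ```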
2308 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2309 if let Err(ix) = self
2310 .change_bits
2311 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2312 {
2313 self.change_bits.insert(ix, bit);
2314 }
2315 }
2316
2317 /// Set the change bit for all "listeners".
2318 fn was_changed(&mut self) {
2319 self.change_bits.retain(|change_bit| {
2320 change_bit
2321 .upgrade()
2322 .inspect(|bit| {
2323 _ = bit.replace(true);
2324 })
2325 .is_some()
2326 });
2327 }
2328
2329 /// Starts a transaction, if one is not already in-progress. When undoing or
2330 /// redoing edits, all of the edits performed within a transaction are undone
2331 /// or redone together.
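    ///
    /// A sketch of grouping several edits into one undoable step (the edits are
    /// illustrative):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
    /// let transaction_id = buffer.end_transaction(cx);
    /// // A single undo now reverts both edits together.
    /// ```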
2332 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2333 self.start_transaction_at(Instant::now())
2334 }
2335
2336 /// Starts a transaction, providing the current time. Subsequent transactions
2337 /// that occur within a short period of time will be grouped together. This
2338 /// is controlled by the buffer's undo grouping duration.
2339 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2340 self.transaction_depth += 1;
2341 if self.was_dirty_before_starting_transaction.is_none() {
2342 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2343 }
2344 self.text.start_transaction_at(now)
2345 }
2346
2347 /// Terminates the current transaction, if this is the outermost transaction.
2348 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2349 self.end_transaction_at(Instant::now(), cx)
2350 }
2351
2352 /// Terminates the current transaction, providing the current time. Subsequent transactions
2353 /// that occur within a short period of time will be grouped together. This
2354 /// is controlled by the buffer's undo grouping duration.
2355 pub fn end_transaction_at(
2356 &mut self,
2357 now: Instant,
2358 cx: &mut Context<Self>,
2359 ) -> Option<TransactionId> {
2360 assert!(self.transaction_depth > 0);
2361 self.transaction_depth -= 1;
2362 let was_dirty = if self.transaction_depth == 0 {
2363 self.was_dirty_before_starting_transaction.take().unwrap()
2364 } else {
2365 false
2366 };
2367 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2368 self.did_edit(&start_version, was_dirty, cx);
2369 Some(transaction_id)
2370 } else {
2371 None
2372 }
2373 }
2374
2375 /// Manually add a transaction to the buffer's undo history.
2376 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2377 self.text.push_transaction(transaction, now);
2378 }
2379
2380 /// Differs from `push_transaction` in that it does not clear the redo
2381 /// stack. Intended to be used to create a parent transaction to merge
2382 /// potential child transactions into.
2383 ///
2384 /// The caller is responsible for removing it from the undo history using
2385 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2386 /// are merged into this transaction, the caller is responsible for ensuring
2387 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2388 /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
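    ///
    /// A hedged sketch of that workflow (`child_transaction_id` is illustrative):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// // ... perform edits inside start_transaction/end_transaction pairs ...
    /// match child_transaction_id {
    ///     Some(child) => buffer.merge_transactions(child, parent),
    ///     None => {
    ///         buffer.forget_transaction(parent);
    ///     }
    /// }
    /// ```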
2391 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2392 self.text.push_empty_transaction(now)
2393 }
2394
2395 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2397 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2398 self.text.finalize_last_transaction()
2399 }
2400
2401 /// Manually group all changes since a given transaction.
2402 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2403 self.text.group_until_transaction(transaction_id);
2404 }
2405
2406 /// Manually remove a transaction from the buffer's undo history
2407 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2408 self.text.forget_transaction(transaction_id)
2409 }
2410
2411 /// Retrieve a transaction from the buffer's undo history
2412 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2413 self.text.get_transaction(transaction_id)
2414 }
2415
2416 /// Manually merge two transactions in the buffer's undo history.
2417 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2418 self.text.merge_transactions(transaction, destination);
2419 }
2420
2421 /// Waits for the buffer to receive operations with the given timestamps.
2422 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2423 &mut self,
2424 edit_ids: It,
2425 ) -> impl Future<Output = Result<()>> + use<It> {
2426 self.text.wait_for_edits(edit_ids)
2427 }
2428
2429 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2430 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2431 &mut self,
2432 anchors: It,
2433 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2434 self.text.wait_for_anchors(anchors)
2435 }
2436
2437 /// Waits for the buffer to receive operations up to the given version.
2438 pub fn wait_for_version(
2439 &mut self,
2440 version: clock::Global,
2441 ) -> impl Future<Output = Result<()>> + use<> {
2442 self.text.wait_for_version(version)
2443 }
2444
2445 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2447 pub fn give_up_waiting(&mut self) {
2448 self.text.give_up_waiting();
2449 }
2450
2451 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2452 let mut rx = None;
2453 if !self.autoindent_requests.is_empty() {
2454 let channel = oneshot::channel();
2455 self.wait_for_autoindent_txs.push(channel.0);
2456 rx = Some(channel.1);
2457 }
2458 rx
2459 }
2460
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2462 pub fn set_active_selections(
2463 &mut self,
2464 selections: Arc<[Selection<Anchor>]>,
2465 line_mode: bool,
2466 cursor_shape: CursorShape,
2467 cx: &mut Context<Self>,
2468 ) {
2469 let lamport_timestamp = self.text.lamport_clock.tick();
2470 self.remote_selections.insert(
2471 self.text.replica_id(),
2472 SelectionSet {
2473 selections: selections.clone(),
2474 lamport_timestamp,
2475 line_mode,
2476 cursor_shape,
2477 },
2478 );
2479 self.send_operation(
2480 Operation::UpdateSelections {
2481 selections,
2482 line_mode,
2483 lamport_timestamp,
2484 cursor_shape,
2485 },
2486 true,
2487 cx,
2488 );
2489 self.non_text_state_update_count += 1;
2490 cx.notify();
2491 }
2492
2493 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2494 /// this replica.
2495 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2496 if self
2497 .remote_selections
2498 .get(&self.text.replica_id())
2499 .is_none_or(|set| !set.selections.is_empty())
2500 {
2501 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2502 }
2503 }
2504
2505 pub fn set_agent_selections(
2506 &mut self,
2507 selections: Arc<[Selection<Anchor>]>,
2508 line_mode: bool,
2509 cursor_shape: CursorShape,
2510 cx: &mut Context<Self>,
2511 ) {
2512 let lamport_timestamp = self.text.lamport_clock.tick();
2513 self.remote_selections.insert(
2514 ReplicaId::AGENT,
2515 SelectionSet {
2516 selections,
2517 lamport_timestamp,
2518 line_mode,
2519 cursor_shape,
2520 },
2521 );
2522 self.non_text_state_update_count += 1;
2523 cx.notify();
2524 }
2525
2526 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2527 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2528 }
2529
2530 /// Replaces the buffer's entire text.
2531 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2532 where
2533 T: Into<Arc<str>>,
2534 {
2535 self.autoindent_requests.clear();
2536 self.edit([(0..self.len(), text)], None, cx)
2537 }
2538
2539 /// Appends the given text to the end of the buffer.
2540 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2541 where
2542 T: Into<Arc<str>>,
2543 {
2544 self.edit([(self.len()..self.len(), text)], None, cx)
2545 }
2546
2547 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2548 /// delete, and a string of text to insert at that location. Adjacent edits are coalesced.
2549 ///
2550 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2551 /// request for the edited ranges, which will be processed when the buffer finishes
2552 /// parsing.
2553 ///
    /// Parsing takes place at the end of a transaction, and may run synchronously
2555 /// or asynchronously, depending on the changes.
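    ///
    /// A minimal sketch (the range and inserted text are illustrative):
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [(Point::new(1, 0)..Point::new(1, 0), "if ready {\n}\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```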
2556 pub fn edit<I, S, T>(
2557 &mut self,
2558 edits_iter: I,
2559 autoindent_mode: Option<AutoindentMode>,
2560 cx: &mut Context<Self>,
2561 ) -> Option<clock::Lamport>
2562 where
2563 I: IntoIterator<Item = (Range<S>, T)>,
2564 S: ToOffset,
2565 T: Into<Arc<str>>,
2566 {
2567 self.edit_internal(edits_iter, autoindent_mode, true, cx)
2568 }
2569
2570 /// Like [`edit`](Self::edit), but does not coalesce adjacent edits.
2571 pub fn edit_non_coalesce<I, S, T>(
2572 &mut self,
2573 edits_iter: I,
2574 autoindent_mode: Option<AutoindentMode>,
2575 cx: &mut Context<Self>,
2576 ) -> Option<clock::Lamport>
2577 where
2578 I: IntoIterator<Item = (Range<S>, T)>,
2579 S: ToOffset,
2580 T: Into<Arc<str>>,
2581 {
2582 self.edit_internal(edits_iter, autoindent_mode, false, cx)
2583 }
2584
2585 fn edit_internal<I, S, T>(
2586 &mut self,
2587 edits_iter: I,
2588 autoindent_mode: Option<AutoindentMode>,
2589 coalesce_adjacent: bool,
2590 cx: &mut Context<Self>,
2591 ) -> Option<clock::Lamport>
2592 where
2593 I: IntoIterator<Item = (Range<S>, T)>,
2594 S: ToOffset,
2595 T: Into<Arc<str>>,
2596 {
2597 // Skip invalid edits and coalesce contiguous ones.
2598 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2599
2600 for (range, new_text) in edits_iter {
2601 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2602
2603 if range.start > range.end {
2604 mem::swap(&mut range.start, &mut range.end);
2605 }
2606 let new_text = new_text.into();
2607 if !new_text.is_empty() || !range.is_empty() {
2608 let prev_edit = edits.last_mut();
2609 let should_coalesce = prev_edit.as_ref().is_some_and(|(prev_range, _)| {
2610 if coalesce_adjacent {
2611 prev_range.end >= range.start
2612 } else {
2613 prev_range.end > range.start
2614 }
2615 });
2616
2617 if let Some((prev_range, prev_text)) = prev_edit
2618 && should_coalesce
2619 {
2620 prev_range.end = cmp::max(prev_range.end, range.end);
2621 *prev_text = format!("{prev_text}{new_text}").into();
2622 } else {
2623 edits.push((range, new_text));
2624 }
2625 }
2626 }
2627 if edits.is_empty() {
2628 return None;
2629 }
2630
2631 self.start_transaction();
2632 self.pending_autoindent.take();
2633 let autoindent_request = autoindent_mode
2634 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2635
2636 let edit_operation = self.text.edit(edits.iter().cloned());
2637 let edit_id = edit_operation.timestamp();
2638
2639 if let Some((before_edit, mode)) = autoindent_request {
2640 let mut delta = 0isize;
2641 let mut previous_setting = None;
2642 let entries: Vec<_> = edits
2643 .into_iter()
2644 .enumerate()
2645 .zip(&edit_operation.as_edit().unwrap().new_text)
2646 .filter(|((_, (range, _)), _)| {
2647 let language = before_edit.language_at(range.start);
2648 let language_id = language.map(|l| l.id());
2649 if let Some((cached_language_id, auto_indent)) = previous_setting
2650 && cached_language_id == language_id
2651 {
2652 auto_indent
2653 } else {
2654 // The auto-indent setting is not present in editorconfigs, hence
2655 // we can avoid passing the file here.
2656 let auto_indent =
2657 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2658 previous_setting = Some((language_id, auto_indent));
2659 auto_indent
2660 }
2661 })
2662 .map(|((ix, (range, _)), new_text)| {
2663 let new_text_length = new_text.len();
2664 let old_start = range.start.to_point(&before_edit);
2665 let new_start = (delta + range.start as isize) as usize;
2666 let range_len = range.end - range.start;
2667 delta += new_text_length as isize - range_len as isize;
2668
2669 // Decide what range of the insertion to auto-indent, and whether
2670 // the first line of the insertion should be considered a newly-inserted line
2671 // or an edit to an existing line.
2672 let mut range_of_insertion_to_indent = 0..new_text_length;
2673 let mut first_line_is_new = true;
2674
2675 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2676 let old_line_end = before_edit.line_len(old_start.row);
2677
2678 if old_start.column > old_line_start {
2679 first_line_is_new = false;
2680 }
2681
2682 if !new_text.contains('\n')
2683 && (old_start.column + (range_len as u32) < old_line_end
2684 || old_line_end == old_line_start)
2685 {
2686 first_line_is_new = false;
2687 }
2688
2689 // When inserting text starting with a newline, avoid auto-indenting the
2690 // previous line.
2691 if new_text.starts_with('\n') {
2692 range_of_insertion_to_indent.start += 1;
2693 first_line_is_new = true;
2694 }
2695
2696 let mut original_indent_column = None;
2697 if let AutoindentMode::Block {
2698 original_indent_columns,
2699 } = &mode
2700 {
2701 original_indent_column = Some(if new_text.starts_with('\n') {
2702 indent_size_for_text(
2703 new_text[range_of_insertion_to_indent.clone()].chars(),
2704 )
2705 .len
2706 } else {
2707 original_indent_columns
2708 .get(ix)
2709 .copied()
2710 .flatten()
2711 .unwrap_or_else(|| {
2712 indent_size_for_text(
2713 new_text[range_of_insertion_to_indent.clone()].chars(),
2714 )
2715 .len
2716 })
2717 });
2718
2719 // Avoid auto-indenting the line after the edit.
2720 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2721 range_of_insertion_to_indent.end -= 1;
2722 }
2723 }
2724
2725 AutoindentRequestEntry {
2726 original_indent_column,
2727 old_row: if first_line_is_new {
2728 None
2729 } else {
2730 Some(old_start.row)
2731 },
2732 indent_size: before_edit.language_indent_size_at(range.start, cx),
2733 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2734 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2735 }
2736 })
2737 .collect();
2738
2739 if !entries.is_empty() {
2740 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2741 before_edit,
2742 entries,
2743 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2744 ignore_empty_lines: false,
2745 }));
2746 }
2747 }
2748
2749 self.end_transaction(cx);
2750 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2751 Some(edit_id)
2752 }
2753
2754 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2755 self.was_changed();
2756
2757 if self.edits_since::<usize>(old_version).next().is_none() {
2758 return;
2759 }
2760
2761 self.reparse(cx, true);
2762 cx.emit(BufferEvent::Edited);
2763 if was_dirty != self.is_dirty() {
2764 cx.emit(BufferEvent::DirtyChanged);
2765 }
2766 cx.notify();
2767 }
2768
2769 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2770 where
2771 I: IntoIterator<Item = Range<T>>,
2772 T: ToOffset + Copy,
2773 {
2774 let before_edit = self.snapshot();
2775 let entries = ranges
2776 .into_iter()
2777 .map(|range| AutoindentRequestEntry {
2778 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2779 old_row: None,
2780 indent_size: before_edit.language_indent_size_at(range.start, cx),
2781 original_indent_column: None,
2782 })
2783 .collect();
2784 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2785 before_edit,
2786 entries,
2787 is_block_mode: false,
2788 ignore_empty_lines: true,
2789 }));
2790 self.request_autoindent(cx, Duration::from_micros(300));
2791 }
2792
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
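    ///
    /// A minimal sketch (the position is illustrative):
    ///
    /// ```ignore
    /// // Break the line at row 3, column 5 and pad with blank lines above and below.
    /// let start = buffer.insert_empty_line(Point::new(3, 5), true, true, cx);
    /// ```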
2795 pub fn insert_empty_line(
2796 &mut self,
2797 position: impl ToPoint,
2798 space_above: bool,
2799 space_below: bool,
2800 cx: &mut Context<Self>,
2801 ) -> Point {
2802 let mut position = position.to_point(self);
2803
2804 self.start_transaction();
2805
2806 self.edit(
2807 [(position..position, "\n")],
2808 Some(AutoindentMode::EachLine),
2809 cx,
2810 );
2811
2812 if position.column > 0 {
2813 position += Point::new(1, 0);
2814 }
2815
2816 if !self.is_line_blank(position.row) {
2817 self.edit(
2818 [(position..position, "\n")],
2819 Some(AutoindentMode::EachLine),
2820 cx,
2821 );
2822 }
2823
2824 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2825 self.edit(
2826 [(position..position, "\n")],
2827 Some(AutoindentMode::EachLine),
2828 cx,
2829 );
2830 position.row += 1;
2831 }
2832
2833 if space_below
2834 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2835 {
2836 self.edit(
2837 [(position..position, "\n")],
2838 Some(AutoindentMode::EachLine),
2839 cx,
2840 );
2841 }
2842
2843 self.end_transaction(cx);
2844
2845 position
2846 }
2847
2848 /// Applies the given remote operations to the buffer.
2849 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2850 self.pending_autoindent.take();
2851 let was_dirty = self.is_dirty();
2852 let old_version = self.version.clone();
2853 let mut deferred_ops = Vec::new();
2854 let buffer_ops = ops
2855 .into_iter()
2856 .filter_map(|op| match op {
2857 Operation::Buffer(op) => Some(op),
2858 _ => {
2859 if self.can_apply_op(&op) {
2860 self.apply_op(op, cx);
2861 } else {
2862 deferred_ops.push(op);
2863 }
2864 None
2865 }
2866 })
2867 .collect::<Vec<_>>();
2868 for operation in buffer_ops.iter() {
2869 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2870 }
2871 self.text.apply_ops(buffer_ops);
2872 self.deferred_ops.insert(deferred_ops);
2873 self.flush_deferred_ops(cx);
2874 self.did_edit(&old_version, was_dirty, cx);
2875 // Notify independently of whether the buffer was edited as the operations could include a
2876 // selection update.
2877 cx.notify();
2878 }
2879
2880 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2881 let mut deferred_ops = Vec::new();
2882 for op in self.deferred_ops.drain().iter().cloned() {
2883 if self.can_apply_op(&op) {
2884 self.apply_op(op, cx);
2885 } else {
2886 deferred_ops.push(op);
2887 }
2888 }
2889 self.deferred_ops.insert(deferred_ops);
2890 }
2891
2892 pub fn has_deferred_ops(&self) -> bool {
2893 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2894 }
2895
2896 fn can_apply_op(&self, operation: &Operation) -> bool {
2897 match operation {
2898 Operation::Buffer(_) => {
2899 unreachable!("buffer operations should never be applied at this layer")
2900 }
2901 Operation::UpdateDiagnostics {
2902 diagnostics: diagnostic_set,
2903 ..
2904 } => diagnostic_set.iter().all(|diagnostic| {
2905 self.text.can_resolve(&diagnostic.range.start)
2906 && self.text.can_resolve(&diagnostic.range.end)
2907 }),
2908 Operation::UpdateSelections { selections, .. } => selections
2909 .iter()
2910 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2911 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2912 }
2913 }
2914
2915 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2916 match operation {
2917 Operation::Buffer(_) => {
2918 unreachable!("buffer operations should never be applied at this layer")
2919 }
2920 Operation::UpdateDiagnostics {
2921 server_id,
2922 diagnostics: diagnostic_set,
2923 lamport_timestamp,
2924 } => {
2925 let snapshot = self.snapshot();
2926 self.apply_diagnostic_update(
2927 server_id,
2928 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2929 lamport_timestamp,
2930 cx,
2931 );
2932 }
2933 Operation::UpdateSelections {
2934 selections,
2935 lamport_timestamp,
2936 line_mode,
2937 cursor_shape,
2938 } => {
2939 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2940 && set.lamport_timestamp > lamport_timestamp
2941 {
2942 return;
2943 }
2944
2945 self.remote_selections.insert(
2946 lamport_timestamp.replica_id,
2947 SelectionSet {
2948 selections,
2949 lamport_timestamp,
2950 line_mode,
2951 cursor_shape,
2952 },
2953 );
2954 self.text.lamport_clock.observe(lamport_timestamp);
2955 self.non_text_state_update_count += 1;
2956 }
2957 Operation::UpdateCompletionTriggers {
2958 triggers,
2959 lamport_timestamp,
2960 server_id,
2961 } => {
2962 if triggers.is_empty() {
2963 self.completion_triggers_per_language_server
2964 .remove(&server_id);
2965 self.completion_triggers = self
2966 .completion_triggers_per_language_server
2967 .values()
2968 .flat_map(|triggers| triggers.iter().cloned())
2969 .collect();
2970 } else {
2971 self.completion_triggers_per_language_server
2972 .insert(server_id, triggers.iter().cloned().collect());
2973 self.completion_triggers.extend(triggers);
2974 }
2975 self.text.lamport_clock.observe(lamport_timestamp);
2976 }
2977 Operation::UpdateLineEnding {
2978 line_ending,
2979 lamport_timestamp,
2980 } => {
2981 self.text.set_line_ending(line_ending);
2982 self.text.lamport_clock.observe(lamport_timestamp);
2983 }
2984 }
2985 }
2986
2987 fn apply_diagnostic_update(
2988 &mut self,
2989 server_id: LanguageServerId,
2990 diagnostics: DiagnosticSet,
2991 lamport_timestamp: clock::Lamport,
2992 cx: &mut Context<Self>,
2993 ) {
2994 if lamport_timestamp > self.diagnostics_timestamp {
2995 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2996 if diagnostics.is_empty() {
2997 if let Ok(ix) = ix {
2998 self.diagnostics.remove(ix);
2999 }
3000 } else {
3001 match ix {
3002 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
3003 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
3004 };
3005 }
3006 self.diagnostics_timestamp = lamport_timestamp;
3007 self.non_text_state_update_count += 1;
3008 self.text.lamport_clock.observe(lamport_timestamp);
3009 cx.notify();
3010 cx.emit(BufferEvent::DiagnosticsUpdated);
3011 }
3012 }
3013
3014 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
3015 self.was_changed();
3016 cx.emit(BufferEvent::Operation {
3017 operation,
3018 is_local,
3019 });
3020 }
3021
3022 /// Removes the selections for a given peer.
3023 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
3024 self.remote_selections.remove(&replica_id);
3025 cx.notify();
3026 }
3027
3028 /// Undoes the most recent transaction.
3029 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3030 let was_dirty = self.is_dirty();
3031 let old_version = self.version.clone();
3032
3033 if let Some((transaction_id, operation)) = self.text.undo() {
3034 self.send_operation(Operation::Buffer(operation), true, cx);
3035 self.did_edit(&old_version, was_dirty, cx);
3036 Some(transaction_id)
3037 } else {
3038 None
3039 }
3040 }
3041
3042 /// Manually undoes a specific transaction in the buffer's undo history.
3043 pub fn undo_transaction(
3044 &mut self,
3045 transaction_id: TransactionId,
3046 cx: &mut Context<Self>,
3047 ) -> bool {
3048 let was_dirty = self.is_dirty();
3049 let old_version = self.version.clone();
3050 if let Some(operation) = self.text.undo_transaction(transaction_id) {
3051 self.send_operation(Operation::Buffer(operation), true, cx);
3052 self.did_edit(&old_version, was_dirty, cx);
3053 true
3054 } else {
3055 false
3056 }
3057 }
3058
3059 /// Manually undoes all changes after a given transaction in the buffer's undo history.
3060 pub fn undo_to_transaction(
3061 &mut self,
3062 transaction_id: TransactionId,
3063 cx: &mut Context<Self>,
3064 ) -> bool {
3065 let was_dirty = self.is_dirty();
3066 let old_version = self.version.clone();
3067
3068 let operations = self.text.undo_to_transaction(transaction_id);
3069 let undone = !operations.is_empty();
3070 for operation in operations {
3071 self.send_operation(Operation::Buffer(operation), true, cx);
3072 }
3073 if undone {
3074 self.did_edit(&old_version, was_dirty, cx)
3075 }
3076 undone
3077 }
3078
3079 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3080 let was_dirty = self.is_dirty();
3081 let operation = self.text.undo_operations(counts);
3082 let old_version = self.version.clone();
3083 self.send_operation(Operation::Buffer(operation), true, cx);
3084 self.did_edit(&old_version, was_dirty, cx);
3085 }
3086
    /// Redoes the most recently undone transaction.
3088 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3089 let was_dirty = self.is_dirty();
3090 let old_version = self.version.clone();
3091
3092 if let Some((transaction_id, operation)) = self.text.redo() {
3093 self.send_operation(Operation::Buffer(operation), true, cx);
3094 self.did_edit(&old_version, was_dirty, cx);
3095 Some(transaction_id)
3096 } else {
3097 None
3098 }
3099 }
3100
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3102 pub fn redo_to_transaction(
3103 &mut self,
3104 transaction_id: TransactionId,
3105 cx: &mut Context<Self>,
3106 ) -> bool {
3107 let was_dirty = self.is_dirty();
3108 let old_version = self.version.clone();
3109
3110 let operations = self.text.redo_to_transaction(transaction_id);
3111 let redone = !operations.is_empty();
3112 for operation in operations {
3113 self.send_operation(Operation::Buffer(operation), true, cx);
3114 }
3115 if redone {
3116 self.did_edit(&old_version, was_dirty, cx)
3117 }
3118 redone
3119 }
3120
3121 /// Override current completion triggers with the user-provided completion triggers.
3122 pub fn set_completion_triggers(
3123 &mut self,
3124 server_id: LanguageServerId,
3125 triggers: BTreeSet<String>,
3126 cx: &mut Context<Self>,
3127 ) {
3128 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3129 if triggers.is_empty() {
3130 self.completion_triggers_per_language_server
3131 .remove(&server_id);
3132 self.completion_triggers = self
3133 .completion_triggers_per_language_server
3134 .values()
3135 .flat_map(|triggers| triggers.iter().cloned())
3136 .collect();
3137 } else {
3138 self.completion_triggers_per_language_server
3139 .insert(server_id, triggers.clone());
3140 self.completion_triggers.extend(triggers.iter().cloned());
3141 }
3142 self.send_operation(
3143 Operation::UpdateCompletionTriggers {
3144 triggers: triggers.into_iter().collect(),
3145 lamport_timestamp: self.completion_triggers_timestamp,
3146 server_id,
3147 },
3148 true,
3149 cx,
3150 );
3151 cx.notify();
3152 }
3153
3154 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
3156 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3157 &self.completion_triggers
3158 }
3159
3160 /// Call this directly after performing edits to prevent the preview tab
3161 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3162 /// to return false until there are additional edits.
3163 pub fn refresh_preview(&mut self) {
3164 self.preview_version = self.version.clone();
3165 }
3166
3167 /// Whether we should preserve the preview status of a tab containing this buffer.
3168 pub fn preserve_preview(&self) -> bool {
3169 !self.has_edits_since(&self.preview_version)
3170 }
3171}
3172
3173#[doc(hidden)]
3174#[cfg(any(test, feature = "test-support"))]
3175impl Buffer {
3176 pub fn edit_via_marked_text(
3177 &mut self,
3178 marked_string: &str,
3179 autoindent_mode: Option<AutoindentMode>,
3180 cx: &mut Context<Self>,
3181 ) {
3182 let edits = self.edits_for_marked_text(marked_string);
3183 self.edit(edits, autoindent_mode, cx);
3184 }
3185
3186 pub fn set_group_interval(&mut self, group_interval: Duration) {
3187 self.text.set_group_interval(group_interval);
3188 }
3189
3190 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3191 where
3192 T: rand::Rng,
3193 {
3194 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3195 let mut last_end = None;
3196 for _ in 0..old_range_count {
3197 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3198 break;
3199 }
3200
3201 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3202 let mut range = self.random_byte_range(new_start, rng);
3203 if rng.random_bool(0.2) {
3204 mem::swap(&mut range.start, &mut range.end);
3205 }
3206 last_end = Some(range.end);
3207
3208 let new_text_len = rng.random_range(0..10);
3209 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3210 new_text = new_text.to_uppercase();
3211
3212 edits.push((range, new_text));
3213 }
3214 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3215 self.edit(edits, None, cx);
3216 }
3217
3218 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3219 let was_dirty = self.is_dirty();
3220 let old_version = self.version.clone();
3221
3222 let ops = self.text.randomly_undo_redo(rng);
3223 if !ops.is_empty() {
3224 for op in ops {
3225 self.send_operation(Operation::Buffer(op), true, cx);
3226 self.did_edit(&old_version, was_dirty, cx);
3227 }
3228 }
3229 }
3230}
3231
3232impl EventEmitter<BufferEvent> for Buffer {}
3233
3234impl Deref for Buffer {
3235 type Target = TextBuffer;
3236
3237 fn deref(&self) -> &Self::Target {
3238 &self.text
3239 }
3240}
3241
3242impl BufferSnapshot {
3243 /// Returns [`IndentSize`] for a given line that respects user settings and
3244 /// language preferences.
3245 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3246 indent_size_for_line(self, row)
3247 }
3248
3249 /// Returns [`IndentSize`] for a given position that respects user settings
3250 /// and language preferences.
3251 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3252 let settings = language_settings(
3253 self.language_at(position).map(|l| l.name()),
3254 self.file(),
3255 cx,
3256 );
3257 if settings.hard_tabs {
3258 IndentSize::tab()
3259 } else {
3260 IndentSize::spaces(settings.tab_size.get())
3261 }
3262 }
3263
3264 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3265 /// is passed in as `single_indent_size`.
3266 pub fn suggested_indents(
3267 &self,
3268 rows: impl Iterator<Item = u32>,
3269 single_indent_size: IndentSize,
3270 ) -> BTreeMap<u32, IndentSize> {
3271 let mut result = BTreeMap::new();
3272
3273 for row_range in contiguous_ranges(rows, 10) {
3274 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3275 Some(suggestions) => suggestions,
3276 _ => break,
3277 };
3278
3279 for (row, suggestion) in row_range.zip(suggestions) {
3280 let indent_size = if let Some(suggestion) = suggestion {
3281 result
3282 .get(&suggestion.basis_row)
3283 .copied()
3284 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3285 .with_delta(suggestion.delta, single_indent_size)
3286 } else {
3287 self.indent_size_for_line(row)
3288 };
3289
3290 result.insert(row, indent_size);
3291 }
3292 }
3293
3294 result
3295 }
3296
3297 fn suggest_autoindents(
3298 &self,
3299 row_range: Range<u32>,
3300 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3301 let config = &self.language.as_ref()?.config;
3302 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3303
3304 #[derive(Debug, Clone)]
3305 struct StartPosition {
3306 start: Point,
3307 suffix: SharedString,
3308 language: Arc<Language>,
3309 }
3310
3311 // Find the suggested indentation ranges based on the syntax tree.
3312 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3313 let end = Point::new(row_range.end, 0);
3314 let range = (start..end).to_offset(&self.text);
3315 let mut matches = self.syntax.matches_with_options(
3316 range.clone(),
3317 &self.text,
3318 TreeSitterOptions {
3319 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3320 max_start_depth: None,
3321 },
3322 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3323 );
3324 let indent_configs = matches
3325 .grammars()
3326 .iter()
3327 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3328 .collect::<Vec<_>>();
3329
3330 let mut indent_ranges = Vec::<Range<Point>>::new();
3331 let mut start_positions = Vec::<StartPosition>::new();
3332 let mut outdent_positions = Vec::<Point>::new();
3333 while let Some(mat) = matches.peek() {
3334 let mut start: Option<Point> = None;
3335 let mut end: Option<Point> = None;
3336
3337 let config = indent_configs[mat.grammar_index];
3338 for capture in mat.captures {
3339 if capture.index == config.indent_capture_ix {
3340 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3341 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3342 } else if Some(capture.index) == config.start_capture_ix {
3343 start = Some(Point::from_ts_point(capture.node.end_position()));
3344 } else if Some(capture.index) == config.end_capture_ix {
3345 end = Some(Point::from_ts_point(capture.node.start_position()));
3346 } else if Some(capture.index) == config.outdent_capture_ix {
3347 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3348 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3349 start_positions.push(StartPosition {
3350 start: Point::from_ts_point(capture.node.start_position()),
3351 suffix: suffix.clone(),
3352 language: mat.language.clone(),
3353 });
3354 }
3355 }
3356
3357 matches.advance();
3358 if let Some((start, end)) = start.zip(end) {
3359 if start.row == end.row {
3360 continue;
3361 }
3362 let range = start..end;
3363 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3364 Err(ix) => indent_ranges.insert(ix, range),
3365 Ok(ix) => {
3366 let prev_range = &mut indent_ranges[ix];
3367 prev_range.end = prev_range.end.max(range.end);
3368 }
3369 }
3370 }
3371 }
3372
3373 let mut error_ranges = Vec::<Range<Point>>::new();
3374 let mut matches = self
3375 .syntax
3376 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3377 while let Some(mat) = matches.peek() {
3378 let node = mat.captures[0].node;
3379 let start = Point::from_ts_point(node.start_position());
3380 let end = Point::from_ts_point(node.end_position());
3381 let range = start..end;
3382 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3383 Ok(ix) | Err(ix) => ix,
3384 };
3385 let mut end_ix = ix;
3386 while let Some(existing_range) = error_ranges.get(end_ix) {
3387 if existing_range.end < end {
3388 end_ix += 1;
3389 } else {
3390 break;
3391 }
3392 }
3393 error_ranges.splice(ix..end_ix, [range]);
3394 matches.advance();
3395 }
3396
3397 outdent_positions.sort();
3398 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and set its end to the outdent position.
3401 if let Some(range_to_truncate) = indent_ranges
3402 .iter_mut()
3403 .rfind(|indent_range| indent_range.contains(&outdent_position))
3404 {
3405 range_to_truncate.end = outdent_position;
3406 }
3407 }
3408
3409 start_positions.sort_by_key(|b| b.start);
3410
        // Find the suggested indentation increases and decreases based on regexes.
3412 let mut regex_outdent_map = HashMap::default();
3413 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3414 let mut start_positions_iter = start_positions.iter().peekable();
3415
3416 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3417 self.for_each_line(
3418 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3419 ..Point::new(row_range.end, 0),
3420 |row, line| {
3421 let indent_len = self.indent_size_for_line(row).len;
3422 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3423 let row_language_config = row_language
3424 .as_ref()
3425 .map(|lang| lang.config())
3426 .unwrap_or(config);
3427
3428 if row_language_config
3429 .decrease_indent_pattern
3430 .as_ref()
3431 .is_some_and(|regex| regex.is_match(line))
3432 {
3433 indent_change_rows.push((row, Ordering::Less));
3434 }
3435 if row_language_config
3436 .increase_indent_pattern
3437 .as_ref()
3438 .is_some_and(|regex| regex.is_match(line))
3439 {
3440 indent_change_rows.push((row + 1, Ordering::Greater));
3441 }
3442 while let Some(pos) = start_positions_iter.peek() {
3443 if pos.start.row < row {
3444 let pos = start_positions_iter.next().unwrap().clone();
3445 last_seen_suffix
3446 .entry(pos.suffix.to_string())
3447 .or_default()
3448 .push(pos);
3449 } else {
3450 break;
3451 }
3452 }
3453 for rule in &row_language_config.decrease_indent_patterns {
3454 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3455 let row_start_column = self.indent_size_for_line(row).len;
3456 let basis_row = rule
3457 .valid_after
3458 .iter()
3459 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3460 .flatten()
3461 .filter(|pos| {
3462 row_language
3463 .as_ref()
3464 .or(self.language.as_ref())
3465 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3466 })
3467 .filter(|pos| pos.start.column <= row_start_column)
3468 .max_by_key(|pos| pos.start.row);
3469 if let Some(outdent_to) = basis_row {
3470 regex_outdent_map.insert(row, outdent_to.start.row);
3471 }
3472 break;
3473 }
3474 }
3475 },
3476 );
3477
3478 let mut indent_changes = indent_change_rows.into_iter().peekable();
3479 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3480 prev_non_blank_row.unwrap_or(0)
3481 } else {
3482 row_range.start.saturating_sub(1)
3483 };
3484
3485 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3486 Some(row_range.map(move |row| {
3487 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3488
3489 let mut indent_from_prev_row = false;
3490 let mut outdent_from_prev_row = false;
3491 let mut outdent_to_row = u32::MAX;
3492 let mut from_regex = false;
3493
3494 while let Some((indent_row, delta)) = indent_changes.peek() {
3495 match indent_row.cmp(&row) {
3496 Ordering::Equal => match delta {
3497 Ordering::Less => {
3498 from_regex = true;
3499 outdent_from_prev_row = true
3500 }
3501 Ordering::Greater => {
3502 indent_from_prev_row = true;
3503 from_regex = true
3504 }
3505 _ => {}
3506 },
3507
3508 Ordering::Greater => break,
3509 Ordering::Less => {}
3510 }
3511
3512 indent_changes.next();
3513 }
3514
3515 for range in &indent_ranges {
3516 if range.start.row >= row {
3517 break;
3518 }
3519 if range.start.row == prev_row && range.end > row_start {
3520 indent_from_prev_row = true;
3521 }
3522 if range.end > prev_row_start && range.end <= row_start {
3523 outdent_to_row = outdent_to_row.min(range.start.row);
3524 }
3525 }
3526
3527 if let Some(basis_row) = regex_outdent_map.get(&row) {
3528 indent_from_prev_row = false;
3529 outdent_to_row = *basis_row;
3530 from_regex = true;
3531 }
3532
3533 let within_error = error_ranges
3534 .iter()
3535 .any(|e| e.start.row < row && e.end > row_start);
3536
3537 let suggestion = if outdent_to_row == prev_row
3538 || (outdent_from_prev_row && indent_from_prev_row)
3539 {
3540 Some(IndentSuggestion {
3541 basis_row: prev_row,
3542 delta: Ordering::Equal,
3543 within_error: within_error && !from_regex,
3544 })
3545 } else if indent_from_prev_row {
3546 Some(IndentSuggestion {
3547 basis_row: prev_row,
3548 delta: Ordering::Greater,
3549 within_error: within_error && !from_regex,
3550 })
3551 } else if outdent_to_row < prev_row {
3552 Some(IndentSuggestion {
3553 basis_row: outdent_to_row,
3554 delta: Ordering::Equal,
3555 within_error: within_error && !from_regex,
3556 })
3557 } else if outdent_from_prev_row {
3558 Some(IndentSuggestion {
3559 basis_row: prev_row,
3560 delta: Ordering::Less,
3561 within_error: within_error && !from_regex,
3562 })
3563 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3564 {
3565 Some(IndentSuggestion {
3566 basis_row: prev_row,
3567 delta: Ordering::Equal,
3568 within_error: within_error && !from_regex,
3569 })
3570 } else {
3571 None
3572 };
3573
3574 prev_row = row;
3575 prev_row_start = row_start;
3576 suggestion
3577 }))
3578 }
3579
3580 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3581 while row > 0 {
3582 row -= 1;
3583 if !self.is_line_blank(row) {
3584 return Some(row);
3585 }
3586 }
3587 None
3588 }
3589
3590 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3591 let captures = self.syntax.captures(range, &self.text, |grammar| {
3592 grammar
3593 .highlights_config
3594 .as_ref()
3595 .map(|config| &config.query)
3596 });
3597 let highlight_maps = captures
3598 .grammars()
3599 .iter()
3600 .map(|grammar| grammar.highlight_map())
3601 .collect();
3602 (captures, highlight_maps)
3603 }
3604
3605 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3606 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3607 /// returned in chunks where each chunk has a single syntax highlighting style and
3608 /// diagnostic status.
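    ///
    /// A minimal usage sketch (illustrative only, not compiled as a doctest; `snapshot` is
    /// assumed to be an existing `BufferSnapshot`):
    ///
    /// ```ignore
    /// // Reassemble the buffer text while observing per-chunk highlight ids.
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     if let Some(highlight_id) = chunk.syntax_highlight_id {
    ///         // Each chunk carries at most one syntax highlight id and one diagnostic status.
    ///         let _ = highlight_id;
    ///     }
    ///     text.push_str(chunk.text);
    /// }
    /// ```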
3609 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3610 let range = range.start.to_offset(self)..range.end.to_offset(self);
3611
3612 let mut syntax = None;
3613 if language_aware {
3614 syntax = Some(self.get_highlights(range.clone()));
3615 }
3616 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3617 let diagnostics = language_aware;
3618 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3619 }
3620
3621 pub fn highlighted_text_for_range<T: ToOffset>(
3622 &self,
3623 range: Range<T>,
3624 override_style: Option<HighlightStyle>,
3625 syntax_theme: &SyntaxTheme,
3626 ) -> HighlightedText {
3627 HighlightedText::from_buffer_range(
3628 range,
3629 &self.text,
3630 &self.syntax,
3631 override_style,
3632 syntax_theme,
3633 )
3634 }
3635
3636 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3638 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3639 let mut line = String::new();
3640 let mut row = range.start.row;
3641 for chunk in self
3642 .as_rope()
3643 .chunks_in_range(range.to_offset(self))
3644 .chain(["\n"])
3645 {
3646 for (newline_ix, text) in chunk.split('\n').enumerate() {
3647 if newline_ix > 0 {
3648 callback(row, &line);
3649 row += 1;
3650 line.clear();
3651 }
3652 line.push_str(text);
3653 }
3654 }
3655 }
3656
3657 /// Iterates over every [`SyntaxLayer`] in the buffer.
3658 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3659 self.syntax_layers_for_range(0..self.len(), true)
3660 }
3661
3662 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3663 let offset = position.to_offset(self);
3664 self.syntax_layers_for_range(offset..offset, false)
3665 .filter(|l| {
3666 if let Some(ranges) = l.included_sub_ranges {
3667 ranges.iter().any(|range| {
3668 let start = range.start.to_offset(self);
3669 start <= offset && {
3670 let end = range.end.to_offset(self);
3671 offset < end
3672 }
3673 })
3674 } else {
3675 l.node().start_byte() <= offset && l.node().end_byte() > offset
3676 }
3677 })
3678 .last()
3679 }
3680
3681 pub fn syntax_layers_for_range<D: ToOffset>(
3682 &self,
3683 range: Range<D>,
3684 include_hidden: bool,
3685 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3686 self.syntax
3687 .layers_for_range(range, &self.text, include_hidden)
3688 }
3689
3690 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3691 &self,
3692 range: Range<D>,
3693 ) -> Option<SyntaxLayer<'_>> {
3694 let range = range.to_offset(self);
3695 self.syntax
3696 .layers_for_range(range, &self.text, false)
3697 .max_by(|a, b| {
3698 if a.depth != b.depth {
3699 a.depth.cmp(&b.depth)
3700 } else if a.offset.0 != b.offset.0 {
3701 a.offset.0.cmp(&b.offset.0)
3702 } else {
3703 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3704 }
3705 })
3706 }
3707
3708 /// Returns the main [`Language`].
3709 pub fn language(&self) -> Option<&Arc<Language>> {
3710 self.language.as_ref()
3711 }
3712
3713 /// Returns the [`Language`] at the given location.
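    ///
    /// For buffers with injected languages (e.g. a fenced code block inside Markdown), this can
    /// differ from [`Self::language`]. A minimal sketch (illustrative only; `snapshot` and a
    /// byte offset `offset` are assumed to exist):
    ///
    /// ```ignore
    /// let outer = snapshot.language().map(|language| language.name());
    /// let inner = snapshot.language_at(offset).map(|language| language.name());
    /// // `inner` comes from the innermost syntax layer containing `offset`, falling back to
    /// // the buffer's primary language when no layer matches.
    /// ```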
3714 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3715 self.syntax_layer_at(position)
3716 .map(|info| info.language)
3717 .or(self.language.as_ref())
3718 }
3719
3720 /// Returns the settings for the language at the given location.
3721 pub fn settings_at<'a, D: ToOffset>(
3722 &'a self,
3723 position: D,
3724 cx: &'a App,
3725 ) -> Cow<'a, LanguageSettings> {
3726 language_settings(
3727 self.language_at(position).map(|l| l.name()),
3728 self.file.as_ref(),
3729 cx,
3730 )
3731 }
3732
3733 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3734 CharClassifier::new(self.language_scope_at(point))
3735 }
3736
3737 /// Returns the [`LanguageScope`] at the given location.
3738 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3739 let offset = position.to_offset(self);
3740 let mut scope = None;
3741 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3742
3743 // Use the layer that has the smallest node intersecting the given point.
3744 for layer in self
3745 .syntax
3746 .layers_for_range(offset..offset, &self.text, false)
3747 {
3748 let mut cursor = layer.node().walk();
3749
3750 let mut range = None;
3751 loop {
3752 let child_range = cursor.node().byte_range();
3753 if !child_range.contains(&offset) {
3754 break;
3755 }
3756
3757 range = Some(child_range);
3758 if cursor.goto_first_child_for_byte(offset).is_none() {
3759 break;
3760 }
3761 }
3762
3763 if let Some(range) = range
3764 && smallest_range_and_depth.as_ref().is_none_or(
3765 |(smallest_range, smallest_range_depth)| {
3766 if layer.depth > *smallest_range_depth {
3767 true
3768 } else if layer.depth == *smallest_range_depth {
3769 range.len() < smallest_range.len()
3770 } else {
3771 false
3772 }
3773 },
3774 )
3775 {
3776 smallest_range_and_depth = Some((range, layer.depth));
3777 scope = Some(LanguageScope {
3778 language: layer.language.clone(),
3779 override_id: layer.override_id(offset, &self.text),
3780 });
3781 }
3782 }
3783
3784 scope.or_else(|| {
3785 self.language.clone().map(|language| LanguageScope {
3786 language,
3787 override_id: None,
3788 })
3789 })
3790 }
3791
3792 /// Returns a tuple of the range and character kind of the word
3793 /// surrounding the given position.
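    ///
    /// A minimal sketch (illustrative only; `snapshot` and a byte offset `offset` are assumed):
    ///
    /// ```ignore
    /// let (word_range, kind) = snapshot.surrounding_word(offset, None);
    /// let word: String = snapshot.text_for_range(word_range).collect();
    /// // `kind` is `None` only when there are no characters on either side of `offset`.
    /// ```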
3794 pub fn surrounding_word<T: ToOffset>(
3795 &self,
3796 start: T,
3797 scope_context: Option<CharScopeContext>,
3798 ) -> (Range<usize>, Option<CharKind>) {
3799 let mut start = start.to_offset(self);
3800 let mut end = start;
3801 let mut next_chars = self.chars_at(start).take(128).peekable();
3802 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3803
3804 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3805 let word_kind = cmp::max(
3806 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3807 next_chars.peek().copied().map(|c| classifier.kind(c)),
3808 );
3809
3810 for ch in prev_chars {
3811 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3812 start -= ch.len_utf8();
3813 } else {
3814 break;
3815 }
3816 }
3817
3818 for ch in next_chars {
3819 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3820 end += ch.len_utf8();
3821 } else {
3822 break;
3823 }
3824 }
3825
3826 (start..end, word_kind)
3827 }
3828
3829 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3830 /// range. When `require_larger` is true, the node found must be larger than the query range.
3831 ///
3832 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3833 /// be moved to the root of the tree.
3834 fn goto_node_enclosing_range(
3835 cursor: &mut tree_sitter::TreeCursor,
3836 query_range: &Range<usize>,
3837 require_larger: bool,
3838 ) -> bool {
3839 let mut ascending = false;
3840 loop {
3841 let mut range = cursor.node().byte_range();
3842 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3845 if range.start > query_range.start {
3846 cursor.goto_previous_sibling();
3847 range = cursor.node().byte_range();
3848 }
3849 } else {
3850 // When the query range is non-empty and the current node ends exactly at the start,
3851 // move to the next sibling to find a node that extends beyond the start.
3852 if range.end == query_range.start {
3853 cursor.goto_next_sibling();
3854 range = cursor.node().byte_range();
3855 }
3856 }
3857
3858 let encloses = range.contains_inclusive(query_range)
3859 && (!require_larger || range.len() > query_range.len());
3860 if !encloses {
3861 ascending = true;
3862 if !cursor.goto_parent() {
3863 return false;
3864 }
3865 continue;
3866 } else if ascending {
3867 return true;
3868 }
3869
3870 // Descend into the current node.
3871 if cursor
3872 .goto_first_child_for_byte(query_range.start)
3873 .is_none()
3874 {
3875 return true;
3876 }
3877 }
3878 }
3879
3880 pub fn syntax_ancestor<'a, T: ToOffset>(
3881 &'a self,
3882 range: Range<T>,
3883 ) -> Option<tree_sitter::Node<'a>> {
3884 let range = range.start.to_offset(self)..range.end.to_offset(self);
3885 let mut result: Option<tree_sitter::Node<'a>> = None;
3886 for layer in self
3887 .syntax
3888 .layers_for_range(range.clone(), &self.text, true)
3889 {
3890 let mut cursor = layer.node().walk();
3891
3892 // Find the node that both contains the range and is larger than it.
3893 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3894 continue;
3895 }
3896
3897 let left_node = cursor.node();
3898 let mut layer_result = left_node;
3899
3900 // For an empty range, try to find another node immediately to the right of the range.
3901 if left_node.end_byte() == range.start {
3902 let mut right_node = None;
3903 while !cursor.goto_next_sibling() {
3904 if !cursor.goto_parent() {
3905 break;
3906 }
3907 }
3908
3909 while cursor.node().start_byte() == range.start {
3910 right_node = Some(cursor.node());
3911 if !cursor.goto_first_child() {
3912 break;
3913 }
3914 }
3915
3916 // If there is a candidate node on both sides of the (empty) range, then
3917 // decide between the two by favoring a named node over an anonymous token.
3918 // If both nodes are the same in that regard, favor the right one.
3919 if let Some(right_node) = right_node
3920 && (right_node.is_named() || !left_node.is_named())
3921 {
3922 layer_result = right_node;
3923 }
3924 }
3925
3926 if let Some(previous_result) = &result
3927 && previous_result.byte_range().len() < layer_result.byte_range().len()
3928 {
3929 continue;
3930 }
3931 result = Some(layer_result);
3932 }
3933
3934 result
3935 }
3936
3937 /// Find the previous sibling syntax node at the given range.
3938 ///
3939 /// This function locates the syntax node that precedes the node containing
3940 /// the given range. It searches hierarchically by:
3941 /// 1. Finding the node that contains the given range
3942 /// 2. Looking for the previous sibling at the same tree level
3943 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3944 ///
3945 /// Returns `None` if there is no previous sibling at any ancestor level.
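    ///
    /// A minimal sketch (illustrative only; `snapshot` and a byte offset `offset` are assumed):
    ///
    /// ```ignore
    /// if let Some(prev) = snapshot.syntax_prev_sibling(offset..offset) {
    ///     // `prev` is a tree-sitter node; its kind and byte range identify the sibling.
    ///     println!("{} at {:?}", prev.kind(), prev.byte_range());
    /// }
    /// ```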
3946 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3947 &'a self,
3948 range: Range<T>,
3949 ) -> Option<tree_sitter::Node<'a>> {
3950 let range = range.start.to_offset(self)..range.end.to_offset(self);
3951 let mut result: Option<tree_sitter::Node<'a>> = None;
3952
3953 for layer in self
3954 .syntax
3955 .layers_for_range(range.clone(), &self.text, true)
3956 {
3957 let mut cursor = layer.node().walk();
3958
3959 // Find the node that contains the range
3960 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3961 continue;
3962 }
3963
3964 // Look for the previous sibling, moving up ancestor levels if needed
3965 loop {
3966 if cursor.goto_previous_sibling() {
3967 let layer_result = cursor.node();
3968
3969 if let Some(previous_result) = &result {
3970 if previous_result.byte_range().end < layer_result.byte_range().end {
3971 continue;
3972 }
3973 }
3974 result = Some(layer_result);
3975 break;
3976 }
3977
3978 // No sibling found at this level, try moving up to parent
3979 if !cursor.goto_parent() {
3980 break;
3981 }
3982 }
3983 }
3984
3985 result
3986 }
3987
3988 /// Find the next sibling syntax node at the given range.
3989 ///
3990 /// This function locates the syntax node that follows the node containing
3991 /// the given range. It searches hierarchically by:
3992 /// 1. Finding the node that contains the given range
3993 /// 2. Looking for the next sibling at the same tree level
3994 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3995 ///
3996 /// Returns `None` if there is no next sibling at any ancestor level.
3997 pub fn syntax_next_sibling<'a, T: ToOffset>(
3998 &'a self,
3999 range: Range<T>,
4000 ) -> Option<tree_sitter::Node<'a>> {
4001 let range = range.start.to_offset(self)..range.end.to_offset(self);
4002 let mut result: Option<tree_sitter::Node<'a>> = None;
4003
4004 for layer in self
4005 .syntax
4006 .layers_for_range(range.clone(), &self.text, true)
4007 {
4008 let mut cursor = layer.node().walk();
4009
4010 // Find the node that contains the range
4011 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
4012 continue;
4013 }
4014
4015 // Look for the next sibling, moving up ancestor levels if needed
4016 loop {
4017 if cursor.goto_next_sibling() {
4018 let layer_result = cursor.node();
4019
4020 if let Some(previous_result) = &result {
4021 if previous_result.byte_range().start > layer_result.byte_range().start {
4022 continue;
4023 }
4024 }
4025 result = Some(layer_result);
4026 break;
4027 }
4028
4029 // No sibling found at this level, try moving up to parent
4030 if !cursor.goto_parent() {
4031 break;
4032 }
4033 }
4034 }
4035
4036 result
4037 }
4038
4039 /// Returns the root syntax node within the given row
4040 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
4041 let start_offset = position.to_offset(self);
4042
4043 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
4044
4045 let layer = self
4046 .syntax
4047 .layers_for_range(start_offset..start_offset, &self.text, true)
4048 .next()?;
4049
4050 let mut cursor = layer.node().walk();
4051
4052 // Descend to the first leaf that touches the start of the range.
4053 while cursor.goto_first_child_for_byte(start_offset).is_some() {
4054 if cursor.node().end_byte() == start_offset {
4055 cursor.goto_next_sibling();
4056 }
4057 }
4058
4059 // Ascend to the root node within the same row.
4060 while cursor.goto_parent() {
4061 if cursor.node().start_position().row != row {
4062 break;
4063 }
4064 }
4065
4066 Some(cursor.node())
4067 }
4068
4069 /// Returns the outline for the buffer.
4070 ///
4071 /// This method allows passing an optional [`SyntaxTheme`] to
4072 /// syntax-highlight the returned symbols.
4073 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4074 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4075 }
4076
4077 /// Returns all the symbols that contain the given position.
4078 ///
4079 /// This method allows passing an optional [`SyntaxTheme`] to
4080 /// syntax-highlight the returned symbols.
4081 pub fn symbols_containing<T: ToOffset>(
4082 &self,
4083 position: T,
4084 theme: Option<&SyntaxTheme>,
4085 ) -> Vec<OutlineItem<Anchor>> {
4086 let position = position.to_offset(self);
4087 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4088 let end = self.clip_offset(position + 1, Bias::Right);
4089 let mut items = self.outline_items_containing(start..end, false, theme);
4090 let mut prev_depth = None;
4091 items.retain(|item| {
4092 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4093 prev_depth = Some(item.depth);
4094 result
4095 });
4096 items
4097 }
4098
4099 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4100 let range = range.to_offset(self);
4101 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4102 grammar.outline_config.as_ref().map(|c| &c.query)
4103 });
4104 let configs = matches
4105 .grammars()
4106 .iter()
4107 .map(|g| g.outline_config.as_ref().unwrap())
4108 .collect::<Vec<_>>();
4109
4110 while let Some(mat) = matches.peek() {
4111 let config = &configs[mat.grammar_index];
4112 let containing_item_node = maybe!({
4113 let item_node = mat.captures.iter().find_map(|cap| {
4114 if cap.index == config.item_capture_ix {
4115 Some(cap.node)
4116 } else {
4117 None
4118 }
4119 })?;
4120
4121 let item_byte_range = item_node.byte_range();
4122 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4123 None
4124 } else {
4125 Some(item_node)
4126 }
4127 });
4128
4129 if let Some(item_node) = containing_item_node {
4130 return Some(
4131 Point::from_ts_point(item_node.start_position())
4132 ..Point::from_ts_point(item_node.end_position()),
4133 );
4134 }
4135
4136 matches.advance();
4137 }
4138 None
4139 }
4140
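    /// Returns the outline items intersecting the given range, with depths assigned from
    /// containment relationships. A minimal sketch of printing the whole buffer's outline
    /// (illustrative only; `snapshot` is an assumed `BufferSnapshot`):
    ///
    /// ```ignore
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     // Indent each symbol according to its nesting depth.
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```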
4141 pub fn outline_items_containing<T: ToOffset>(
4142 &self,
4143 range: Range<T>,
4144 include_extra_context: bool,
4145 theme: Option<&SyntaxTheme>,
4146 ) -> Vec<OutlineItem<Anchor>> {
4147 self.outline_items_containing_internal(
4148 range,
4149 include_extra_context,
4150 theme,
4151 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4152 )
4153 }
4154
4155 pub fn outline_items_as_points_containing<T: ToOffset>(
4156 &self,
4157 range: Range<T>,
4158 include_extra_context: bool,
4159 theme: Option<&SyntaxTheme>,
4160 ) -> Vec<OutlineItem<Point>> {
4161 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4162 range
4163 })
4164 }
4165
4166 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4167 &self,
4168 range: Range<T>,
4169 include_extra_context: bool,
4170 theme: Option<&SyntaxTheme>,
4171 ) -> Vec<OutlineItem<usize>> {
4172 self.outline_items_containing_internal(
4173 range,
4174 include_extra_context,
4175 theme,
4176 |buffer, range| range.to_offset(buffer),
4177 )
4178 }
4179
4180 fn outline_items_containing_internal<T: ToOffset, U>(
4181 &self,
4182 range: Range<T>,
4183 include_extra_context: bool,
4184 theme: Option<&SyntaxTheme>,
4185 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4186 ) -> Vec<OutlineItem<U>> {
4187 let range = range.to_offset(self);
4188 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4189 grammar.outline_config.as_ref().map(|c| &c.query)
4190 });
4191
4192 let mut items = Vec::new();
4193 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4194 while let Some(mat) = matches.peek() {
4195 let config = matches.grammars()[mat.grammar_index]
4196 .outline_config
4197 .as_ref()
4198 .unwrap();
4199 if let Some(item) =
4200 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4201 {
4202 items.push(item);
4203 } else if let Some(capture) = mat
4204 .captures
4205 .iter()
4206 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4207 {
4208 let capture_range = capture.node.start_position()..capture.node.end_position();
4209 let mut capture_row_range =
4210 capture_range.start.row as u32..capture_range.end.row as u32;
4211 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4212 {
4213 capture_row_range.end -= 1;
4214 }
4215 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4216 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4217 last_row_range.end = capture_row_range.end;
4218 } else {
4219 annotation_row_ranges.push(capture_row_range);
4220 }
4221 } else {
4222 annotation_row_ranges.push(capture_row_range);
4223 }
4224 }
4225 matches.advance();
4226 }
4227
4228 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4229
4230 // Assign depths based on containment relationships and convert to anchors.
4231 let mut item_ends_stack = Vec::<Point>::new();
4232 let mut anchor_items = Vec::new();
4233 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4234 for item in items {
4235 while let Some(last_end) = item_ends_stack.last().copied() {
4236 if last_end < item.range.end {
4237 item_ends_stack.pop();
4238 } else {
4239 break;
4240 }
4241 }
4242
4243 let mut annotation_row_range = None;
4244 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4245 let row_preceding_item = item.range.start.row.saturating_sub(1);
4246 if next_annotation_row_range.end < row_preceding_item {
4247 annotation_row_ranges.next();
4248 } else {
4249 if next_annotation_row_range.end == row_preceding_item {
4250 annotation_row_range = Some(next_annotation_row_range.clone());
4251 annotation_row_ranges.next();
4252 }
4253 break;
4254 }
4255 }
4256
4257 anchor_items.push(OutlineItem {
4258 depth: item_ends_stack.len(),
4259 range: range_callback(self, item.range.clone()),
4260 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4261 text: item.text,
4262 highlight_ranges: item.highlight_ranges,
4263 name_ranges: item.name_ranges,
4264 body_range: item.body_range.map(|r| range_callback(self, r)),
4265 annotation_range: annotation_row_range.map(|annotation_range| {
4266 let point_range = Point::new(annotation_range.start, 0)
4267 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4268 range_callback(self, point_range)
4269 }),
4270 });
4271 item_ends_stack.push(item.range.end);
4272 }
4273
4274 anchor_items
4275 }
4276
4277 fn next_outline_item(
4278 &self,
4279 config: &OutlineConfig,
4280 mat: &SyntaxMapMatch,
4281 range: &Range<usize>,
4282 include_extra_context: bool,
4283 theme: Option<&SyntaxTheme>,
4284 ) -> Option<OutlineItem<Point>> {
4285 let item_node = mat.captures.iter().find_map(|cap| {
4286 if cap.index == config.item_capture_ix {
4287 Some(cap.node)
4288 } else {
4289 None
4290 }
4291 })?;
4292
4293 let item_byte_range = item_node.byte_range();
4294 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4295 return None;
4296 }
4297 let item_point_range = Point::from_ts_point(item_node.start_position())
4298 ..Point::from_ts_point(item_node.end_position());
4299
4300 let mut open_point = None;
4301 let mut close_point = None;
4302
4303 let mut buffer_ranges = Vec::new();
4304 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4305 let mut range = node.start_byte()..node.end_byte();
4306 let start = node.start_position();
4307 if node.end_position().row > start.row {
4308 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4309 }
4310
4311 if !range.is_empty() {
4312 buffer_ranges.push((range, node_is_name));
4313 }
4314 };
4315
4316 for capture in mat.captures {
4317 if capture.index == config.name_capture_ix {
4318 add_to_buffer_ranges(capture.node, true);
4319 } else if Some(capture.index) == config.context_capture_ix
4320 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4321 {
4322 add_to_buffer_ranges(capture.node, false);
4323 } else {
4324 if Some(capture.index) == config.open_capture_ix {
4325 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4326 } else if Some(capture.index) == config.close_capture_ix {
4327 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4328 }
4329 }
4330 }
4331
4332 if buffer_ranges.is_empty() {
4333 return None;
4334 }
4335 let source_range_for_text =
4336 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4337
4338 let mut text = String::new();
4339 let mut highlight_ranges = Vec::new();
4340 let mut name_ranges = Vec::new();
4341 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4342 let mut last_buffer_range_end = 0;
4343 for (buffer_range, is_name) in buffer_ranges {
4344 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4345 if space_added {
4346 text.push(' ');
4347 }
4348 let before_append_len = text.len();
4349 let mut offset = buffer_range.start;
4350 chunks.seek(buffer_range.clone());
4351 for mut chunk in chunks.by_ref() {
4352 if chunk.text.len() > buffer_range.end - offset {
4353 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4354 offset = buffer_range.end;
4355 } else {
4356 offset += chunk.text.len();
4357 }
4358 let style = chunk
4359 .syntax_highlight_id
4360 .zip(theme)
4361 .and_then(|(highlight, theme)| highlight.style(theme));
4362 if let Some(style) = style {
4363 let start = text.len();
4364 let end = start + chunk.text.len();
4365 highlight_ranges.push((start..end, style));
4366 }
4367 text.push_str(chunk.text);
4368 if offset >= buffer_range.end {
4369 break;
4370 }
4371 }
4372 if is_name {
4373 let after_append_len = text.len();
4374 let start = if space_added && !name_ranges.is_empty() {
4375 before_append_len - 1
4376 } else {
4377 before_append_len
4378 };
4379 name_ranges.push(start..after_append_len);
4380 }
4381 last_buffer_range_end = buffer_range.end;
4382 }
4383
4384 Some(OutlineItem {
4385 depth: 0, // We'll calculate the depth later
4386 range: item_point_range,
4387 source_range_for_text: source_range_for_text.to_point(self),
4388 text,
4389 highlight_ranges,
4390 name_ranges,
4391 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4392 annotation_range: None,
4393 })
4394 }
4395
4396 pub fn function_body_fold_ranges<T: ToOffset>(
4397 &self,
4398 within: Range<T>,
4399 ) -> impl Iterator<Item = Range<usize>> + '_ {
4400 self.text_object_ranges(within, TreeSitterOptions::default())
4401 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4402 }
4403
4404 /// For each grammar in the language, runs the provided
4405 /// [`tree_sitter::Query`] against the given range.
4406 pub fn matches(
4407 &self,
4408 range: Range<usize>,
4409 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4410 ) -> SyntaxMapMatches<'_> {
4411 self.syntax.matches(range, self, query)
4412 }
4413
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
    /// Hence, this may return more bracket pairs than the range itself contains.
    ///
    /// Chunks present in `known_chunks` are omitted.
    /// The resulting bracket match collections are not ordered.
4419 pub fn fetch_bracket_ranges(
4420 &self,
4421 range: Range<usize>,
4422 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4423 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4424 let mut all_bracket_matches = HashMap::default();
4425
4426 for chunk in self
4427 .tree_sitter_data
4428 .chunks
4429 .applicable_chunks(&[range.to_point(self)])
4430 {
4431 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4432 continue;
4433 }
4434 let chunk_range = chunk.anchor_range();
4435 let chunk_range = chunk_range.to_offset(&self);
4436
4437 if let Some(cached_brackets) =
4438 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4439 {
4440 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4441 continue;
4442 }
4443
4444 let mut all_brackets = Vec::new();
4445 let mut opens = Vec::new();
4446 let mut color_pairs = Vec::new();
4447
4448 let mut matches = self.syntax.matches_with_options(
4449 chunk_range.clone(),
4450 &self.text,
4451 TreeSitterOptions {
4452 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4453 max_start_depth: None,
4454 },
4455 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4456 );
4457 let configs = matches
4458 .grammars()
4459 .iter()
4460 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4461 .collect::<Vec<_>>();
4462
4463 while let Some(mat) = matches.peek() {
4464 let mut open = None;
4465 let mut close = None;
4466 let syntax_layer_depth = mat.depth;
4467 let config = configs[mat.grammar_index];
4468 let pattern = &config.patterns[mat.pattern_index];
4469 for capture in mat.captures {
4470 if capture.index == config.open_capture_ix {
4471 open = Some(capture.node.byte_range());
4472 } else if capture.index == config.close_capture_ix {
4473 close = Some(capture.node.byte_range());
4474 }
4475 }
4476
4477 matches.advance();
4478
4479 let Some((open_range, close_range)) = open.zip(close) else {
4480 continue;
4481 };
4482
4483 let bracket_range = open_range.start..=close_range.end;
4484 if !bracket_range.overlaps(&chunk_range) {
4485 continue;
4486 }
4487
4488 let index = all_brackets.len();
4489 all_brackets.push(BracketMatch {
4490 open_range: open_range.clone(),
4491 close_range: close_range.clone(),
4492 newline_only: pattern.newline_only,
4493 syntax_layer_depth,
4494 color_index: None,
4495 });
4496
                // Certain languages have "brackets" that are not really brackets, e.g. tags, and such a
                // bracket pair will match the entire tag with all of the text inside.
                // For now, avoid highlighting any pair where both brackets have more than a single character.
                // We need to colorize `<Element/>` bracket pairs, so this check cannot be made stricter.
4501 let should_color =
4502 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4503 if should_color {
4504 opens.push(open_range.clone());
4505 color_pairs.push((open_range, close_range, index));
4506 }
4507 }
4508
4509 opens.sort_by_key(|r| (r.start, r.end));
4510 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4511 color_pairs.sort_by_key(|(_, close, _)| close.end);
4512
4513 let mut open_stack = Vec::new();
4514 let mut open_index = 0;
4515 for (open, close, index) in color_pairs {
4516 while open_index < opens.len() && opens[open_index].start < close.start {
4517 open_stack.push(opens[open_index].clone());
4518 open_index += 1;
4519 }
4520
4521 if open_stack.last() == Some(&open) {
4522 let depth_index = open_stack.len() - 1;
4523 all_brackets[index].color_index = Some(depth_index);
4524 open_stack.pop();
4525 }
4526 }
4527
4528 all_brackets.sort_by_key(|bracket_match| {
4529 (bracket_match.open_range.start, bracket_match.open_range.end)
4530 });
4531
4532 if let empty_slot @ None =
4533 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4534 {
4535 *empty_slot = Some(all_brackets.clone());
4536 }
4537 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4538 }
4539
4540 all_bracket_matches
4541 }
4542
4543 pub fn all_bracket_ranges(
4544 &self,
4545 range: Range<usize>,
4546 ) -> impl Iterator<Item = BracketMatch<usize>> {
4547 self.fetch_bracket_ranges(range.clone(), None)
4548 .into_values()
4549 .flatten()
4550 .filter(move |bracket_match| {
4551 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4552 bracket_range.overlaps(&range)
4553 })
4554 }
4555
4556 /// Returns bracket range pairs overlapping or adjacent to `range`
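    ///
    /// A minimal sketch (illustrative only; `snapshot` and an offset range `range` are assumed):
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(range.clone()) {
    ///     // Each match exposes the byte ranges of the opening and closing brackets.
    ///     println!("open {:?}, close {:?}", pair.open_range, pair.close_range);
    /// }
    /// ```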
4557 pub fn bracket_ranges<T: ToOffset>(
4558 &self,
4559 range: Range<T>,
4560 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4561 // Find bracket pairs that *inclusively* contain the given range.
4562 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4563 self.all_bracket_ranges(range)
4564 .filter(|pair| !pair.newline_only)
4565 }
4566
4567 pub fn debug_variables_query<T: ToOffset>(
4568 &self,
4569 range: Range<T>,
4570 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4571 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4572
4573 let mut matches = self.syntax.matches_with_options(
4574 range.clone(),
4575 &self.text,
4576 TreeSitterOptions::default(),
4577 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4578 );
4579
4580 let configs = matches
4581 .grammars()
4582 .iter()
4583 .map(|grammar| grammar.debug_variables_config.as_ref())
4584 .collect::<Vec<_>>();
4585
4586 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4587
4588 iter::from_fn(move || {
4589 loop {
4590 while let Some(capture) = captures.pop() {
4591 if capture.0.overlaps(&range) {
4592 return Some(capture);
4593 }
4594 }
4595
4596 let mat = matches.peek()?;
4597
4598 let Some(config) = configs[mat.grammar_index].as_ref() else {
4599 matches.advance();
4600 continue;
4601 };
4602
4603 for capture in mat.captures {
4604 let Some(ix) = config
4605 .objects_by_capture_ix
4606 .binary_search_by_key(&capture.index, |e| e.0)
4607 .ok()
4608 else {
4609 continue;
4610 };
4611 let text_object = config.objects_by_capture_ix[ix].1;
4612 let byte_range = capture.node.byte_range();
4613
4614 let mut found = false;
4615 for (range, existing) in captures.iter_mut() {
4616 if existing == &text_object {
4617 range.start = range.start.min(byte_range.start);
4618 range.end = range.end.max(byte_range.end);
4619 found = true;
4620 break;
4621 }
4622 }
4623
4624 if !found {
4625 captures.push((byte_range, text_object));
4626 }
4627 }
4628
4629 matches.advance();
4630 }
4631 })
4632 }
4633
4634 pub fn text_object_ranges<T: ToOffset>(
4635 &self,
4636 range: Range<T>,
4637 options: TreeSitterOptions,
4638 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4639 let range =
4640 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4641
4642 let mut matches =
4643 self.syntax
4644 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4645 grammar.text_object_config.as_ref().map(|c| &c.query)
4646 });
4647
4648 let configs = matches
4649 .grammars()
4650 .iter()
4651 .map(|grammar| grammar.text_object_config.as_ref())
4652 .collect::<Vec<_>>();
4653
4654 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4655
4656 iter::from_fn(move || {
4657 loop {
4658 while let Some(capture) = captures.pop() {
4659 if capture.0.overlaps(&range) {
4660 return Some(capture);
4661 }
4662 }
4663
4664 let mat = matches.peek()?;
4665
4666 let Some(config) = configs[mat.grammar_index].as_ref() else {
4667 matches.advance();
4668 continue;
4669 };
4670
4671 for capture in mat.captures {
4672 let Some(ix) = config
4673 .text_objects_by_capture_ix
4674 .binary_search_by_key(&capture.index, |e| e.0)
4675 .ok()
4676 else {
4677 continue;
4678 };
4679 let text_object = config.text_objects_by_capture_ix[ix].1;
4680 let byte_range = capture.node.byte_range();
4681
4682 let mut found = false;
4683 for (range, existing) in captures.iter_mut() {
4684 if existing == &text_object {
4685 range.start = range.start.min(byte_range.start);
4686 range.end = range.end.max(byte_range.end);
4687 found = true;
4688 break;
4689 }
4690 }
4691
4692 if !found {
4693 captures.push((byte_range, text_object));
4694 }
4695 }
4696
4697 matches.advance();
4698 }
4699 })
4700 }
4701
4702 /// Returns enclosing bracket ranges containing the given range
4703 pub fn enclosing_bracket_ranges<T: ToOffset>(
4704 &self,
4705 range: Range<T>,
4706 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4707 let range = range.start.to_offset(self)..range.end.to_offset(self);
4708
4709 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4710 let max_depth = result
4711 .iter()
4712 .map(|mat| mat.syntax_layer_depth)
4713 .max()
4714 .unwrap_or(0);
4715 result.into_iter().filter(move |pair| {
4716 pair.open_range.start <= range.start
4717 && pair.close_range.end >= range.end
4718 && pair.syntax_layer_depth == max_depth
4719 })
4720 }
4721
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to filter which bracket ranges are considered.
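    ///
    /// A minimal sketch (illustrative only; `snapshot` and an offset range `selection` are
    /// assumed), restricting matches to single-character brackets:
    ///
    /// ```ignore
    /// let filter: &dyn Fn(Range<usize>, Range<usize>) -> bool =
    ///     &|open, close| open.len() == 1 && close.len() == 1;
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(selection, Some(filter))
    /// {
    ///     println!("innermost pair: {open:?} .. {close:?}");
    /// }
    /// ```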
4725 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4726 &self,
4727 range: Range<T>,
4728 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4729 ) -> Option<(Range<usize>, Range<usize>)> {
4730 let range = range.start.to_offset(self)..range.end.to_offset(self);
4731
4732 // Get the ranges of the innermost pair of brackets.
4733 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4734
4735 for pair in self.enclosing_bracket_ranges(range) {
4736 if let Some(range_filter) = range_filter
4737 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4738 {
4739 continue;
4740 }
4741
4742 let len = pair.close_range.end - pair.open_range.start;
4743
4744 if let Some((existing_open, existing_close)) = &result {
4745 let existing_len = existing_close.end - existing_open.start;
4746 if len > existing_len {
4747 continue;
4748 }
4749 }
4750
4751 result = Some((pair.open_range, pair.close_range));
4752 }
4753
4754 result
4755 }
4756
    /// Returns offset ranges for any matches of the redaction query.
4758 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4759 /// will be run on the relevant section of the buffer.
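    ///
    /// A minimal sketch (illustrative only; `snapshot` is an assumed `BufferSnapshot`):
    ///
    /// ```ignore
    /// // Collect the byte ranges that should be visually redacted (e.g. secret values).
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```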
4760 pub fn redacted_ranges<T: ToOffset>(
4761 &self,
4762 range: Range<T>,
4763 ) -> impl Iterator<Item = Range<usize>> + '_ {
4764 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4765 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4766 grammar
4767 .redactions_config
4768 .as_ref()
4769 .map(|config| &config.query)
4770 });
4771
4772 let configs = syntax_matches
4773 .grammars()
4774 .iter()
4775 .map(|grammar| grammar.redactions_config.as_ref())
4776 .collect::<Vec<_>>();
4777
4778 iter::from_fn(move || {
4779 let redacted_range = syntax_matches
4780 .peek()
4781 .and_then(|mat| {
4782 configs[mat.grammar_index].and_then(|config| {
4783 mat.captures
4784 .iter()
4785 .find(|capture| capture.index == config.redaction_capture_ix)
4786 })
4787 })
4788 .map(|mat| mat.node.byte_range());
4789 syntax_matches.advance();
4790 redacted_range
4791 })
4792 }
4793
4794 pub fn injections_intersecting_range<T: ToOffset>(
4795 &self,
4796 range: Range<T>,
4797 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4798 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4799
4800 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4801 grammar
4802 .injection_config
4803 .as_ref()
4804 .map(|config| &config.query)
4805 });
4806
4807 let configs = syntax_matches
4808 .grammars()
4809 .iter()
4810 .map(|grammar| grammar.injection_config.as_ref())
4811 .collect::<Vec<_>>();
4812
4813 iter::from_fn(move || {
4814 let ranges = syntax_matches.peek().and_then(|mat| {
4815 let config = &configs[mat.grammar_index]?;
4816 let content_capture_range = mat.captures.iter().find_map(|capture| {
4817 if capture.index == config.content_capture_ix {
4818 Some(capture.node.byte_range())
4819 } else {
4820 None
4821 }
4822 })?;
4823 let language = self.language_at(content_capture_range.start)?;
4824 Some((content_capture_range, language))
4825 });
4826 syntax_matches.advance();
4827 ranges
4828 })
4829 }
4830
4831 pub fn runnable_ranges(
4832 &self,
4833 offset_range: Range<usize>,
4834 ) -> impl Iterator<Item = RunnableRange> + '_ {
4835 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4836 grammar.runnable_config.as_ref().map(|config| &config.query)
4837 });
4838
4839 let test_configs = syntax_matches
4840 .grammars()
4841 .iter()
4842 .map(|grammar| grammar.runnable_config.as_ref())
4843 .collect::<Vec<_>>();
4844
4845 iter::from_fn(move || {
4846 loop {
4847 let mat = syntax_matches.peek()?;
4848
4849 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4850 let mut run_range = None;
4851 let full_range = mat.captures.iter().fold(
4852 Range {
4853 start: usize::MAX,
4854 end: 0,
4855 },
4856 |mut acc, next| {
4857 let byte_range = next.node.byte_range();
4858 if acc.start > byte_range.start {
4859 acc.start = byte_range.start;
4860 }
4861 if acc.end < byte_range.end {
4862 acc.end = byte_range.end;
4863 }
4864 acc
4865 },
4866 );
4867 if full_range.start > full_range.end {
4868 // We did not find a full spanning range of this match.
4869 return None;
4870 }
4871 let extra_captures: SmallVec<[_; 1]> =
4872 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4873 test_configs
4874 .extra_captures
4875 .get(capture.index as usize)
4876 .cloned()
4877 .and_then(|tag_name| match tag_name {
4878 RunnableCapture::Named(name) => {
4879 Some((capture.node.byte_range(), name))
4880 }
4881 RunnableCapture::Run => {
4882 let _ = run_range.insert(capture.node.byte_range());
4883 None
4884 }
4885 })
4886 }));
4887 let run_range = run_range?;
4888 let tags = test_configs
4889 .query
4890 .property_settings(mat.pattern_index)
4891 .iter()
4892 .filter_map(|property| {
4893 if *property.key == *"tag" {
4894 property
4895 .value
4896 .as_ref()
4897 .map(|value| RunnableTag(value.to_string().into()))
4898 } else {
4899 None
4900 }
4901 })
4902 .collect();
4903 let extra_captures = extra_captures
4904 .into_iter()
4905 .map(|(range, name)| {
4906 (
4907 name.to_string(),
4908 self.text_for_range(range).collect::<String>(),
4909 )
4910 })
4911 .collect();
4912 // All tags should have the same range.
4913 Some(RunnableRange {
4914 run_range,
4915 full_range,
4916 runnable: Runnable {
4917 tags,
4918 language: mat.language,
4919 buffer: self.remote_id(),
4920 },
4921 extra_captures,
4922 buffer_id: self.remote_id(),
4923 })
4924 });
4925
4926 syntax_matches.advance();
4927 if test_range.is_some() {
4928 // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
4929 // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
4930 return test_range;
4931 }
4932 }
4933 })
4934 }
4935
4936 /// Returns selections for remote peers intersecting the given range.
4937 #[allow(clippy::type_complexity)]
4938 pub fn selections_in_range(
4939 &self,
4940 range: Range<Anchor>,
4941 include_local: bool,
4942 ) -> impl Iterator<
4943 Item = (
4944 ReplicaId,
4945 bool,
4946 CursorShape,
4947 impl Iterator<Item = &Selection<Anchor>> + '_,
4948 ),
4949 > + '_ {
4950 self.remote_selections
4951 .iter()
4952 .filter(move |(replica_id, set)| {
4953 (include_local || **replica_id != self.text.replica_id())
4954 && !set.selections.is_empty()
4955 })
4956 .map(move |(replica_id, set)| {
4957 let start_ix = match set.selections.binary_search_by(|probe| {
4958 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4959 }) {
4960 Ok(ix) | Err(ix) => ix,
4961 };
4962 let end_ix = match set.selections.binary_search_by(|probe| {
4963 probe.start.cmp(&range.end, self).then(Ordering::Less)
4964 }) {
4965 Ok(ix) | Err(ix) => ix,
4966 };
4967
4968 (
4969 *replica_id,
4970 set.line_mode,
4971 set.cursor_shape,
4972 set.selections[start_ix..end_ix].iter(),
4973 )
4974 })
4975 }
4976
4977 /// Returns if the buffer contains any diagnostics.
4978 pub fn has_diagnostics(&self) -> bool {
4979 !self.diagnostics.is_empty()
4980 }
4981
4982 /// Returns all the diagnostics intersecting the given range.
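    ///
    /// A minimal sketch (illustrative only; `snapshot` is an assumed `BufferSnapshot`):
    ///
    /// ```ignore
    /// // Count the errors anywhere in the buffer, resolving diagnostic ranges to byte offsets.
    /// let error_count = snapshot
    ///     .diagnostics_in_range::<_, usize>(0..snapshot.len(), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .count();
    /// ```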
4983 pub fn diagnostics_in_range<'a, T, O>(
4984 &'a self,
4985 search_range: Range<T>,
4986 reversed: bool,
4987 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4988 where
4989 T: 'a + Clone + ToOffset,
4990 O: 'a + FromAnchor,
4991 {
4992 let mut iterators: Vec<_> = self
4993 .diagnostics
4994 .iter()
4995 .map(|(_, collection)| {
4996 collection
4997 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4998 .peekable()
4999 })
5000 .collect();
5001
5002 std::iter::from_fn(move || {
5003 let (next_ix, _) = iterators
5004 .iter_mut()
5005 .enumerate()
5006 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
5007 .min_by(|(_, a), (_, b)| {
5008 let cmp = a
5009 .range
5010 .start
5011 .cmp(&b.range.start, self)
5012 // when range is equal, sort by diagnostic severity
5013 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
5014 // and stabilize order with group_id
5015 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
5016 if reversed { cmp.reverse() } else { cmp }
5017 })?;
5018 iterators[next_ix]
5019 .next()
5020 .map(
5021 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
5022 diagnostic,
5023 range: FromAnchor::from_anchor(&range.start, self)
5024 ..FromAnchor::from_anchor(&range.end, self),
5025 },
5026 )
5027 })
5028 }
5029
5030 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
5031 /// should be used instead.
5032 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
5033 &self.diagnostics
5034 }
5035
5036 /// Returns all the diagnostic groups associated with the given
5037 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
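    ///
    /// A minimal sketch (illustrative only; `snapshot` is an assumed `BufferSnapshot`):
    ///
    /// ```ignore
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     // The primary entry is the diagnostic that the rest of the group relates to.
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{server_id:?}: {:?}", primary.range);
    /// }
    /// ```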
5039 pub fn diagnostic_groups(
5040 &self,
5041 language_server_id: Option<LanguageServerId>,
5042 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
5043 let mut groups = Vec::new();
5044
5045 if let Some(language_server_id) = language_server_id {
5046 if let Ok(ix) = self
5047 .diagnostics
5048 .binary_search_by_key(&language_server_id, |e| e.0)
5049 {
5050 self.diagnostics[ix]
5051 .1
5052 .groups(language_server_id, &mut groups, self);
5053 }
5054 } else {
5055 for (language_server_id, diagnostics) in self.diagnostics.iter() {
5056 diagnostics.groups(*language_server_id, &mut groups, self);
5057 }
5058 }
5059
5060 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
5061 let a_start = &group_a.entries[group_a.primary_ix].range.start;
5062 let b_start = &group_b.entries[group_b.primary_ix].range.start;
5063 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5064 });
5065
5066 groups
5067 }
5068
5069 /// Returns an iterator over the diagnostics for the given group.
5070 pub fn diagnostic_group<O>(
5071 &self,
5072 group_id: usize,
5073 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5074 where
5075 O: FromAnchor + 'static,
5076 {
5077 self.diagnostics
5078 .iter()
5079 .flat_map(move |(_, set)| set.group(group_id, self))
5080 }
5081
5082 /// An integer version number that accounts for all updates besides
5083 /// the buffer's text itself (which is versioned via a version vector).
5084 pub fn non_text_state_update_count(&self) -> usize {
5085 self.non_text_state_update_count
5086 }
5087
5088 /// An integer version that changes when the buffer's syntax changes.
5089 pub fn syntax_update_count(&self) -> usize {
5090 self.syntax.update_count()
5091 }
5092
5093 /// Returns a snapshot of underlying file.
5094 pub fn file(&self) -> Option<&Arc<dyn File>> {
5095 self.file.as_ref()
5096 }
5097
5098 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5099 if let Some(file) = self.file() {
5100 if file.path().file_name().is_none() || include_root {
5101 Some(file.full_path(cx).to_string_lossy().into_owned())
5102 } else {
5103 Some(file.path().display(file.path_style(cx)).to_string())
5104 }
5105 } else {
5106 None
5107 }
5108 }
5109
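    /// Collects the distinct words within `query.range`, keyed by their text and mapped to
    /// anchor ranges. A minimal sketch (illustrative only; `snapshot` is an assumed
    /// `BufferSnapshot`) that gathers words fuzzily matching "foo":
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("foo"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, range) in &words {
    ///     println!("{word}: {:?}", range.to_offset(&snapshot));
    /// }
    /// ```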
5110 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5111 let query_str = query.fuzzy_contents;
5112 if query_str.is_some_and(|query| query.is_empty()) {
5113 return BTreeMap::default();
5114 }
5115
5116 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5117 language,
5118 override_id: None,
5119 }));
5120
5121 let mut query_ix = 0;
5122 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5123 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5124
5125 let mut words = BTreeMap::default();
5126 let mut current_word_start_ix = None;
5127 let mut chunk_ix = query.range.start;
5128 for chunk in self.chunks(query.range, false) {
5129 for (i, c) in chunk.text.char_indices() {
5130 let ix = chunk_ix + i;
5131 if classifier.is_word(c) {
5132 if current_word_start_ix.is_none() {
5133 current_word_start_ix = Some(ix);
5134 }
5135
5136 if let Some(query_chars) = &query_chars
5137 && query_ix < query_len
5138 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5139 {
5140 query_ix += 1;
5141 }
5142 continue;
5143 } else if let Some(word_start) = current_word_start_ix.take()
5144 && query_ix == query_len
5145 {
5146 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5147 let mut word_text = self.text_for_range(word_start..ix).peekable();
5148 let first_char = word_text
5149 .peek()
5150 .and_then(|first_chunk| first_chunk.chars().next());
                // As a heuristic to reduce useless completions, optionally skip "words" that start with a digit.
5152 if !query.skip_digits
5153 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5154 {
5155 words.insert(word_text.collect(), word_range);
5156 }
5157 }
5158 query_ix = 0;
5159 }
5160 chunk_ix += chunk.text.len();
5161 }
5162
5163 words
5164 }
5165}
5166
5167pub struct WordsQuery<'a> {
5168 /// Only returns words with all chars from the fuzzy string in them.
5169 pub fuzzy_contents: Option<&'a str>,
5170 /// Skips words that start with a digit.
5171 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
5173 pub range: Range<usize>,
5174}
5175
5176fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5177 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5178}
5179
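/// Computes the size of a line's indentation from its leading whitespace characters.
/// A minimal sketch of the expected behavior (illustrative only, not compiled as a doctest):
///
/// ```ignore
/// let indent = indent_size_for_text("\t\tfn main() {}".chars());
/// // `indent.kind` is `IndentKind::Tab` and `indent.len` is 2.
/// ```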
5180fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5181 let mut result = IndentSize::spaces(0);
5182 for c in text {
5183 let kind = match c {
5184 ' ' => IndentKind::Space,
5185 '\t' => IndentKind::Tab,
5186 _ => break,
5187 };
5188 if result.len == 0 {
5189 result.kind = kind;
5190 }
5191 result.len += 1;
5192 }
5193 result
5194}
5195
5196impl Clone for BufferSnapshot {
5197 fn clone(&self) -> Self {
5198 Self {
5199 text: self.text.clone(),
5200 syntax: self.syntax.clone(),
5201 file: self.file.clone(),
5202 remote_selections: self.remote_selections.clone(),
5203 diagnostics: self.diagnostics.clone(),
5204 language: self.language.clone(),
5205 tree_sitter_data: self.tree_sitter_data.clone(),
5206 non_text_state_update_count: self.non_text_state_update_count,
5207 capability: self.capability,
5208 }
5209 }
5210}
5211
5212impl Deref for BufferSnapshot {
5213 type Target = text::BufferSnapshot;
5214
5215 fn deref(&self) -> &Self::Target {
5216 &self.text
5217 }
5218}
5219
5220unsafe impl Send for BufferChunks<'_> {}
5221
5222impl<'a> BufferChunks<'a> {
5223 pub(crate) fn new(
5224 text: &'a Rope,
5225 range: Range<usize>,
5226 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5227 diagnostics: bool,
5228 buffer_snapshot: Option<&'a BufferSnapshot>,
5229 ) -> Self {
5230 let mut highlights = None;
5231 if let Some((captures, highlight_maps)) = syntax {
5232 highlights = Some(BufferChunkHighlights {
5233 captures,
5234 next_capture: None,
5235 stack: Default::default(),
5236 highlight_maps,
5237 })
5238 }
5239
5240 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5241 let chunks = text.chunks_in_range(range.clone());
5242
5243 let mut this = BufferChunks {
5244 range,
5245 buffer_snapshot,
5246 chunks,
5247 diagnostic_endpoints,
5248 error_depth: 0,
5249 warning_depth: 0,
5250 information_depth: 0,
5251 hint_depth: 0,
5252 unnecessary_depth: 0,
5253 underline: true,
5254 highlights,
5255 };
5256 this.initialize_diagnostic_endpoints();
5257 this
5258 }
5259
    /// Seeks to the given byte range in the buffer.
5261 pub fn seek(&mut self, range: Range<usize>) {
5262 let old_range = std::mem::replace(&mut self.range, range.clone());
5263 self.chunks.set_range(self.range.clone());
5264 if let Some(highlights) = self.highlights.as_mut() {
5265 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5266 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5267 highlights
5268 .stack
5269 .retain(|(end_offset, _)| *end_offset > range.start);
5270 if let Some(capture) = &highlights.next_capture
5271 && range.start >= capture.node.start_byte()
5272 {
5273 let next_capture_end = capture.node.end_byte();
5274 if range.start < next_capture_end {
5275 highlights.stack.push((
5276 next_capture_end,
5277 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5278 ));
5279 }
5280 highlights.next_capture.take();
5281 }
5282 } else if let Some(snapshot) = self.buffer_snapshot {
5283 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5284 *highlights = BufferChunkHighlights {
5285 captures,
5286 next_capture: None,
5287 stack: Default::default(),
5288 highlight_maps,
5289 };
5290 } else {
5291 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5292 // Seeking such BufferChunks is not supported.
5293 debug_assert!(
5294 false,
5295 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5296 );
5297 }
5298
5299 highlights.captures.set_byte_range(self.range.clone());
5300 self.initialize_diagnostic_endpoints();
5301 }
5302 }
5303
5304 fn initialize_diagnostic_endpoints(&mut self) {
5305 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5306 && let Some(buffer) = self.buffer_snapshot
5307 {
5308 let mut diagnostic_endpoints = Vec::new();
5309 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5310 diagnostic_endpoints.push(DiagnosticEndpoint {
5311 offset: entry.range.start,
5312 is_start: true,
5313 severity: entry.diagnostic.severity,
5314 is_unnecessary: entry.diagnostic.is_unnecessary,
5315 underline: entry.diagnostic.underline,
5316 });
5317 diagnostic_endpoints.push(DiagnosticEndpoint {
5318 offset: entry.range.end,
5319 is_start: false,
5320 severity: entry.diagnostic.severity,
5321 is_unnecessary: entry.diagnostic.is_unnecessary,
5322 underline: entry.diagnostic.underline,
5323 });
5324 }
5325 diagnostic_endpoints
5326 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5327 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5328 self.hint_depth = 0;
5329 self.error_depth = 0;
5330 self.warning_depth = 0;
5331 self.information_depth = 0;
5332 }
5333 }
5334
5335 /// The current byte offset in the buffer.
5336 pub fn offset(&self) -> usize {
5337 self.range.start
5338 }
5339
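    /// The byte range that remains to be yielded by this iterator.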
5340 pub fn range(&self) -> Range<usize> {
5341 self.range.clone()
5342 }
5343
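    /// Adjusts the per-severity depth counters as a diagnostic endpoint is
    /// crossed: a start endpoint increments the counter for its severity and
    /// an end endpoint decrements it, so overlapping diagnostics nest
    /// correctly.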
5344 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5345 let depth = match endpoint.severity {
5346 DiagnosticSeverity::ERROR => &mut self.error_depth,
5347 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5348 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5349 DiagnosticSeverity::HINT => &mut self.hint_depth,
5350 _ => return,
5351 };
5352 if endpoint.is_start {
5353 *depth += 1;
5354 } else {
5355 *depth -= 1;
5356 }
5357
5358 if endpoint.is_unnecessary {
5359 if endpoint.is_start {
5360 self.unnecessary_depth += 1;
5361 } else {
5362 self.unnecessary_depth -= 1;
5363 }
5364 }
5365 }
5366
5367 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5368 if self.error_depth > 0 {
5369 Some(DiagnosticSeverity::ERROR)
5370 } else if self.warning_depth > 0 {
5371 Some(DiagnosticSeverity::WARNING)
5372 } else if self.information_depth > 0 {
5373 Some(DiagnosticSeverity::INFORMATION)
5374 } else if self.hint_depth > 0 {
5375 Some(DiagnosticSeverity::HINT)
5376 } else {
5377 None
5378 }
5379 }
5380
5381 fn current_code_is_unnecessary(&self) -> bool {
5382 self.unnecessary_depth > 0
5383 }
5384}
5385
5386impl<'a> Iterator for BufferChunks<'a> {
5387 type Item = Chunk<'a>;
5388
5389 fn next(&mut self) -> Option<Self::Item> {
5390 let mut next_capture_start = usize::MAX;
5391 let mut next_diagnostic_endpoint = usize::MAX;
5392
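        // Drop highlight scopes that have already ended, then consume captures
        // that begin at or before the current offset, pushing their highlight
        // ids onto the stack. The first capture that begins later bounds the
        // next chunk.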
5393 if let Some(highlights) = self.highlights.as_mut() {
5394 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5395 if *parent_capture_end <= self.range.start {
5396 highlights.stack.pop();
5397 } else {
5398 break;
5399 }
5400 }
5401
5402 if highlights.next_capture.is_none() {
5403 highlights.next_capture = highlights.captures.next();
5404 }
5405
5406 while let Some(capture) = highlights.next_capture.as_ref() {
5407 if self.range.start < capture.node.start_byte() {
5408 next_capture_start = capture.node.start_byte();
5409 break;
5410 } else {
5411 let highlight_id =
5412 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5413 highlights
5414 .stack
5415 .push((capture.node.end_byte(), highlight_id));
5416 highlights.next_capture = highlights.captures.next();
5417 }
5418 }
5419 }
5420
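        // Temporarily take the endpoint iterator so that `self` can be borrowed
        // mutably while applying every endpoint at or before the current offset
        // to the severity depths.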
5421 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5422 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5423 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5424 if endpoint.offset <= self.range.start {
5425 self.update_diagnostic_depths(endpoint);
5426 diagnostic_endpoints.next();
5427 self.underline = endpoint.underline;
5428 } else {
5429 next_diagnostic_endpoint = endpoint.offset;
5430 break;
5431 }
5432 }
5433 }
5434 self.diagnostic_endpoints = diagnostic_endpoints;
5435
5436 if let Some(ChunkBitmaps {
5437 text: chunk,
5438 chars: chars_map,
5439 tabs,
5440 }) = self.chunks.peek_with_bitmaps()
5441 {
5442 let chunk_start = self.range.start;
5443 let mut chunk_end = (self.chunks.offset() + chunk.len())
5444 .min(next_capture_start)
5445 .min(next_diagnostic_endpoint);
5446 let mut highlight_id = None;
5447 if let Some(highlights) = self.highlights.as_ref()
5448 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5449 {
5450 chunk_end = chunk_end.min(*parent_capture_end);
5451 highlight_id = Some(*parent_highlight_id);
5452 }
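            // The chunk's `tabs` and `chars` bitmaps are indexed relative to the
            // start of the underlying rope chunk, so shift and mask them down to
            // the sub-slice that is actually being emitted.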
5453 let bit_start = chunk_start - self.chunks.offset();
5454 let bit_end = chunk_end - self.chunks.offset();
5455
5456 let slice = &chunk[bit_start..bit_end];
5457
5458 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5459 let tabs = (tabs >> bit_start) & mask;
5460 let chars = (chars_map >> bit_start) & mask;
5461
5462 self.range.start = chunk_end;
5463 if self.range.start == self.chunks.offset() + chunk.len() {
5464 self.chunks.next().unwrap();
5465 }
5466
5467 Some(Chunk {
5468 text: slice,
5469 syntax_highlight_id: highlight_id,
5470 underline: self.underline,
5471 diagnostic_severity: self.current_diagnostic_severity(),
5472 is_unnecessary: self.current_code_is_unnecessary(),
5473 tabs,
5474 chars,
5475 ..Chunk::default()
5476 })
5477 } else {
5478 None
5479 }
5480 }
5481}
5482
5483impl operation_queue::Operation for Operation {
5484 fn lamport_timestamp(&self) -> clock::Lamport {
5485 match self {
5486 Operation::Buffer(_) => {
5487 unreachable!("buffer operations should never be deferred at this layer")
5488 }
5489 Operation::UpdateDiagnostics {
5490 lamport_timestamp, ..
5491 }
5492 | Operation::UpdateSelections {
5493 lamport_timestamp, ..
5494 }
5495 | Operation::UpdateCompletionTriggers {
5496 lamport_timestamp, ..
5497 }
5498 | Operation::UpdateLineEnding {
5499 lamport_timestamp, ..
5500 } => *lamport_timestamp,
5501 }
5502 }
5503}
5504
5505impl Default for Diagnostic {
5506 fn default() -> Self {
5507 Self {
5508 source: Default::default(),
5509 source_kind: DiagnosticSourceKind::Other,
5510 code: None,
5511 code_description: None,
5512 severity: DiagnosticSeverity::ERROR,
5513 message: Default::default(),
5514 markdown: None,
5515 group_id: 0,
5516 is_primary: false,
5517 is_disk_based: false,
5518 is_unnecessary: false,
5519 underline: true,
5520 data: None,
5521 registration_id: None,
5522 }
5523 }
5524}
5525
5526impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5528 pub fn spaces(len: u32) -> Self {
5529 Self {
5530 len,
5531 kind: IndentKind::Space,
5532 }
5533 }
5534
5535 /// Returns an [`IndentSize`] representing a tab.
5536 pub fn tab() -> Self {
5537 Self {
5538 len: 1,
5539 kind: IndentKind::Tab,
5540 }
5541 }
5542
5543 /// An iterator over the characters represented by this [`IndentSize`].
5544 pub fn chars(&self) -> impl Iterator<Item = char> {
5545 iter::repeat(self.char()).take(self.len as usize)
5546 }
5547
5548 /// The character representation of this [`IndentSize`].
5549 pub fn char(&self) -> char {
5550 match self.kind {
5551 IndentKind::Space => ' ',
5552 IndentKind::Tab => '\t',
5553 }
5554 }
5555
    /// Consumes this [`IndentSize`] and returns a new one that has been
    /// shrunk or enlarged by the given size in the given direction.
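    ///
    /// For example, growing a four-space indent by another four spaces yields
    /// an eight-space indent, while shrinking it by a tab leaves it unchanged
    /// because the indent kinds differ.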
5558 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5559 match direction {
5560 Ordering::Less => {
5561 if self.kind == size.kind && self.len >= size.len {
5562 self.len -= size.len;
5563 }
5564 }
5565 Ordering::Equal => {}
5566 Ordering::Greater => {
5567 if self.len == 0 {
5568 self = size;
5569 } else if self.kind == size.kind {
5570 self.len += size.len;
5571 }
5572 }
5573 }
5574 self
5575 }
5576
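    /// The width of this indent in columns, counting each tab as `tab_size`
    /// columns.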
5577 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5578 match self.kind {
5579 IndentKind::Space => self.len as usize,
5580 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5581 }
5582 }
5583}
5584
5585#[cfg(any(test, feature = "test-support"))]
5586pub struct TestFile {
5587 pub path: Arc<RelPath>,
5588 pub root_name: String,
5589 pub local_root: Option<PathBuf>,
5590}
5591
5592#[cfg(any(test, feature = "test-support"))]
5593impl File for TestFile {
5594 fn path(&self) -> &Arc<RelPath> {
5595 &self.path
5596 }
5597
5598 fn full_path(&self, _: &gpui::App) -> PathBuf {
5599 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5600 }
5601
5602 fn as_local(&self) -> Option<&dyn LocalFile> {
5603 if self.local_root.is_some() {
5604 Some(self)
5605 } else {
5606 None
5607 }
5608 }
5609
5610 fn disk_state(&self) -> DiskState {
5611 unimplemented!()
5612 }
5613
5614 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5615 self.path().file_name().unwrap_or(self.root_name.as_ref())
5616 }
5617
5618 fn worktree_id(&self, _: &App) -> WorktreeId {
5619 WorktreeId::from_usize(0)
5620 }
5621
5622 fn to_proto(&self, _: &App) -> rpc::proto::File {
5623 unimplemented!()
5624 }
5625
5626 fn is_private(&self) -> bool {
5627 false
5628 }
5629
5630 fn path_style(&self, _cx: &App) -> PathStyle {
5631 PathStyle::local()
5632 }
5633}
5634
5635#[cfg(any(test, feature = "test-support"))]
5636impl LocalFile for TestFile {
5637 fn abs_path(&self, _cx: &App) -> PathBuf {
5638 PathBuf::from(self.local_root.as_ref().unwrap())
5639 .join(&self.root_name)
5640 .join(self.path.as_std_path())
5641 }
5642
5643 fn load(&self, _cx: &App) -> Task<Result<String>> {
5644 unimplemented!()
5645 }
5646
5647 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5648 unimplemented!()
5649 }
5650}
5651
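/// Groups consecutive values from the given iterator into half-open ranges,
/// starting a new range whenever a value is not exactly one greater than the
/// previous value or the current range has reached `max_len`.
///
/// For example, `[1, 2, 3, 5]` with a large `max_len` yields `1..4` and `5..6`.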
5652pub(crate) fn contiguous_ranges(
5653 values: impl Iterator<Item = u32>,
5654 max_len: usize,
5655) -> impl Iterator<Item = Range<u32>> {
5656 let mut values = values;
5657 let mut current_range: Option<Range<u32>> = None;
5658 std::iter::from_fn(move || {
5659 loop {
5660 if let Some(value) = values.next() {
5661 if let Some(range) = &mut current_range
5662 && value == range.end
5663 && range.len() < max_len
5664 {
5665 range.end += 1;
5666 continue;
5667 }
5668
5669 let prev_range = current_range.clone();
5670 current_range = Some(value..(value + 1));
5671 if prev_range.is_some() {
5672 return prev_range;
5673 }
5674 } else {
5675 return current_range.take();
5676 }
5677 }
5678 })
5679}
5680
5681#[derive(Default, Debug)]
5682pub struct CharClassifier {
5683 scope: Option<LanguageScope>,
5684 scope_context: Option<CharScopeContext>,
5685 ignore_punctuation: bool,
5686}
5687
5688impl CharClassifier {
5689 pub fn new(scope: Option<LanguageScope>) -> Self {
5690 Self {
5691 scope,
5692 scope_context: None,
5693 ignore_punctuation: false,
5694 }
5695 }
5696
5697 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5698 Self {
5699 scope_context,
5700 ..self
5701 }
5702 }
5703
5704 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5705 Self {
5706 ignore_punctuation,
5707 ..self
5708 }
5709 }
5710
5711 pub fn is_whitespace(&self, c: char) -> bool {
5712 self.kind(c) == CharKind::Whitespace
5713 }
5714
5715 pub fn is_word(&self, c: char) -> bool {
5716 self.kind(c) == CharKind::Word
5717 }
5718
5719 pub fn is_punctuation(&self, c: char) -> bool {
5720 self.kind(c) == CharKind::Punctuation
5721 }
5722
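    /// Classifies a character, checking in order: alphanumerics and `_` are
    /// always [`CharKind::Word`]; characters in the language scope's word (or
    /// completion/linked-edit) character set are also words; whitespace is
    /// [`CharKind::Whitespace`]; anything else is [`CharKind::Punctuation`],
    /// or [`CharKind::Word`] when `ignore_punctuation` is true.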
5723 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5724 if c.is_alphanumeric() || c == '_' {
5725 return CharKind::Word;
5726 }
5727
5728 if let Some(scope) = &self.scope {
5729 let characters = match self.scope_context {
5730 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5731 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5732 None => scope.word_characters(),
5733 };
5734 if let Some(characters) = characters
5735 && characters.contains(&c)
5736 {
5737 return CharKind::Word;
5738 }
5739 }
5740
5741 if c.is_whitespace() {
5742 return CharKind::Whitespace;
5743 }
5744
5745 if ignore_punctuation {
5746 CharKind::Word
5747 } else {
5748 CharKind::Punctuation
5749 }
5750 }
5751
5752 pub fn kind(&self, c: char) -> CharKind {
5753 self.kind_with(c, self.ignore_punctuation)
5754 }
5755}
5756
5757/// Find all of the ranges of whitespace that occur at the ends of lines
5758/// in the given rope.
5759///
5760/// This could also be done with a regex search, but this implementation
5761/// avoids copying text.
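///
/// For example, `"a \nb\t\n"` yields the ranges `1..2` (the space after `a`)
/// and `4..5` (the tab after `b`).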
5762pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5763 let mut ranges = Vec::new();
5764
5765 let mut offset = 0;
5766 let mut prev_chunk_trailing_whitespace_range = 0..0;
5767 for chunk in rope.chunks() {
5768 let mut prev_line_trailing_whitespace_range = 0..0;
5769 for (i, line) in chunk.split('\n').enumerate() {
5770 let line_end_offset = offset + line.len();
5771 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5772 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5773
5774 if i == 0 && trimmed_line_len == 0 {
5775 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5776 }
5777 if !prev_line_trailing_whitespace_range.is_empty() {
5778 ranges.push(prev_line_trailing_whitespace_range);
5779 }
5780
5781 offset = line_end_offset + 1;
5782 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5783 }
5784
5785 offset -= 1;
5786 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5787 }
5788
5789 if !prev_chunk_trailing_whitespace_range.is_empty() {
5790 ranges.push(prev_chunk_trailing_whitespace_range);
5791 }
5792
5793 ranges
5794}