1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff_with_offsets,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::channel::oneshot;
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TaskLabel, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::{Arc, LazyLock},
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
79/// A label for the background task spawned by the buffer to compute
80/// a diff against the contents of its file.
81pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
82
/// Indicates whether a [`Buffer`] has permission to edit.
84#[derive(PartialEq, Clone, Copy, Debug)]
85pub enum Capability {
86 /// The buffer is a mutable replica.
87 ReadWrite,
88 /// The buffer is a mutable replica, but toggled to be only readable.
89 Read,
90 /// The buffer is a read-only replica.
91 ReadOnly,
92}
93
94impl Capability {
95 /// Returns `true` if the capability is `ReadWrite`.
96 pub fn editable(self) -> bool {
97 matches!(self, Capability::ReadWrite)
98 }
99}
100
101pub type BufferRow = u32;
102
103/// An in-memory representation of a source code file, including its text,
104/// syntax trees, git status, and diagnostics.
105pub struct Buffer {
106 text: TextBuffer,
107 branch_state: Option<BufferBranchState>,
108 /// Filesystem state, `None` when there is no path.
109 file: Option<Arc<dyn File>>,
110 /// The mtime of the file when this buffer was last loaded from
111 /// or saved to disk.
112 saved_mtime: Option<MTime>,
113 /// The version vector when this buffer was last loaded from
114 /// or saved to disk.
115 saved_version: clock::Global,
116 preview_version: clock::Global,
117 transaction_depth: usize,
118 was_dirty_before_starting_transaction: Option<bool>,
119 reload_task: Option<Task<Result<()>>>,
120 language: Option<Arc<Language>>,
121 autoindent_requests: Vec<Arc<AutoindentRequest>>,
122 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
123 pending_autoindent: Option<Task<()>>,
124 sync_parse_timeout: Option<Duration>,
125 syntax_map: Mutex<SyntaxMap>,
126 reparse: Option<Task<()>>,
127 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
128 non_text_state_update_count: usize,
129 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
130 remote_selections: TreeMap<ReplicaId, SelectionSet>,
131 diagnostics_timestamp: clock::Lamport,
132 completion_triggers: BTreeSet<String>,
133 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
134 completion_triggers_timestamp: clock::Lamport,
135 deferred_ops: OperationQueue<Operation>,
136 capability: Capability,
137 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` as of the most recent call.
140 has_unsaved_edits: Cell<(clock::Global, bool)>,
141 change_bits: Vec<rc::Weak<Cell<bool>>>,
142 _subscriptions: Vec<gpui::Subscription>,
143 tree_sitter_data: Arc<TreeSitterData>,
144 encoding: &'static Encoding,
145 has_bom: bool,
146}
147
148#[derive(Debug)]
149pub struct TreeSitterData {
150 chunks: RowChunks,
151 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
152}
153
154const MAX_ROWS_IN_A_CHUNK: u32 = 50;
155
156impl TreeSitterData {
157 fn clear(&mut self, snapshot: text::BufferSnapshot) {
158 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
159 self.brackets_by_chunks.get_mut().clear();
160 self.brackets_by_chunks
161 .get_mut()
162 .resize(self.chunks.len(), None);
163 }
164
165 fn new(snapshot: text::BufferSnapshot) -> Self {
166 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
167 Self {
168 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
169 chunks,
170 }
171 }
172
173 fn version(&self) -> &clock::Global {
174 self.chunks.version()
175 }
176}
177
178#[derive(Copy, Clone, Debug, PartialEq, Eq)]
179pub enum ParseStatus {
180 Idle,
181 Parsing,
182}
183
184struct BufferBranchState {
185 base_buffer: Entity<Buffer>,
186 merged_operations: Vec<Lamport>,
187}
188
189/// An immutable, cheaply cloneable representation of a fixed
190/// state of a buffer.
191pub struct BufferSnapshot {
192 pub text: text::BufferSnapshot,
193 pub syntax: SyntaxSnapshot,
194 file: Option<Arc<dyn File>>,
195 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
196 remote_selections: TreeMap<ReplicaId, SelectionSet>,
197 language: Option<Arc<Language>>,
198 non_text_state_update_count: usize,
199 tree_sitter_data: Arc<TreeSitterData>,
200 pub capability: Capability,
201}
202
203/// The kind and amount of indentation in a particular line. For now,
204/// assumes that indentation is all the same character.
205#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
206pub struct IndentSize {
207 /// The number of bytes that comprise the indentation.
208 pub len: u32,
209 /// The kind of whitespace used for indentation.
210 pub kind: IndentKind,
211}
212
213/// A whitespace character that's used for indentation.
214#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
215pub enum IndentKind {
216 /// An ASCII space character.
217 #[default]
218 Space,
219 /// An ASCII tab character.
220 Tab,
221}
222
223/// The shape of a selection cursor.
224#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
225pub enum CursorShape {
226 /// A vertical bar
227 #[default]
228 Bar,
229 /// A block that surrounds the following character
230 Block,
231 /// An underline that runs along the following character
232 Underline,
233 /// A box drawn around the following character
234 Hollow,
235}
236
237impl From<settings::CursorShape> for CursorShape {
238 fn from(shape: settings::CursorShape) -> Self {
239 match shape {
240 settings::CursorShape::Bar => CursorShape::Bar,
241 settings::CursorShape::Block => CursorShape::Block,
242 settings::CursorShape::Underline => CursorShape::Underline,
243 settings::CursorShape::Hollow => CursorShape::Hollow,
244 }
245 }
246}
247
248#[derive(Clone, Debug)]
249struct SelectionSet {
250 line_mode: bool,
251 cursor_shape: CursorShape,
252 selections: Arc<[Selection<Anchor>]>,
253 lamport_timestamp: clock::Lamport,
254}
255
256/// A diagnostic associated with a certain range of a buffer.
257#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
258pub struct Diagnostic {
259 /// The name of the service that produced this diagnostic.
260 pub source: Option<String>,
261 /// The ID provided by the dynamic registration that produced this diagnostic.
262 pub registration_id: Option<SharedString>,
263 /// A machine-readable code that identifies this diagnostic.
264 pub code: Option<NumberOrString>,
    /// A URL that provides more information about this diagnostic's code.
    pub code_description: Option<lsp::Uri>,
266 /// Whether this diagnostic is a hint, warning, or error.
267 pub severity: DiagnosticSeverity,
268 /// The human-readable message associated with this diagnostic.
269 pub message: String,
    /// The human-readable message, in Markdown format, if available.
271 pub markdown: Option<String>,
272 /// An id that identifies the group to which this diagnostic belongs.
273 ///
274 /// When a language server produces a diagnostic with
275 /// one or more associated diagnostics, those diagnostics are all
276 /// assigned a single group ID.
277 pub group_id: usize,
278 /// Whether this diagnostic is the primary diagnostic for its group.
279 ///
280 /// In a given group, the primary diagnostic is the top-level diagnostic
281 /// returned by the language server. The non-primary diagnostics are the
282 /// associated diagnostics.
283 pub is_primary: bool,
284 /// Whether this diagnostic is considered to originate from an analysis of
285 /// files on disk, as opposed to any unsaved buffer contents. This is a
286 /// property of a given diagnostic source, and is configured for a given
287 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
288 /// for the language server.
289 pub is_disk_based: bool,
290 /// Whether this diagnostic marks unnecessary code.
291 pub is_unnecessary: bool,
    /// A quick way to separate diagnostic groups by their source.
293 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. It is passed back to the language server when code actions are requested for this diagnostic.
295 pub data: Option<Value>,
296 /// Whether to underline the corresponding text range in the editor.
297 pub underline: bool,
298}
299
300#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
301pub enum DiagnosticSourceKind {
302 Pulled,
303 Pushed,
304 Other,
305}
306
307/// An operation used to synchronize this buffer with its other replicas.
308#[derive(Clone, Debug, PartialEq)]
309pub enum Operation {
310 /// A text operation.
311 Buffer(text::Operation),
312
313 /// An update to the buffer's diagnostics.
314 UpdateDiagnostics {
315 /// The id of the language server that produced the new diagnostics.
316 server_id: LanguageServerId,
317 /// The diagnostics.
318 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
319 /// The buffer's lamport timestamp.
320 lamport_timestamp: clock::Lamport,
321 },
322
323 /// An update to the most recent selections in this buffer.
324 UpdateSelections {
325 /// The selections.
326 selections: Arc<[Selection<Anchor>]>,
327 /// The buffer's lamport timestamp.
328 lamport_timestamp: clock::Lamport,
329 /// Whether the selections are in 'line mode'.
330 line_mode: bool,
331 /// The [`CursorShape`] associated with these selections.
332 cursor_shape: CursorShape,
333 },
334
335 /// An update to the characters that should trigger autocompletion
336 /// for this buffer.
337 UpdateCompletionTriggers {
338 /// The characters that trigger autocompletion.
339 triggers: Vec<String>,
340 /// The buffer's lamport timestamp.
341 lamport_timestamp: clock::Lamport,
342 /// The language server ID.
343 server_id: LanguageServerId,
344 },
345
346 /// An update to the line ending type of this buffer.
347 UpdateLineEnding {
348 /// The line ending type.
349 line_ending: LineEnding,
350 /// The buffer's lamport timestamp.
351 lamport_timestamp: clock::Lamport,
352 },
353}
354
355/// An event that occurs in a buffer.
356#[derive(Clone, Debug, PartialEq)]
357pub enum BufferEvent {
358 /// The buffer was changed in a way that must be
359 /// propagated to its other replicas.
360 Operation {
361 operation: Operation,
362 is_local: bool,
363 },
364 /// The buffer was edited.
365 Edited,
366 /// The buffer's `dirty` bit changed.
367 DirtyChanged,
368 /// The buffer was saved.
369 Saved,
370 /// The buffer's file was changed on disk.
371 FileHandleChanged,
372 /// The buffer was reloaded.
373 Reloaded,
    /// The buffer needs to be reloaded.
375 ReloadNeeded,
376 /// The buffer's language was changed.
377 /// The boolean indicates whether this buffer did not have a language before, but does now.
378 LanguageChanged(bool),
379 /// The buffer's syntax trees were updated.
380 Reparsed,
381 /// The buffer's diagnostics were updated.
382 DiagnosticsUpdated,
383 /// The buffer gained or lost editing capabilities.
384 CapabilityChanged,
385}
386
387/// The file associated with a buffer.
388pub trait File: Send + Sync + Any {
389 /// Returns the [`LocalFile`] associated with this file, if the
390 /// file is local.
391 fn as_local(&self) -> Option<&dyn LocalFile>;
392
393 /// Returns whether this file is local.
394 fn is_local(&self) -> bool {
395 self.as_local().is_some()
396 }
397
398 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
399 /// only available in some states, such as modification time.
400 fn disk_state(&self) -> DiskState;
401
402 /// Returns the path of this file relative to the worktree's root directory.
403 fn path(&self) -> &Arc<RelPath>;
404
405 /// Returns the path of this file relative to the worktree's parent directory (this means it
406 /// includes the name of the worktree's root folder).
407 fn full_path(&self, cx: &App) -> PathBuf;
408
409 /// Returns the path style of this file.
410 fn path_style(&self, cx: &App) -> PathStyle;
411
412 /// Returns the last component of this handle's absolute path. If this handle refers to the root
413 /// of its worktree, then this method will return the name of the worktree itself.
414 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
415
416 /// Returns the id of the worktree to which this file belongs.
417 ///
418 /// This is needed for looking up project-specific settings.
419 fn worktree_id(&self, cx: &App) -> WorktreeId;
420
421 /// Converts this file into a protobuf message.
422 fn to_proto(&self, cx: &App) -> rpc::proto::File;
423
424 /// Return whether Zed considers this to be a private file.
425 fn is_private(&self) -> bool;
426}
427
428/// The file's storage status - whether it's stored (`Present`), and if so when it was last
429/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
430/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
431/// indicator for new files.
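///
/// A minimal sketch of how a caller might branch on the disk state; the `file`
/// variable is illustrative and stands in for any [`File`] handle:
///
/// ```ignore
/// match file.disk_state() {
///     DiskState::New => println!("never saved to disk"),
///     DiskState::Present { mtime } => println!("on disk, modified at {mtime:?}"),
///     DiskState::Deleted => println!("was on disk, now deleted"),
///     DiskState::Historic { was_deleted } => println!("historical copy (deleted: {was_deleted})"),
/// }
/// ```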
432#[derive(Copy, Clone, Debug, PartialEq)]
433pub enum DiskState {
434 /// File created in Zed that has not been saved.
435 New,
436 /// File present on the filesystem.
437 Present { mtime: MTime },
438 /// Deleted file that was previously present.
439 Deleted,
    /// An old version of a file that was previously present,
    /// usually from a version control system (e.g. a Git blob).
442 Historic { was_deleted: bool },
443}
444
445impl DiskState {
446 /// Returns the file's last known modification time on disk.
447 pub fn mtime(self) -> Option<MTime> {
448 match self {
449 DiskState::New => None,
450 DiskState::Present { mtime } => Some(mtime),
451 DiskState::Deleted => None,
452 DiskState::Historic { .. } => None,
453 }
454 }
455
    /// Returns whether the file currently exists on disk.
    pub fn exists(&self) -> bool {
457 match self {
458 DiskState::New => false,
459 DiskState::Present { .. } => true,
460 DiskState::Deleted => false,
461 DiskState::Historic { .. } => false,
462 }
463 }
464
465 /// Returns true if this state represents a deleted file.
466 pub fn is_deleted(&self) -> bool {
467 match self {
468 DiskState::Deleted => true,
469 DiskState::Historic { was_deleted } => *was_deleted,
470 _ => false,
471 }
472 }
473}
474
475/// The file associated with a buffer, in the case where the file is on the local disk.
476pub trait LocalFile: File {
477 /// Returns the absolute path of this file
478 fn abs_path(&self, cx: &App) -> PathBuf;
479
480 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
481 fn load(&self, cx: &App) -> Task<Result<String>>;
482
483 /// Loads the file's contents from disk.
484 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
485}
486
487/// The auto-indent behavior associated with an editing operation.
488/// For some editing operations, each affected line of text has its
489/// indentation recomputed. For other operations, the entire block
490/// of edited text is adjusted uniformly.
491#[derive(Clone, Debug)]
492pub enum AutoindentMode {
493 /// Indent each line of inserted text.
494 EachLine,
495 /// Apply the same indentation adjustment to all of the lines
496 /// in a given insertion.
497 Block {
498 /// The original indentation column of the first line of each
499 /// insertion, if it has been copied.
500 ///
501 /// Knowing this makes it possible to preserve the relative indentation
502 /// of every line in the insertion from when it was copied.
503 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is then auto-indented to column `b`, then every other line
        /// of the insertion has its indentation shifted by the same delta, `b - a`.
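        ///
        /// For example, if the copied text originally started at column 4
        /// (`a = 4`) and its first line is auto-indented to column 8 (`b = 8`),
        /// every following line is shifted right by 4 columns.
        ///
        /// A minimal sketch of constructing block-mode auto-indentation for a
        /// single copied insertion (the column value is illustrative):
        ///
        /// ```ignore
        /// let mode = AutoindentMode::Block {
        ///     original_indent_columns: vec![Some(4)],
        /// };
        /// ```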
507 original_indent_columns: Vec<Option<u32>>,
508 },
509}
510
511#[derive(Clone)]
512struct AutoindentRequest {
513 before_edit: BufferSnapshot,
514 entries: Vec<AutoindentRequestEntry>,
515 is_block_mode: bool,
516 ignore_empty_lines: bool,
517}
518
519#[derive(Debug, Clone)]
520struct AutoindentRequestEntry {
521 /// A range of the buffer whose indentation should be adjusted.
522 range: Range<Anchor>,
523 /// The row of the edit start in the buffer before the edit was applied.
524 /// This is stored here because the anchor in range is created after
525 /// the edit, so it cannot be used with the before_edit snapshot.
526 old_row: Option<u32>,
527 indent_size: IndentSize,
528 original_indent_column: Option<u32>,
529}
530
531#[derive(Debug)]
532struct IndentSuggestion {
533 basis_row: u32,
534 delta: Ordering,
535 within_error: bool,
536}
537
538struct BufferChunkHighlights<'a> {
539 captures: SyntaxMapCaptures<'a>,
540 next_capture: Option<SyntaxMapCapture<'a>>,
541 stack: Vec<(usize, HighlightId)>,
542 highlight_maps: Vec<HighlightMap>,
543}
544
545/// An iterator that yields chunks of a buffer's text, along with their
546/// syntax highlights and diagnostic status.
547pub struct BufferChunks<'a> {
548 buffer_snapshot: Option<&'a BufferSnapshot>,
549 range: Range<usize>,
550 chunks: text::Chunks<'a>,
551 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
552 error_depth: usize,
553 warning_depth: usize,
554 information_depth: usize,
555 hint_depth: usize,
556 unnecessary_depth: usize,
557 underline: bool,
558 highlights: Option<BufferChunkHighlights<'a>>,
559}
560
561/// A chunk of a buffer's text, along with its syntax highlight and
562/// diagnostic status.
563#[derive(Clone, Debug, Default)]
564pub struct Chunk<'a> {
565 /// The text of the chunk.
566 pub text: &'a str,
567 /// The syntax highlighting style of the chunk.
568 pub syntax_highlight_id: Option<HighlightId>,
569 /// The highlight style that has been applied to this chunk in
570 /// the editor.
571 pub highlight_style: Option<HighlightStyle>,
572 /// The severity of diagnostic associated with this chunk, if any.
573 pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters in this chunk are tabs.
    pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
    pub chars: u128,
578 /// Whether this chunk of text is marked as unnecessary.
579 pub is_unnecessary: bool,
580 /// Whether this chunk of text was originally a tab character.
581 pub is_tab: bool,
582 /// Whether this chunk of text was originally an inlay.
583 pub is_inlay: bool,
584 /// Whether to underline the corresponding text range in the editor.
585 pub underline: bool,
586}
587
588/// A set of edits to a given version of a buffer, computed asynchronously.
589#[derive(Debug)]
590pub struct Diff {
591 pub base_version: clock::Global,
592 pub line_ending: LineEnding,
593 pub edits: Vec<(Range<usize>, Arc<str>)>,
594}
595
596#[derive(Debug, Clone, Copy)]
597pub(crate) struct DiagnosticEndpoint {
598 offset: usize,
599 is_start: bool,
600 underline: bool,
601 severity: DiagnosticSeverity,
602 is_unnecessary: bool,
603}
604
605/// A class of characters, used for characterizing a run of text.
606#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
607pub enum CharKind {
608 /// Whitespace.
609 Whitespace,
610 /// Punctuation.
611 Punctuation,
612 /// Word.
613 Word,
614}
615
616/// Context for character classification within a specific scope.
617#[derive(Copy, Clone, Eq, PartialEq, Debug)]
618pub enum CharScopeContext {
619 /// Character classification for completion queries.
620 ///
621 /// This context treats certain characters as word constituents that would
622 /// normally be considered punctuation, such as '-' in Tailwind classes
623 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
624 Completion,
625 /// Character classification for linked edits.
626 ///
627 /// This context handles characters that should be treated as part of
628 /// identifiers during linked editing operations, such as '.' in JSX
629 /// component names like `<Animated.View>`.
630 LinkedEdit,
631}
632
/// A runnable is the data about a buffer region that can be resolved into a task.
634pub struct Runnable {
635 pub tags: SmallVec<[RunnableTag; 1]>,
636 pub language: Arc<Language>,
637 pub buffer: BufferId,
638}
639
640#[derive(Default, Clone, Debug)]
641pub struct HighlightedText {
642 pub text: SharedString,
643 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
644}
645
646#[derive(Default, Debug)]
647struct HighlightedTextBuilder {
648 pub text: String,
649 highlights: Vec<(Range<usize>, HighlightStyle)>,
650}
651
652impl HighlightedText {
653 pub fn from_buffer_range<T: ToOffset>(
654 range: Range<T>,
655 snapshot: &text::BufferSnapshot,
656 syntax_snapshot: &SyntaxSnapshot,
657 override_style: Option<HighlightStyle>,
658 syntax_theme: &SyntaxTheme,
659 ) -> Self {
660 let mut highlighted_text = HighlightedTextBuilder::default();
661 highlighted_text.add_text_from_buffer_range(
662 range,
663 snapshot,
664 syntax_snapshot,
665 override_style,
666 syntax_theme,
667 );
668 highlighted_text.build()
669 }
670
671 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
672 gpui::StyledText::new(self.text.clone())
673 .with_default_highlights(default_style, self.highlights.iter().cloned())
674 }
675
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within it, along with a boolean indicating whether more lines follow.
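    ///
    /// A minimal usage sketch; the `highlighted_text` value is assumed to come
    /// from [`HighlightedText::from_buffer_range`] or similar:
    ///
    /// ```ignore
    /// let (preview, has_more_lines) = highlighted_text.first_line_preview();
    /// ```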
678 pub fn first_line_preview(self) -> (Self, bool) {
679 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
680 let first_line = &self.text[..newline_ix];
681
        // Trim leading whitespace, unless a highlight starts within it.
683 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
684 if let Some((first_highlight_range, _)) = self.highlights.first() {
685 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
686 }
687
688 let preview_text = &first_line[preview_start_ix..];
689 let preview_highlights = self
690 .highlights
691 .into_iter()
692 .skip_while(|(range, _)| range.end <= preview_start_ix)
693 .take_while(|(range, _)| range.start < newline_ix)
694 .filter_map(|(mut range, highlight)| {
695 range.start = range.start.saturating_sub(preview_start_ix);
696 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
697 if range.is_empty() {
698 None
699 } else {
700 Some((range, highlight))
701 }
702 });
703
704 let preview = Self {
705 text: SharedString::new(preview_text),
706 highlights: preview_highlights.collect(),
707 };
708
709 (preview, self.text.len() > newline_ix)
710 }
711}
712
713impl HighlightedTextBuilder {
714 pub fn build(self) -> HighlightedText {
715 HighlightedText {
716 text: self.text.into(),
717 highlights: self.highlights,
718 }
719 }
720
721 pub fn add_text_from_buffer_range<T: ToOffset>(
722 &mut self,
723 range: Range<T>,
724 snapshot: &text::BufferSnapshot,
725 syntax_snapshot: &SyntaxSnapshot,
726 override_style: Option<HighlightStyle>,
727 syntax_theme: &SyntaxTheme,
728 ) {
729 let range = range.to_offset(snapshot);
730 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
731 let start = self.text.len();
732 self.text.push_str(chunk.text);
733 let end = self.text.len();
734
735 if let Some(highlight_style) = chunk
736 .syntax_highlight_id
737 .and_then(|id| id.style(syntax_theme))
738 {
739 let highlight_style = override_style.map_or(highlight_style, |override_style| {
740 highlight_style.highlight(override_style)
741 });
742 self.highlights.push((start..end, highlight_style));
743 } else if let Some(override_style) = override_style {
744 self.highlights.push((start..end, override_style));
745 }
746 }
747 }
748
749 fn highlighted_chunks<'a>(
750 range: Range<usize>,
751 snapshot: &'a text::BufferSnapshot,
752 syntax_snapshot: &'a SyntaxSnapshot,
753 ) -> BufferChunks<'a> {
754 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
755 grammar
756 .highlights_config
757 .as_ref()
758 .map(|config| &config.query)
759 });
760
761 let highlight_maps = captures
762 .grammars()
763 .iter()
764 .map(|grammar| grammar.highlight_map())
765 .collect();
766
767 BufferChunks::new(
768 snapshot.as_rope(),
769 range,
770 Some((captures, highlight_maps)),
771 false,
772 None,
773 )
774 }
775}
776
777#[derive(Clone)]
778pub struct EditPreview {
779 old_snapshot: text::BufferSnapshot,
780 applied_edits_snapshot: text::BufferSnapshot,
781 syntax_snapshot: SyntaxSnapshot,
782}
783
784impl EditPreview {
785 pub fn as_unified_diff(
786 &self,
787 file: Option<&Arc<dyn File>>,
788 edits: &[(Range<Anchor>, impl AsRef<str>)],
789 ) -> Option<String> {
790 let (first, _) = edits.first()?;
791 let (last, _) = edits.last()?;
792
793 let start = first.start.to_point(&self.old_snapshot);
794 let old_end = last.end.to_point(&self.old_snapshot);
795 let new_end = last
796 .end
797 .bias_right(&self.old_snapshot)
798 .to_point(&self.applied_edits_snapshot);
799
800 let start = Point::new(start.row.saturating_sub(3), 0);
801 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
802 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
803
804 let diff_body = unified_diff_with_offsets(
805 &self
806 .old_snapshot
807 .text_for_range(start..old_end)
808 .collect::<String>(),
809 &self
810 .applied_edits_snapshot
811 .text_for_range(start..new_end)
812 .collect::<String>(),
813 start.row,
814 start.row,
815 );
816
817 let path = file.map(|f| f.path().as_unix_str());
818 let header = match path {
819 Some(p) => format!("--- a/{}\n+++ b/{}\n", p, p),
820 None => String::new(),
821 };
822
823 Some(format!("{}{}", header, diff_body))
824 }
825
826 pub fn highlight_edits(
827 &self,
828 current_snapshot: &BufferSnapshot,
829 edits: &[(Range<Anchor>, impl AsRef<str>)],
830 include_deletions: bool,
831 cx: &App,
832 ) -> HighlightedText {
833 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
834 return HighlightedText::default();
835 };
836
837 let mut highlighted_text = HighlightedTextBuilder::default();
838
839 let visible_range_in_preview_snapshot =
840 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
841 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
842
843 let insertion_highlight_style = HighlightStyle {
844 background_color: Some(cx.theme().status().created_background),
845 ..Default::default()
846 };
847 let deletion_highlight_style = HighlightStyle {
848 background_color: Some(cx.theme().status().deleted_background),
849 ..Default::default()
850 };
851 let syntax_theme = cx.theme().syntax();
852
853 for (range, edit_text) in edits {
854 let edit_new_end_in_preview_snapshot = range
855 .end
856 .bias_right(&self.old_snapshot)
857 .to_offset(&self.applied_edits_snapshot);
858 let edit_start_in_preview_snapshot =
859 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
860
861 let unchanged_range_in_preview_snapshot =
862 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
863 if !unchanged_range_in_preview_snapshot.is_empty() {
864 highlighted_text.add_text_from_buffer_range(
865 unchanged_range_in_preview_snapshot,
866 &self.applied_edits_snapshot,
867 &self.syntax_snapshot,
868 None,
869 syntax_theme,
870 );
871 }
872
873 let range_in_current_snapshot = range.to_offset(current_snapshot);
874 if include_deletions && !range_in_current_snapshot.is_empty() {
875 highlighted_text.add_text_from_buffer_range(
876 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
879 Some(deletion_highlight_style),
880 syntax_theme,
881 );
882 }
883
884 if !edit_text.as_ref().is_empty() {
885 highlighted_text.add_text_from_buffer_range(
886 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
887 &self.applied_edits_snapshot,
888 &self.syntax_snapshot,
889 Some(insertion_highlight_style),
890 syntax_theme,
891 );
892 }
893
894 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
895 }
896
897 highlighted_text.add_text_from_buffer_range(
898 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
899 &self.applied_edits_snapshot,
900 &self.syntax_snapshot,
901 None,
902 syntax_theme,
903 );
904
905 highlighted_text.build()
906 }
907
908 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
909 cx.new(|cx| {
910 let mut buffer = Buffer::local_normalized(
911 self.applied_edits_snapshot.as_rope().clone(),
912 self.applied_edits_snapshot.line_ending(),
913 cx,
914 );
915 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
916 buffer
917 })
918 }
919
920 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
921 let (first, _) = edits.first()?;
922 let (last, _) = edits.last()?;
923
924 let start = first
925 .start
926 .bias_left(&self.old_snapshot)
927 .to_point(&self.applied_edits_snapshot);
928 let end = last
929 .end
930 .bias_right(&self.old_snapshot)
931 .to_point(&self.applied_edits_snapshot);
932
933 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
934 let range = Point::new(start.row, 0)
935 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
936
937 Some(range)
938 }
939}
940
941#[derive(Clone, Debug, PartialEq, Eq)]
942pub struct BracketMatch<T> {
943 pub open_range: Range<T>,
944 pub close_range: Range<T>,
945 pub newline_only: bool,
946 pub syntax_layer_depth: usize,
947 pub color_index: Option<usize>,
948}
949
950impl<T> BracketMatch<T> {
951 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
952 (self.open_range, self.close_range)
953 }
954}
955
956impl Buffer {
957 /// Create a new buffer with the given base text.
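    ///
    /// A minimal sketch, assuming a gpui context (for example inside a test)
    /// that can construct entities:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```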
958 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
959 Self::build(
960 TextBuffer::new(
961 ReplicaId::LOCAL,
962 cx.entity_id().as_non_zero_u64().into(),
963 base_text.into(),
964 ),
965 None,
966 Capability::ReadWrite,
967 )
968 }
969
970 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
971 pub fn local_normalized(
972 base_text_normalized: Rope,
973 line_ending: LineEnding,
974 cx: &Context<Self>,
975 ) -> Self {
976 Self::build(
977 TextBuffer::new_normalized(
978 ReplicaId::LOCAL,
979 cx.entity_id().as_non_zero_u64().into(),
980 line_ending,
981 base_text_normalized,
982 ),
983 None,
984 Capability::ReadWrite,
985 )
986 }
987
988 /// Create a new buffer that is a replica of a remote buffer.
989 pub fn remote(
990 remote_id: BufferId,
991 replica_id: ReplicaId,
992 capability: Capability,
993 base_text: impl Into<String>,
994 ) -> Self {
995 Self::build(
996 TextBuffer::new(replica_id, remote_id, base_text.into()),
997 None,
998 capability,
999 )
1000 }
1001
1002 /// Create a new buffer that is a replica of a remote buffer, populating its
1003 /// state from the given protobuf message.
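    ///
    /// A rough sketch of the replication flow this supports; the surrounding
    /// plumbing (how the protobuf state and operations are transported, and
    /// where `replica_id` comes from) is assumed, not shown:
    ///
    /// ```ignore
    /// // On the host side:
    /// let state = buffer.read(cx).to_proto(cx);
    /// let ops = buffer.read(cx).serialize_ops(None, cx);
    ///
    /// // On the replica side:
    /// let replica = cx.new(|_| {
    ///     Buffer::from_proto(replica_id, Capability::ReadWrite, state, None).unwrap()
    /// });
    /// ```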
1004 pub fn from_proto(
1005 replica_id: ReplicaId,
1006 capability: Capability,
1007 message: proto::BufferState,
1008 file: Option<Arc<dyn File>>,
1009 ) -> Result<Self> {
1010 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
1011 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
1012 let mut this = Self::build(buffer, file, capability);
1013 this.text.set_line_ending(proto::deserialize_line_ending(
1014 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
1015 ));
1016 this.saved_version = proto::deserialize_version(&message.saved_version);
1017 this.saved_mtime = message.saved_mtime.map(|time| time.into());
1018 Ok(this)
1019 }
1020
1021 /// Serialize the buffer's state to a protobuf message.
1022 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
1023 proto::BufferState {
1024 id: self.remote_id().into(),
1025 file: self.file.as_ref().map(|f| f.to_proto(cx)),
1026 base_text: self.base_text().to_string(),
1027 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
1028 saved_version: proto::serialize_version(&self.saved_version),
1029 saved_mtime: self.saved_mtime.map(|time| time.into()),
1030 }
1031 }
1032
    /// Serialize all of the changes to the buffer since the given version as protobuf operations.
1034 pub fn serialize_ops(
1035 &self,
1036 since: Option<clock::Global>,
1037 cx: &App,
1038 ) -> Task<Vec<proto::Operation>> {
1039 let mut operations = Vec::new();
1040 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1041
1042 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1043 proto::serialize_operation(&Operation::UpdateSelections {
1044 selections: set.selections.clone(),
1045 lamport_timestamp: set.lamport_timestamp,
1046 line_mode: set.line_mode,
1047 cursor_shape: set.cursor_shape,
1048 })
1049 }));
1050
1051 for (server_id, diagnostics) in &self.diagnostics {
1052 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1053 lamport_timestamp: self.diagnostics_timestamp,
1054 server_id: *server_id,
1055 diagnostics: diagnostics.iter().cloned().collect(),
1056 }));
1057 }
1058
1059 for (server_id, completions) in &self.completion_triggers_per_language_server {
1060 operations.push(proto::serialize_operation(
1061 &Operation::UpdateCompletionTriggers {
1062 triggers: completions.iter().cloned().collect(),
1063 lamport_timestamp: self.completion_triggers_timestamp,
1064 server_id: *server_id,
1065 },
1066 ));
1067 }
1068
1069 let text_operations = self.text.operations().clone();
1070 cx.background_spawn(async move {
1071 let since = since.unwrap_or_default();
1072 operations.extend(
1073 text_operations
1074 .iter()
1075 .filter(|(_, op)| !since.observed(op.timestamp()))
1076 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1077 );
1078 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1079 operations
1080 })
1081 }
1082
1083 /// Assign a language to the buffer, returning the buffer.
1084 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1085 self.set_language_async(Some(language), cx);
1086 self
1087 }
1088
1089 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
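    ///
    /// A minimal sketch, assuming a gpui context and an `Arc<Language>` (here
    /// called `rust_language`) obtained elsewhere:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx).with_language(rust_language, cx));
    /// ```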
1090 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1091 self.set_language(Some(language), cx);
1092 self
1093 }
1094
1095 /// Returns the [`Capability`] of this buffer.
1096 pub fn capability(&self) -> Capability {
1097 self.capability
1098 }
1099
1100 /// Whether this buffer can only be read.
1101 pub fn read_only(&self) -> bool {
1102 !self.capability.editable()
1103 }
1104
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1106 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1107 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1108 let snapshot = buffer.snapshot();
1109 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1110 let tree_sitter_data = TreeSitterData::new(snapshot);
1111 Self {
1112 saved_mtime,
1113 tree_sitter_data: Arc::new(tree_sitter_data),
1114 saved_version: buffer.version(),
1115 preview_version: buffer.version(),
1116 reload_task: None,
1117 transaction_depth: 0,
1118 was_dirty_before_starting_transaction: None,
1119 has_unsaved_edits: Cell::new((buffer.version(), false)),
1120 text: buffer,
1121 branch_state: None,
1122 file,
1123 capability,
1124 syntax_map,
1125 reparse: None,
1126 non_text_state_update_count: 0,
1127 sync_parse_timeout: if cfg!(any(test, feature = "test-support")) {
1128 Some(Duration::from_millis(10))
1129 } else {
1130 Some(Duration::from_millis(1))
1131 },
1132 parse_status: watch::channel(ParseStatus::Idle),
1133 autoindent_requests: Default::default(),
1134 wait_for_autoindent_txs: Default::default(),
1135 pending_autoindent: Default::default(),
1136 language: None,
1137 remote_selections: Default::default(),
1138 diagnostics: Default::default(),
1139 diagnostics_timestamp: Lamport::MIN,
1140 completion_triggers: Default::default(),
1141 completion_triggers_per_language_server: Default::default(),
1142 completion_triggers_timestamp: Lamport::MIN,
1143 deferred_ops: OperationQueue::new(),
1144 has_conflict: false,
1145 change_bits: Default::default(),
1146 _subscriptions: Vec::new(),
1147 encoding: encoding_rs::UTF_8,
1148 has_bom: false,
1149 }
1150 }
1151
1152 pub fn build_snapshot(
1153 text: Rope,
1154 language: Option<Arc<Language>>,
1155 language_registry: Option<Arc<LanguageRegistry>>,
1156 cx: &mut App,
1157 ) -> impl Future<Output = BufferSnapshot> + use<> {
1158 let entity_id = cx.reserve_entity::<Self>().entity_id();
1159 let buffer_id = entity_id.as_non_zero_u64().into();
1160 async move {
1161 let text =
1162 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1163 .snapshot();
1164 let mut syntax = SyntaxMap::new(&text).snapshot();
1165 if let Some(language) = language.clone() {
1166 let language_registry = language_registry.clone();
1167 syntax.reparse(&text, language_registry, language);
1168 }
1169 let tree_sitter_data = TreeSitterData::new(text.clone());
1170 BufferSnapshot {
1171 text,
1172 syntax,
1173 file: None,
1174 diagnostics: Default::default(),
1175 remote_selections: Default::default(),
1176 tree_sitter_data: Arc::new(tree_sitter_data),
1177 language,
1178 non_text_state_update_count: 0,
1179 capability: Capability::ReadOnly,
1180 }
1181 }
1182 }
1183
1184 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1185 let entity_id = cx.reserve_entity::<Self>().entity_id();
1186 let buffer_id = entity_id.as_non_zero_u64().into();
1187 let text = TextBuffer::new_normalized(
1188 ReplicaId::LOCAL,
1189 buffer_id,
1190 Default::default(),
1191 Rope::new(),
1192 )
1193 .snapshot();
1194 let syntax = SyntaxMap::new(&text).snapshot();
1195 let tree_sitter_data = TreeSitterData::new(text.clone());
1196 BufferSnapshot {
1197 text,
1198 syntax,
1199 tree_sitter_data: Arc::new(tree_sitter_data),
1200 file: None,
1201 diagnostics: Default::default(),
1202 remote_selections: Default::default(),
1203 language: None,
1204 non_text_state_update_count: 0,
1205 capability: Capability::ReadOnly,
1206 }
1207 }
1208
1209 #[cfg(any(test, feature = "test-support"))]
1210 pub fn build_snapshot_sync(
1211 text: Rope,
1212 language: Option<Arc<Language>>,
1213 language_registry: Option<Arc<LanguageRegistry>>,
1214 cx: &mut App,
1215 ) -> BufferSnapshot {
1216 let entity_id = cx.reserve_entity::<Self>().entity_id();
1217 let buffer_id = entity_id.as_non_zero_u64().into();
1218 let text =
1219 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1220 .snapshot();
1221 let mut syntax = SyntaxMap::new(&text).snapshot();
1222 if let Some(language) = language.clone() {
1223 syntax.reparse(&text, language_registry, language);
1224 }
1225 let tree_sitter_data = TreeSitterData::new(text.clone());
1226 BufferSnapshot {
1227 text,
1228 syntax,
1229 tree_sitter_data: Arc::new(tree_sitter_data),
1230 file: None,
1231 diagnostics: Default::default(),
1232 remote_selections: Default::default(),
1233 language,
1234 non_text_state_update_count: 0,
1235 capability: Capability::ReadOnly,
1236 }
1237 }
1238
1239 /// Retrieve a snapshot of the buffer's current state. This is computationally
1240 /// cheap, and allows reading from the buffer on a background thread.
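    ///
    /// A minimal sketch of reading from a snapshot off the main thread; the
    /// `buffer` entity and executor come from the surrounding gpui context:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is immutable, so it can be read from any thread.
    ///     let line_count = snapshot.max_point().row + 1;
    /// })
    /// .detach();
    /// ```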
1241 pub fn snapshot(&self) -> BufferSnapshot {
1242 let text = self.text.snapshot();
1243 let mut syntax_map = self.syntax_map.lock();
1244 syntax_map.interpolate(&text);
1245 let syntax = syntax_map.snapshot();
1246
1247 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1248 Arc::new(TreeSitterData::new(text.clone()))
1249 } else {
1250 self.tree_sitter_data.clone()
1251 };
1252
1253 BufferSnapshot {
1254 text,
1255 syntax,
1256 tree_sitter_data,
1257 file: self.file.clone(),
1258 remote_selections: self.remote_selections.clone(),
1259 diagnostics: self.diagnostics.clone(),
1260 language: self.language.clone(),
1261 non_text_state_update_count: self.non_text_state_update_count,
1262 capability: self.capability,
1263 }
1264 }
1265
1266 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1267 let this = cx.entity();
1268 cx.new(|cx| {
1269 let mut branch = Self {
1270 branch_state: Some(BufferBranchState {
1271 base_buffer: this.clone(),
1272 merged_operations: Default::default(),
1273 }),
1274 language: self.language.clone(),
1275 has_conflict: self.has_conflict,
1276 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1277 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1278 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1279 };
1280 if let Some(language_registry) = self.language_registry() {
1281 branch.set_language_registry(language_registry);
1282 }
1283
1284 // Reparse the branch buffer so that we get syntax highlighting immediately.
1285 branch.reparse(cx, true);
1286
1287 branch
1288 })
1289 }
1290
1291 pub fn preview_edits(
1292 &self,
1293 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1294 cx: &App,
1295 ) -> Task<EditPreview> {
1296 let registry = self.language_registry();
1297 let language = self.language().cloned();
1298 let old_snapshot = self.text.snapshot();
1299 let mut branch_buffer = self.text.branch();
1300 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1301 cx.background_spawn(async move {
1302 if !edits.is_empty() {
1303 if let Some(language) = language.clone() {
1304 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1305 }
1306
1307 branch_buffer.edit(edits.iter().cloned());
1308 let snapshot = branch_buffer.snapshot();
1309 syntax_snapshot.interpolate(&snapshot);
1310
1311 if let Some(language) = language {
1312 syntax_snapshot.reparse(&snapshot, registry, language);
1313 }
1314 }
1315 EditPreview {
1316 old_snapshot,
1317 applied_edits_snapshot: branch_buffer.snapshot(),
1318 syntax_snapshot,
1319 }
1320 })
1321 }
1322
1323 /// Applies all of the changes in this buffer that intersect any of the
1324 /// given `ranges` to its base buffer.
1325 ///
1326 /// If `ranges` is empty, then all changes will be applied. This buffer must
1327 /// be a branch buffer to call this method.
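    ///
    /// A minimal sketch of editing a branch and merging every change back into
    /// its base buffer (assuming a gpui context):
    ///
    /// ```ignore
    /// let branch = base.update(cx, |base, cx| base.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// new header\n")], None, cx);
    ///     // An empty range list means "merge all changes".
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```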
1328 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1329 let Some(base_buffer) = self.base_buffer() else {
1330 debug_panic!("not a branch buffer");
1331 return;
1332 };
1333
1334 let mut ranges = if ranges.is_empty() {
1335 &[0..usize::MAX]
1336 } else {
1337 ranges.as_slice()
1338 }
1339 .iter()
1340 .peekable();
1341
1342 let mut edits = Vec::new();
1343 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1344 let mut is_included = false;
1345 while let Some(range) = ranges.peek() {
1346 if range.end < edit.new.start {
1347 ranges.next().unwrap();
1348 } else {
1349 if range.start <= edit.new.end {
1350 is_included = true;
1351 }
1352 break;
1353 }
1354 }
1355
1356 if is_included {
1357 edits.push((
1358 edit.old.clone(),
1359 self.text_for_range(edit.new.clone()).collect::<String>(),
1360 ));
1361 }
1362 }
1363
1364 let operation = base_buffer.update(cx, |base_buffer, cx| {
1365 // cx.emit(BufferEvent::DiffBaseChanged);
1366 base_buffer.edit(edits, None, cx)
1367 });
1368
1369 if let Some(operation) = operation
1370 && let Some(BufferBranchState {
1371 merged_operations, ..
1372 }) = &mut self.branch_state
1373 {
1374 merged_operations.push(operation);
1375 }
1376 }
1377
1378 fn on_base_buffer_event(
1379 &mut self,
1380 _: Entity<Buffer>,
1381 event: &BufferEvent,
1382 cx: &mut Context<Self>,
1383 ) {
1384 let BufferEvent::Operation { operation, .. } = event else {
1385 return;
1386 };
1387 let Some(BufferBranchState {
1388 merged_operations, ..
1389 }) = &mut self.branch_state
1390 else {
1391 return;
1392 };
1393
1394 let mut operation_to_undo = None;
1395 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1396 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1397 {
1398 merged_operations.remove(ix);
1399 operation_to_undo = Some(operation.timestamp);
1400 }
1401
1402 self.apply_ops([operation.clone()], cx);
1403
1404 if let Some(timestamp) = operation_to_undo {
1405 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1406 self.undo_operations(counts, cx);
1407 }
1408 }
1409
1410 #[cfg(test)]
1411 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1412 &self.text
1413 }
1414
1415 /// Retrieve a snapshot of the buffer's raw text, without any
1416 /// language-related state like the syntax tree or diagnostics.
1417 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1418 self.text.snapshot()
1419 }
1420
1421 /// The file associated with the buffer, if any.
1422 pub fn file(&self) -> Option<&Arc<dyn File>> {
1423 self.file.as_ref()
1424 }
1425
1426 /// The version of the buffer that was last saved or reloaded from disk.
1427 pub fn saved_version(&self) -> &clock::Global {
1428 &self.saved_version
1429 }
1430
1431 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1432 pub fn saved_mtime(&self) -> Option<MTime> {
1433 self.saved_mtime
1434 }
1435
1436 /// Returns the character encoding of the buffer's file.
1437 pub fn encoding(&self) -> &'static Encoding {
1438 self.encoding
1439 }
1440
1441 /// Sets the character encoding of the buffer.
1442 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1443 self.encoding = encoding;
1444 }
1445
1446 /// Returns whether the buffer has a Byte Order Mark.
1447 pub fn has_bom(&self) -> bool {
1448 self.has_bom
1449 }
1450
1451 /// Sets whether the buffer has a Byte Order Mark.
1452 pub fn set_has_bom(&mut self, has_bom: bool) {
1453 self.has_bom = has_bom;
1454 }
1455
1456 /// Assign a language to the buffer.
1457 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1458 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1459 }
1460
1461 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1462 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1463 self.set_language_(language, true, cx);
1464 }
1465
1466 fn set_language_(
1467 &mut self,
1468 language: Option<Arc<Language>>,
1469 may_block: bool,
1470 cx: &mut Context<Self>,
1471 ) {
1472 self.non_text_state_update_count += 1;
1473 self.syntax_map.lock().clear(&self.text);
1474 let old_language = std::mem::replace(&mut self.language, language);
1475 self.was_changed();
1476 self.reparse(cx, may_block);
1477 let has_fresh_language =
1478 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1479 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1480 }
1481
1482 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1483 /// other languages if parts of the buffer are written in different languages.
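    ///
    /// A minimal sketch; `languages` is an `Arc<LanguageRegistry>` and
    /// `markdown` an `Arc<Language>`, both assumed to be set up elsewhere:
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.set_language_registry(languages.clone());
    ///     // Injected languages (e.g. fenced code blocks) can now be resolved
    ///     // through the registry when the buffer reparses.
    ///     buffer.set_language(Some(markdown.clone()), cx);
    /// });
    /// ```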
1484 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1485 self.syntax_map
1486 .lock()
1487 .set_language_registry(language_registry);
1488 }
1489
1490 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1491 self.syntax_map.lock().language_registry()
1492 }
1493
1494 /// Assign the line ending type to the buffer.
1495 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1496 self.text.set_line_ending(line_ending);
1497
1498 let lamport_timestamp = self.text.lamport_clock.tick();
1499 self.send_operation(
1500 Operation::UpdateLineEnding {
1501 line_ending,
1502 lamport_timestamp,
1503 },
1504 true,
1505 cx,
1506 );
1507 }
1508
1509 /// Assign the buffer a new [`Capability`].
1510 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1511 if self.capability != capability {
1512 self.capability = capability;
1513 cx.emit(BufferEvent::CapabilityChanged)
1514 }
1515 }
1516
1517 /// This method is called to signal that the buffer has been saved.
1518 pub fn did_save(
1519 &mut self,
1520 version: clock::Global,
1521 mtime: Option<MTime>,
1522 cx: &mut Context<Self>,
1523 ) {
1524 self.saved_version = version.clone();
1525 self.has_unsaved_edits.set((version, false));
1526 self.has_conflict = false;
1527 self.saved_mtime = mtime;
1528 self.was_changed();
1529 cx.emit(BufferEvent::Saved);
1530 cx.notify();
1531 }
1532
1533 /// Reloads the contents of the buffer from disk.
1534 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1535 let (tx, rx) = futures::channel::oneshot::channel();
1536 let prev_version = self.text.version();
1537 self.reload_task = Some(cx.spawn(async move |this, cx| {
1538 let Some((new_mtime, load_bytes_task, encoding)) = this.update(cx, |this, cx| {
1539 let file = this.file.as_ref()?.as_local()?;
1540 Some((
1541 file.disk_state().mtime(),
1542 file.load_bytes(cx),
1543 this.encoding,
1544 ))
1545 })?
1546 else {
1547 return Ok(());
1548 };
1549
1550 let bytes = load_bytes_task.await?;
1551 let (cow, _encoding_used, _has_errors) = encoding.decode(&bytes);
1552 let new_text = cow.into_owned();
1553
1554 let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await;
1555 this.update(cx, |this, cx| {
1556 if this.version() == diff.base_version {
1557 this.finalize_last_transaction();
1558 this.apply_diff(diff, cx);
1559 tx.send(this.finalize_last_transaction().cloned()).ok();
1560 this.has_conflict = false;
1561 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1562 } else {
1563 if !diff.edits.is_empty()
1564 || this
1565 .edits_since::<usize>(&diff.base_version)
1566 .next()
1567 .is_some()
1568 {
1569 this.has_conflict = true;
1570 }
1571
1572 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1573 }
1574
1575 this.reload_task.take();
1576 })
1577 }));
1578 rx
1579 }
1580
1581 /// This method is called to signal that the buffer has been reloaded.
1582 pub fn did_reload(
1583 &mut self,
1584 version: clock::Global,
1585 line_ending: LineEnding,
1586 mtime: Option<MTime>,
1587 cx: &mut Context<Self>,
1588 ) {
1589 self.saved_version = version;
1590 self.has_unsaved_edits
1591 .set((self.saved_version.clone(), false));
1592 self.text.set_line_ending(line_ending);
1593 self.saved_mtime = mtime;
1594 cx.emit(BufferEvent::Reloaded);
1595 cx.notify();
1596 }
1597
1598 /// Updates the [`File`] backing this buffer. This should be called when
1599 /// the file has changed or has been deleted.
1600 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1601 let was_dirty = self.is_dirty();
1602 let mut file_changed = false;
1603
1604 if let Some(old_file) = self.file.as_ref() {
1605 if new_file.path() != old_file.path() {
1606 file_changed = true;
1607 }
1608
1609 let old_state = old_file.disk_state();
1610 let new_state = new_file.disk_state();
1611 if old_state != new_state {
1612 file_changed = true;
1613 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1614 cx.emit(BufferEvent::ReloadNeeded)
1615 }
1616 }
1617 } else {
1618 file_changed = true;
1619 };
1620
1621 self.file = Some(new_file);
1622 if file_changed {
1623 self.was_changed();
1624 self.non_text_state_update_count += 1;
1625 if was_dirty != self.is_dirty() {
1626 cx.emit(BufferEvent::DirtyChanged);
1627 }
1628 cx.emit(BufferEvent::FileHandleChanged);
1629 cx.notify();
1630 }
1631 }
1632
1633 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1634 Some(self.branch_state.as_ref()?.base_buffer.clone())
1635 }
1636
1637 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1638 pub fn language(&self) -> Option<&Arc<Language>> {
1639 self.language.as_ref()
1640 }
1641
1642 /// Returns the [`Language`] at the given location.
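    ///
    /// For buffers with injected languages, the result depends on the syntax
    /// layer at the position. A minimal sketch (the offset is illustrative):
    ///
    /// ```ignore
    /// // In an HTML buffer, an offset inside a <script> tag may resolve to the
    /// // injected JavaScript language rather than the buffer's root language.
    /// let language = buffer.language_at(offset);
    /// ```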
1643 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1644 let offset = position.to_offset(self);
1645 let mut is_first = true;
1646 let start_anchor = self.anchor_before(offset);
1647 let end_anchor = self.anchor_after(offset);
1648 self.syntax_map
1649 .lock()
1650 .layers_for_range(offset..offset, &self.text, false)
1651 .filter(|layer| {
1652 if is_first {
1653 is_first = false;
1654 return true;
1655 }
1656
1657 layer
1658 .included_sub_ranges
1659 .map(|sub_ranges| {
1660 sub_ranges.iter().any(|sub_range| {
1661 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1662 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1663 !is_before_start && !is_after_end
1664 })
1665 })
1666 .unwrap_or(true)
1667 })
1668 .last()
1669 .map(|info| info.language.clone())
1670 .or_else(|| self.language.clone())
1671 }
1672
1673 /// Returns each [`Language`] for the active syntax layers at the given location.
1674 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1675 let offset = position.to_offset(self);
1676 let mut languages: Vec<Arc<Language>> = self
1677 .syntax_map
1678 .lock()
1679 .layers_for_range(offset..offset, &self.text, false)
1680 .map(|info| info.language.clone())
1681 .collect();
1682
1683 if languages.is_empty()
1684 && let Some(buffer_language) = self.language()
1685 {
1686 languages.push(buffer_language.clone());
1687 }
1688
1689 languages
1690 }
1691
1692 /// An integer version number that accounts for all updates besides
1693 /// the buffer's text itself (which is versioned via a version vector).
1694 pub fn non_text_state_update_count(&self) -> usize {
1695 self.non_text_state_update_count
1696 }
1697
1698 /// Whether the buffer is being parsed in the background.
1699 #[cfg(any(test, feature = "test-support"))]
1700 pub fn is_parsing(&self) -> bool {
1701 self.reparse.is_some()
1702 }
1703
1704 /// Indicates whether the buffer contains any regions that may be
1705 /// written in a language that hasn't been loaded yet.
1706 pub fn contains_unknown_injections(&self) -> bool {
1707 self.syntax_map.lock().contains_unknown_injections()
1708 }
1709
1710 #[cfg(any(test, feature = "test-support"))]
1711 pub fn set_sync_parse_timeout(&mut self, timeout: Option<Duration>) {
1712 self.sync_parse_timeout = timeout;
1713 }
1714
1715 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1716 match Arc::get_mut(&mut self.tree_sitter_data) {
1717 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1718 None => {
1719 let tree_sitter_data = TreeSitterData::new(snapshot);
1720 self.tree_sitter_data = Arc::new(tree_sitter_data)
1721 }
1722 }
1723 }
1724
1725 /// Called after an edit to synchronize the buffer's main parse tree with
1726 /// the buffer's new underlying state.
1727 ///
1728 /// Locks the syntax map and interpolates the edits since the last reparse
1729 /// into the foreground syntax tree.
1730 ///
1731 /// Then takes a stable snapshot of the syntax map before unlocking it.
1732 /// The snapshot with the interpolated edits is sent to a background thread,
1733 /// where we ask Tree-sitter to perform an incremental parse.
1734 ///
1735     /// Meanwhile, if `may_block` is true, we block the main thread for up to the
1736     /// sync parse timeout (1ms) waiting for the parse to complete, and proceed
1737     /// synchronously if it finishes within that budget.
1738     ///
1739     /// If we time out waiting on the parse, we return immediately with the
1740     /// interpolated tree still in the foreground and spawn a second task that
1741     /// waits for the parse to complete. When that background parse finishes, it
1742     /// calls back into the main thread and assigns the parsed state.
1743 ///
1744     /// If the buffer or grammar changed since the start of the background parse,
1745     /// we initiate an additional reparse recursively. To avoid concurrent parses
1746 /// for the same buffer, we only initiate a new parse if we are not already
1747 /// parsing in the background.
1748 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1749 if self.text.version() != *self.tree_sitter_data.version() {
1750 self.invalidate_tree_sitter_data(self.text.snapshot());
1751 }
1752 if self.reparse.is_some() {
1753 return;
1754 }
1755 let language = if let Some(language) = self.language.clone() {
1756 language
1757 } else {
1758 return;
1759 };
1760
1761 let text = self.text_snapshot();
1762 let parsed_version = self.version();
1763
1764 let mut syntax_map = self.syntax_map.lock();
1765 syntax_map.interpolate(&text);
1766 let language_registry = syntax_map.language_registry();
1767 let mut syntax_snapshot = syntax_map.snapshot();
1768 drop(syntax_map);
1769
1770 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1771 if may_block && let Some(sync_parse_timeout) = self.sync_parse_timeout {
1772 if let Ok(()) = syntax_snapshot.reparse_with_timeout(
1773 &text,
1774 language_registry.clone(),
1775 language.clone(),
1776 sync_parse_timeout,
1777 ) {
1778 self.did_finish_parsing(syntax_snapshot, Duration::from_millis(300), cx);
1779 self.reparse = None;
1780 return;
1781 }
1782 }
1783
1784 let parse_task = cx.background_spawn({
1785 let language = language.clone();
1786 let language_registry = language_registry.clone();
1787 async move {
1788 syntax_snapshot.reparse(&text, language_registry, language);
1789 syntax_snapshot
1790 }
1791 });
1792
1793 self.reparse = Some(cx.spawn(async move |this, cx| {
1794 let new_syntax_map = parse_task.await;
1795 this.update(cx, move |this, cx| {
1796 let grammar_changed = || {
1797 this.language
1798 .as_ref()
1799 .is_none_or(|current_language| !Arc::ptr_eq(&language, current_language))
1800 };
1801 let language_registry_changed = || {
1802 new_syntax_map.contains_unknown_injections()
1803 && language_registry.is_some_and(|registry| {
1804 registry.version() != new_syntax_map.language_registry_version()
1805 })
1806 };
1807 let parse_again = this.version.changed_since(&parsed_version)
1808 || language_registry_changed()
1809 || grammar_changed();
1810 this.did_finish_parsing(new_syntax_map, Duration::ZERO, cx);
1811 this.reparse = None;
1812 if parse_again {
1813 this.reparse(cx, false);
1814 }
1815 })
1816 .ok();
1817 }));
1818 }
1819
1820 fn did_finish_parsing(
1821 &mut self,
1822 syntax_snapshot: SyntaxSnapshot,
1823 block_budget: Duration,
1824 cx: &mut Context<Self>,
1825 ) {
1826 self.non_text_state_update_count += 1;
1827 self.syntax_map.lock().did_parse(syntax_snapshot);
1828 self.was_changed();
1829 self.request_autoindent(cx, block_budget);
1830 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1831 self.invalidate_tree_sitter_data(self.text.snapshot());
1832 cx.emit(BufferEvent::Reparsed);
1833 cx.notify();
1834 }
1835
1836 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1837 self.parse_status.1.clone()
1838 }
1839
1840     /// Waits until the buffer is no longer parsing.
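    ///
    /// A usage sketch (illustrative; assumes an async context):
    ///
    /// ```ignore
    /// // Wait for any in-flight background parse before inspecting syntax.
    /// buffer.parsing_idle().await;
    /// ```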
1841 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1842 let mut parse_status = self.parse_status();
1843 async move {
1844 while *parse_status.borrow() != ParseStatus::Idle {
1845 if parse_status.changed().await.is_err() {
1846 break;
1847 }
1848 }
1849 }
1850 }
1851
1852 /// Assign to the buffer a set of diagnostics created by a given language server.
1853 pub fn update_diagnostics(
1854 &mut self,
1855 server_id: LanguageServerId,
1856 diagnostics: DiagnosticSet,
1857 cx: &mut Context<Self>,
1858 ) {
1859 let lamport_timestamp = self.text.lamport_clock.tick();
1860 let op = Operation::UpdateDiagnostics {
1861 server_id,
1862 diagnostics: diagnostics.iter().cloned().collect(),
1863 lamport_timestamp,
1864 };
1865
1866 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1867 self.send_operation(op, true, cx);
1868 }
1869
1870 pub fn buffer_diagnostics(
1871 &self,
1872 for_server: Option<LanguageServerId>,
1873 ) -> Vec<&DiagnosticEntry<Anchor>> {
1874 match for_server {
1875 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1876 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1877 Err(_) => Vec::new(),
1878 },
1879 None => self
1880 .diagnostics
1881 .iter()
1882 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1883 .collect(),
1884 }
1885 }
1886
1887 fn request_autoindent(&mut self, cx: &mut Context<Self>, block_budget: Duration) {
1888 if let Some(indent_sizes) = self.compute_autoindents() {
1889 let indent_sizes = cx.background_spawn(indent_sizes);
1890 match cx
1891 .background_executor()
1892 .block_with_timeout(block_budget, indent_sizes)
1893 {
1894 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1895 Err(indent_sizes) => {
1896 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1897 let indent_sizes = indent_sizes.await;
1898 this.update(cx, |this, cx| {
1899 this.apply_autoindents(indent_sizes, cx);
1900 })
1901 .ok();
1902 }));
1903 }
1904 }
1905 } else {
1906 self.autoindent_requests.clear();
1907 for tx in self.wait_for_autoindent_txs.drain(..) {
1908 tx.send(()).ok();
1909 }
1910 }
1911 }
1912
1913 fn compute_autoindents(
1914 &self,
1915 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1916 let max_rows_between_yields = 100;
1917 let snapshot = self.snapshot();
1918 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1919 return None;
1920 }
1921
1922 let autoindent_requests = self.autoindent_requests.clone();
1923 Some(async move {
1924 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1925 for request in autoindent_requests {
1926 // Resolve each edited range to its row in the current buffer and in the
1927 // buffer before this batch of edits.
1928 let mut row_ranges = Vec::new();
1929 let mut old_to_new_rows = BTreeMap::new();
1930 let mut language_indent_sizes_by_new_row = Vec::new();
1931 for entry in &request.entries {
1932 let position = entry.range.start;
1933 let new_row = position.to_point(&snapshot).row;
1934 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1935 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1936
1937 if let Some(old_row) = entry.old_row {
1938 old_to_new_rows.insert(old_row, new_row);
1939 }
1940 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1941 }
1942
1943 // Build a map containing the suggested indentation for each of the edited lines
1944 // with respect to the state of the buffer before these edits. This map is keyed
1945 // by the rows for these lines in the current state of the buffer.
1946 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1947 let old_edited_ranges =
1948 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1949 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1950 let mut language_indent_size = IndentSize::default();
1951 for old_edited_range in old_edited_ranges {
1952 let suggestions = request
1953 .before_edit
1954 .suggest_autoindents(old_edited_range.clone())
1955 .into_iter()
1956 .flatten();
1957 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1958 if let Some(suggestion) = suggestion {
1959 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1960
1961 // Find the indent size based on the language for this row.
1962 while let Some((row, size)) = language_indent_sizes.peek() {
1963 if *row > new_row {
1964 break;
1965 }
1966 language_indent_size = *size;
1967 language_indent_sizes.next();
1968 }
1969
1970 let suggested_indent = old_to_new_rows
1971 .get(&suggestion.basis_row)
1972 .and_then(|from_row| {
1973 Some(old_suggestions.get(from_row).copied()?.0)
1974 })
1975 .unwrap_or_else(|| {
1976 request
1977 .before_edit
1978 .indent_size_for_line(suggestion.basis_row)
1979 })
1980 .with_delta(suggestion.delta, language_indent_size);
1981 old_suggestions
1982 .insert(new_row, (suggested_indent, suggestion.within_error));
1983 }
1984 }
1985 yield_now().await;
1986 }
1987
1988 // Compute new suggestions for each line, but only include them in the result
1989 // if they differ from the old suggestion for that line.
1990 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1991 let mut language_indent_size = IndentSize::default();
1992 for (row_range, original_indent_column) in row_ranges {
1993 let new_edited_row_range = if request.is_block_mode {
1994 row_range.start..row_range.start + 1
1995 } else {
1996 row_range.clone()
1997 };
1998
1999 let suggestions = snapshot
2000 .suggest_autoindents(new_edited_row_range.clone())
2001 .into_iter()
2002 .flatten();
2003 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
2004 if let Some(suggestion) = suggestion {
2005 // Find the indent size based on the language for this row.
2006 while let Some((row, size)) = language_indent_sizes.peek() {
2007 if *row > new_row {
2008 break;
2009 }
2010 language_indent_size = *size;
2011 language_indent_sizes.next();
2012 }
2013
2014 let suggested_indent = indent_sizes
2015 .get(&suggestion.basis_row)
2016 .copied()
2017 .map(|e| e.0)
2018 .unwrap_or_else(|| {
2019 snapshot.indent_size_for_line(suggestion.basis_row)
2020 })
2021 .with_delta(suggestion.delta, language_indent_size);
2022
2023 if old_suggestions.get(&new_row).is_none_or(
2024 |(old_indentation, was_within_error)| {
2025 suggested_indent != *old_indentation
2026 && (!suggestion.within_error || *was_within_error)
2027 },
2028 ) {
2029 indent_sizes.insert(
2030 new_row,
2031 (suggested_indent, request.ignore_empty_lines),
2032 );
2033 }
2034 }
2035 }
2036
2037 if let (true, Some(original_indent_column)) =
2038 (request.is_block_mode, original_indent_column)
2039 {
2040 let new_indent =
2041 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2042 *indent
2043 } else {
2044 snapshot.indent_size_for_line(row_range.start)
2045 };
2046 let delta = new_indent.len as i64 - original_indent_column as i64;
2047 if delta != 0 {
2048 for row in row_range.skip(1) {
2049 indent_sizes.entry(row).or_insert_with(|| {
2050 let mut size = snapshot.indent_size_for_line(row);
2051 if size.kind == new_indent.kind {
2052 match delta.cmp(&0) {
2053 Ordering::Greater => size.len += delta as u32,
2054 Ordering::Less => {
2055 size.len = size.len.saturating_sub(-delta as u32)
2056 }
2057 Ordering::Equal => {}
2058 }
2059 }
2060 (size, request.ignore_empty_lines)
2061 });
2062 }
2063 }
2064 }
2065
2066 yield_now().await;
2067 }
2068 }
2069
2070 indent_sizes
2071 .into_iter()
2072 .filter_map(|(row, (indent, ignore_empty_lines))| {
2073 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2074 None
2075 } else {
2076 Some((row, indent))
2077 }
2078 })
2079 .collect()
2080 })
2081 }
2082
2083 fn apply_autoindents(
2084 &mut self,
2085 indent_sizes: BTreeMap<u32, IndentSize>,
2086 cx: &mut Context<Self>,
2087 ) {
2088 self.autoindent_requests.clear();
2089 for tx in self.wait_for_autoindent_txs.drain(..) {
2090 tx.send(()).ok();
2091 }
2092
2093 let edits: Vec<_> = indent_sizes
2094 .into_iter()
2095 .filter_map(|(row, indent_size)| {
2096 let current_size = indent_size_for_line(self, row);
2097 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2098 })
2099 .collect();
2100
2101 let preserve_preview = self.preserve_preview();
2102 self.edit(edits, None, cx);
2103 if preserve_preview {
2104 self.refresh_preview();
2105 }
2106 }
2107
2108 /// Create a minimal edit that will cause the given row to be indented
2109 /// with the given size. After applying this edit, the length of the line
2110 /// will always be at least `new_size.len`.
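    ///
    /// A worked sketch with illustrative values:
    ///
    /// ```ignore
    /// // Grow a 2-space indent on row 3 to 4 spaces: the minimal edit inserts
    /// // the two missing spaces at the start of the row.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```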
2111 pub fn edit_for_indent_size_adjustment(
2112 row: u32,
2113 current_size: IndentSize,
2114 new_size: IndentSize,
2115 ) -> Option<(Range<Point>, String)> {
2116 if new_size.kind == current_size.kind {
2117             match new_size.len.cmp(&current_size.len) {
2118 Ordering::Greater => {
2119 let point = Point::new(row, 0);
2120 Some((
2121 point..point,
2122 iter::repeat(new_size.char())
2123 .take((new_size.len - current_size.len) as usize)
2124 .collect::<String>(),
2125 ))
2126 }
2127
2128 Ordering::Less => Some((
2129 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2130 String::new(),
2131 )),
2132
2133 Ordering::Equal => None,
2134 }
2135 } else {
2136 Some((
2137 Point::new(row, 0)..Point::new(row, current_size.len),
2138 iter::repeat(new_size.char())
2139 .take(new_size.len as usize)
2140 .collect::<String>(),
2141 ))
2142 }
2143 }
2144
2145 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2146 /// and the given new text.
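    ///
    /// A sketch of pairing this with [`Buffer::apply_diff`], mirroring how the
    /// buffer's own background tasks are structured (illustrative; assumes
    /// `new_text` is in scope and `cx` is a `&mut Context<Buffer>`):
    ///
    /// ```ignore
    /// let diff = buffer.diff(new_text, cx);
    /// cx.spawn(async move |this, cx| {
    ///     let diff = diff.await;
    ///     this.update(cx, |this, cx| {
    ///         this.apply_diff(diff, cx);
    ///     })
    ///     .ok();
    /// })
    /// .detach();
    /// ```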
2147 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2148 let old_text = self.as_rope().clone();
2149 let base_version = self.version();
2150 cx.background_executor()
2151 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2152 let old_text = old_text.to_string();
2153 let line_ending = LineEnding::detect(&new_text);
2154 LineEnding::normalize(&mut new_text);
2155 let edits = text_diff(&old_text, &new_text);
2156 Diff {
2157 base_version,
2158 line_ending,
2159 edits,
2160 }
2161 })
2162 }
2163
2164 /// Spawns a background task that searches the buffer for any whitespace
2165     /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2166 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2167 let old_text = self.as_rope().clone();
2168 let line_ending = self.line_ending();
2169 let base_version = self.version();
2170 cx.background_spawn(async move {
2171 let ranges = trailing_whitespace_ranges(&old_text);
2172 let empty = Arc::<str>::from("");
2173 Diff {
2174 base_version,
2175 line_ending,
2176 edits: ranges
2177 .into_iter()
2178 .map(|range| (range, empty.clone()))
2179 .collect(),
2180 }
2181 })
2182 }
2183
2184 /// Ensures that the buffer ends with a single newline character, and
2185 /// no other whitespace. Skips if the buffer is empty.
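    ///
    /// For example (illustrative), a buffer containing `"fn main() {}\n\n\n"`
    /// ends up containing `"fn main() {}\n"`:
    ///
    /// ```ignore
    /// buffer.ensure_final_newline(cx);
    /// ```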
2186 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2187 let len = self.len();
2188 if len == 0 {
2189 return;
2190 }
2191 let mut offset = len;
2192 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2193 let non_whitespace_len = chunk
2194 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2195 .len();
2196 offset -= chunk.len();
2197 offset += non_whitespace_len;
2198 if non_whitespace_len != 0 {
2199 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2200 return;
2201 }
2202 break;
2203 }
2204 }
2205 self.edit([(offset..len, "\n")], None, cx);
2206 }
2207
2208 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2209 /// calculated, then adjust the diff to account for those changes, and discard any
2210 /// parts of the diff that conflict with those changes.
2211 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2212 let snapshot = self.snapshot();
2213 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2214 let mut delta = 0;
2215 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2216 while let Some(edit_since) = edits_since.peek() {
2217 // If the edit occurs after a diff hunk, then it does not
2218 // affect that hunk.
2219 if edit_since.old.start > range.end {
2220 break;
2221 }
2222 // If the edit precedes the diff hunk, then adjust the hunk
2223 // to reflect the edit.
2224 else if edit_since.old.end < range.start {
2225 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2226 edits_since.next();
2227 }
2228 // If the edit intersects a diff hunk, then discard that hunk.
2229 else {
2230 return None;
2231 }
2232 }
2233
2234 let start = (range.start as i64 + delta) as usize;
2235 let end = (range.end as i64 + delta) as usize;
2236 Some((start..end, new_text))
2237 });
2238
2239 self.start_transaction();
2240 self.text.set_line_ending(diff.line_ending);
2241 self.edit(adjusted_edits, None, cx);
2242 self.end_transaction(cx)
2243 }
2244
2245 pub fn has_unsaved_edits(&self) -> bool {
2246 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2247
2248 if last_version == self.version {
2249 self.has_unsaved_edits
2250 .set((last_version, has_unsaved_edits));
2251 return has_unsaved_edits;
2252 }
2253
2254 let has_edits = self.has_edits_since(&self.saved_version);
2255 self.has_unsaved_edits
2256 .set((self.version.clone(), has_edits));
2257 has_edits
2258 }
2259
2260 /// Checks if the buffer has unsaved changes.
2261 pub fn is_dirty(&self) -> bool {
2262 if self.capability == Capability::ReadOnly {
2263 return false;
2264 }
2265 if self.has_conflict {
2266 return true;
2267 }
2268 match self.file.as_ref().map(|f| f.disk_state()) {
2269 Some(DiskState::New) | Some(DiskState::Deleted) => {
2270 !self.is_empty() && self.has_unsaved_edits()
2271 }
2272 _ => self.has_unsaved_edits(),
2273 }
2274 }
2275
2276 /// Marks the buffer as having a conflict regardless of current buffer state.
2277 pub fn set_conflict(&mut self) {
2278 self.has_conflict = true;
2279 }
2280
2281 /// Checks if the buffer and its file have both changed since the buffer
2282 /// was last saved or reloaded.
2283 pub fn has_conflict(&self) -> bool {
2284 if self.has_conflict {
2285 return true;
2286 }
2287 let Some(file) = self.file.as_ref() else {
2288 return false;
2289 };
2290 match file.disk_state() {
2291 DiskState::New => false,
2292 DiskState::Present { mtime } => match self.saved_mtime {
2293 Some(saved_mtime) => {
2294 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2295 }
2296 None => true,
2297 },
2298 DiskState::Deleted => false,
2299 DiskState::Historic { .. } => false,
2300 }
2301 }
2302
2303 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2304 pub fn subscribe(&mut self) -> Subscription<usize> {
2305 self.text.subscribe()
2306 }
2307
2308 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2309 ///
2310 /// This allows downstream code to check if the buffer's text has changed without
2311     /// waiting for an effect cycle, which would be required if using events.
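    ///
    /// A minimal sketch (the `Rc`/`Cell` pair belongs to the caller):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// buffer.edit([(0..0, "x")], None, cx);
    /// assert!(changed.get()); // set synchronously when the text changed
    /// ```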
2312 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2313 if let Err(ix) = self
2314 .change_bits
2315 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2316 {
2317 self.change_bits.insert(ix, bit);
2318 }
2319 }
2320
2321 /// Set the change bit for all "listeners".
2322 fn was_changed(&mut self) {
2323 self.change_bits.retain(|change_bit| {
2324 change_bit
2325 .upgrade()
2326 .inspect(|bit| {
2327 _ = bit.replace(true);
2328 })
2329 .is_some()
2330 });
2331 }
2332
2333 /// Starts a transaction, if one is not already in-progress. When undoing or
2334 /// redoing edits, all of the edits performed within a transaction are undone
2335 /// or redone together.
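    ///
    /// A sketch of grouping two edits into a single undo step (assumes `cx`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn ")], None, cx);
    /// buffer.edit([(3..3, "main")], None, cx);
    /// buffer.end_transaction(cx); // both edits are undone together
    /// ```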
2336 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2337 self.start_transaction_at(Instant::now())
2338 }
2339
2340 /// Starts a transaction, providing the current time. Subsequent transactions
2341 /// that occur within a short period of time will be grouped together. This
2342 /// is controlled by the buffer's undo grouping duration.
2343 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2344 self.transaction_depth += 1;
2345 if self.was_dirty_before_starting_transaction.is_none() {
2346 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2347 }
2348 self.text.start_transaction_at(now)
2349 }
2350
2351 /// Terminates the current transaction, if this is the outermost transaction.
2352 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2353 self.end_transaction_at(Instant::now(), cx)
2354 }
2355
2356 /// Terminates the current transaction, providing the current time. Subsequent transactions
2357 /// that occur within a short period of time will be grouped together. This
2358 /// is controlled by the buffer's undo grouping duration.
2359 pub fn end_transaction_at(
2360 &mut self,
2361 now: Instant,
2362 cx: &mut Context<Self>,
2363 ) -> Option<TransactionId> {
2364 assert!(self.transaction_depth > 0);
2365 self.transaction_depth -= 1;
2366 let was_dirty = if self.transaction_depth == 0 {
2367 self.was_dirty_before_starting_transaction.take().unwrap()
2368 } else {
2369 false
2370 };
2371 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2372 self.did_edit(&start_version, was_dirty, cx);
2373 Some(transaction_id)
2374 } else {
2375 None
2376 }
2377 }
2378
2379 /// Manually add a transaction to the buffer's undo history.
2380 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2381 self.text.push_transaction(transaction, now);
2382 }
2383
2384 /// Differs from `push_transaction` in that it does not clear the redo
2385 /// stack. Intended to be used to create a parent transaction to merge
2386 /// potential child transactions into.
2387 ///
2388 /// The caller is responsible for removing it from the undo history using
2389 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2390 /// are merged into this transaction, the caller is responsible for ensuring
2391 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2392 /// cleared is to create transactions with the usual `start_transaction` and
2393     /// `end_transaction` methods and then merge the resulting transactions into
2394     /// the transaction created by this method.
2395 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2396 self.text.push_empty_transaction(now)
2397 }
2398
2399 /// Prevent the last transaction from being grouped with any subsequent transactions,
2400     /// even if they occur within the buffer's undo grouping duration.
2401 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2402 self.text.finalize_last_transaction()
2403 }
2404
2405 /// Manually group all changes since a given transaction.
2406 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2407 self.text.group_until_transaction(transaction_id);
2408 }
2409
2410     /// Manually remove a transaction from the buffer's undo history.
2411 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2412 self.text.forget_transaction(transaction_id)
2413 }
2414
2415     /// Retrieve a transaction from the buffer's undo history.
2416 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2417 self.text.get_transaction(transaction_id)
2418 }
2419
2420 /// Manually merge two transactions in the buffer's undo history.
2421 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2422 self.text.merge_transactions(transaction, destination);
2423 }
2424
2425 /// Waits for the buffer to receive operations with the given timestamps.
2426 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2427 &mut self,
2428 edit_ids: It,
2429 ) -> impl Future<Output = Result<()>> + use<It> {
2430 self.text.wait_for_edits(edit_ids)
2431 }
2432
2433 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2434 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2435 &mut self,
2436 anchors: It,
2437 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2438 self.text.wait_for_anchors(anchors)
2439 }
2440
2441 /// Waits for the buffer to receive operations up to the given version.
2442 pub fn wait_for_version(
2443 &mut self,
2444 version: clock::Global,
2445 ) -> impl Future<Output = Result<()>> + use<> {
2446 self.text.wait_for_version(version)
2447 }
2448
2449     /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2450     /// [`Buffer::wait_for_anchors`] to resolve with an error.
2451 pub fn give_up_waiting(&mut self) {
2452 self.text.give_up_waiting();
2453 }
2454
2455 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2456 let mut rx = None;
2457 if !self.autoindent_requests.is_empty() {
2458 let channel = oneshot::channel();
2459 self.wait_for_autoindent_txs.push(channel.0);
2460 rx = Some(channel.1);
2461 }
2462 rx
2463 }
2464
2465 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2466 pub fn set_active_selections(
2467 &mut self,
2468 selections: Arc<[Selection<Anchor>]>,
2469 line_mode: bool,
2470 cursor_shape: CursorShape,
2471 cx: &mut Context<Self>,
2472 ) {
2473 let lamport_timestamp = self.text.lamport_clock.tick();
2474 self.remote_selections.insert(
2475 self.text.replica_id(),
2476 SelectionSet {
2477 selections: selections.clone(),
2478 lamport_timestamp,
2479 line_mode,
2480 cursor_shape,
2481 },
2482 );
2483 self.send_operation(
2484 Operation::UpdateSelections {
2485 selections,
2486 line_mode,
2487 lamport_timestamp,
2488 cursor_shape,
2489 },
2490 true,
2491 cx,
2492 );
2493 self.non_text_state_update_count += 1;
2494 cx.notify();
2495 }
2496
2497 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2498 /// this replica.
2499 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2500 if self
2501 .remote_selections
2502 .get(&self.text.replica_id())
2503 .is_none_or(|set| !set.selections.is_empty())
2504 {
2505 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2506 }
2507 }
2508
2509 pub fn set_agent_selections(
2510 &mut self,
2511 selections: Arc<[Selection<Anchor>]>,
2512 line_mode: bool,
2513 cursor_shape: CursorShape,
2514 cx: &mut Context<Self>,
2515 ) {
2516 let lamport_timestamp = self.text.lamport_clock.tick();
2517 self.remote_selections.insert(
2518 ReplicaId::AGENT,
2519 SelectionSet {
2520 selections,
2521 lamport_timestamp,
2522 line_mode,
2523 cursor_shape,
2524 },
2525 );
2526 self.non_text_state_update_count += 1;
2527 cx.notify();
2528 }
2529
2530 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2531 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2532 }
2533
2534 /// Replaces the buffer's entire text.
2535 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2536 where
2537 T: Into<Arc<str>>,
2538 {
2539 self.autoindent_requests.clear();
2540 self.edit([(0..self.len(), text)], None, cx)
2541 }
2542
2543 /// Appends the given text to the end of the buffer.
2544 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2545 where
2546 T: Into<Arc<str>>,
2547 {
2548 self.edit([(self.len()..self.len(), text)], None, cx)
2549 }
2550
2551 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2552 /// delete, and a string of text to insert at that location. Adjacent edits are coalesced.
2553 ///
2554 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2555 /// request for the edited ranges, which will be processed when the buffer finishes
2556 /// parsing.
2557 ///
2558 /// Parsing takes place at the end of a transaction, and may compute synchronously
2559 /// or asynchronously, depending on the changes.
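    ///
    /// A minimal sketch (assumes `cx: &mut Context<Buffer>`; `cursor` is a
    /// hypothetical offset within the buffer):
    ///
    /// ```ignore
    /// // Replace the first three bytes and append to the end in a single call.
    /// let len = buffer.len();
    /// buffer.edit([(0..3, "abc"), (len..len, "!")], None, cx);
    ///
    /// // Insert a newline and let the language's indent rules indent the new line.
    /// buffer.edit(
    ///     [(cursor..cursor, "\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```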
2560 pub fn edit<I, S, T>(
2561 &mut self,
2562 edits_iter: I,
2563 autoindent_mode: Option<AutoindentMode>,
2564 cx: &mut Context<Self>,
2565 ) -> Option<clock::Lamport>
2566 where
2567 I: IntoIterator<Item = (Range<S>, T)>,
2568 S: ToOffset,
2569 T: Into<Arc<str>>,
2570 {
2571 self.edit_internal(edits_iter, autoindent_mode, true, cx)
2572 }
2573
2574 /// Like [`edit`](Self::edit), but does not coalesce adjacent edits.
2575 pub fn edit_non_coalesce<I, S, T>(
2576 &mut self,
2577 edits_iter: I,
2578 autoindent_mode: Option<AutoindentMode>,
2579 cx: &mut Context<Self>,
2580 ) -> Option<clock::Lamport>
2581 where
2582 I: IntoIterator<Item = (Range<S>, T)>,
2583 S: ToOffset,
2584 T: Into<Arc<str>>,
2585 {
2586 self.edit_internal(edits_iter, autoindent_mode, false, cx)
2587 }
2588
2589 fn edit_internal<I, S, T>(
2590 &mut self,
2591 edits_iter: I,
2592 autoindent_mode: Option<AutoindentMode>,
2593 coalesce_adjacent: bool,
2594 cx: &mut Context<Self>,
2595 ) -> Option<clock::Lamport>
2596 where
2597 I: IntoIterator<Item = (Range<S>, T)>,
2598 S: ToOffset,
2599 T: Into<Arc<str>>,
2600 {
2601 // Skip invalid edits and coalesce contiguous ones.
2602 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2603
2604 for (range, new_text) in edits_iter {
2605 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2606
2607 if range.start > range.end {
2608 mem::swap(&mut range.start, &mut range.end);
2609 }
2610 let new_text = new_text.into();
2611 if !new_text.is_empty() || !range.is_empty() {
2612 let prev_edit = edits.last_mut();
2613 let should_coalesce = prev_edit.as_ref().is_some_and(|(prev_range, _)| {
2614 if coalesce_adjacent {
2615 prev_range.end >= range.start
2616 } else {
2617 prev_range.end > range.start
2618 }
2619 });
2620
2621 if let Some((prev_range, prev_text)) = prev_edit
2622 && should_coalesce
2623 {
2624 prev_range.end = cmp::max(prev_range.end, range.end);
2625 *prev_text = format!("{prev_text}{new_text}").into();
2626 } else {
2627 edits.push((range, new_text));
2628 }
2629 }
2630 }
2631 if edits.is_empty() {
2632 return None;
2633 }
2634
2635 self.start_transaction();
2636 self.pending_autoindent.take();
2637 let autoindent_request = autoindent_mode
2638 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2639
2640 let edit_operation = self.text.edit(edits.iter().cloned());
2641 let edit_id = edit_operation.timestamp();
2642
2643 if let Some((before_edit, mode)) = autoindent_request {
2644 let mut delta = 0isize;
2645 let mut previous_setting = None;
2646 let entries: Vec<_> = edits
2647 .into_iter()
2648 .enumerate()
2649 .zip(&edit_operation.as_edit().unwrap().new_text)
2650 .filter(|((_, (range, _)), _)| {
2651 let language = before_edit.language_at(range.start);
2652 let language_id = language.map(|l| l.id());
2653 if let Some((cached_language_id, auto_indent)) = previous_setting
2654 && cached_language_id == language_id
2655 {
2656 auto_indent
2657 } else {
2658 // The auto-indent setting is not present in editorconfigs, hence
2659 // we can avoid passing the file here.
2660 let auto_indent =
2661 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2662 previous_setting = Some((language_id, auto_indent));
2663 auto_indent
2664 }
2665 })
2666 .map(|((ix, (range, _)), new_text)| {
2667 let new_text_length = new_text.len();
2668 let old_start = range.start.to_point(&before_edit);
2669 let new_start = (delta + range.start as isize) as usize;
2670 let range_len = range.end - range.start;
2671 delta += new_text_length as isize - range_len as isize;
2672
2673 // Decide what range of the insertion to auto-indent, and whether
2674 // the first line of the insertion should be considered a newly-inserted line
2675 // or an edit to an existing line.
2676 let mut range_of_insertion_to_indent = 0..new_text_length;
2677 let mut first_line_is_new = true;
2678
2679 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2680 let old_line_end = before_edit.line_len(old_start.row);
2681
2682 if old_start.column > old_line_start {
2683 first_line_is_new = false;
2684 }
2685
2686 if !new_text.contains('\n')
2687 && (old_start.column + (range_len as u32) < old_line_end
2688 || old_line_end == old_line_start)
2689 {
2690 first_line_is_new = false;
2691 }
2692
2693 // When inserting text starting with a newline, avoid auto-indenting the
2694 // previous line.
2695 if new_text.starts_with('\n') {
2696 range_of_insertion_to_indent.start += 1;
2697 first_line_is_new = true;
2698 }
2699
2700 let mut original_indent_column = None;
2701 if let AutoindentMode::Block {
2702 original_indent_columns,
2703 } = &mode
2704 {
2705 original_indent_column = Some(if new_text.starts_with('\n') {
2706 indent_size_for_text(
2707 new_text[range_of_insertion_to_indent.clone()].chars(),
2708 )
2709 .len
2710 } else {
2711 original_indent_columns
2712 .get(ix)
2713 .copied()
2714 .flatten()
2715 .unwrap_or_else(|| {
2716 indent_size_for_text(
2717 new_text[range_of_insertion_to_indent.clone()].chars(),
2718 )
2719 .len
2720 })
2721 });
2722
2723 // Avoid auto-indenting the line after the edit.
2724 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2725 range_of_insertion_to_indent.end -= 1;
2726 }
2727 }
2728
2729 AutoindentRequestEntry {
2730 original_indent_column,
2731 old_row: if first_line_is_new {
2732 None
2733 } else {
2734 Some(old_start.row)
2735 },
2736 indent_size: before_edit.language_indent_size_at(range.start, cx),
2737 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2738 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2739 }
2740 })
2741 .collect();
2742
2743 if !entries.is_empty() {
2744 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2745 before_edit,
2746 entries,
2747 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2748 ignore_empty_lines: false,
2749 }));
2750 }
2751 }
2752
2753 self.end_transaction(cx);
2754 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2755 Some(edit_id)
2756 }
2757
2758 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2759 self.was_changed();
2760
2761 if self.edits_since::<usize>(old_version).next().is_none() {
2762 return;
2763 }
2764
2765 self.reparse(cx, true);
2766 cx.emit(BufferEvent::Edited);
2767 if was_dirty != self.is_dirty() {
2768 cx.emit(BufferEvent::DirtyChanged);
2769 }
2770 cx.notify();
2771 }
2772
2773 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2774 where
2775 I: IntoIterator<Item = Range<T>>,
2776 T: ToOffset + Copy,
2777 {
2778 let before_edit = self.snapshot();
2779 let entries = ranges
2780 .into_iter()
2781 .map(|range| AutoindentRequestEntry {
2782 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2783 old_row: None,
2784 indent_size: before_edit.language_indent_size_at(range.start, cx),
2785 original_indent_column: None,
2786 })
2787 .collect();
2788 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2789 before_edit,
2790 entries,
2791 is_block_mode: false,
2792 ignore_empty_lines: true,
2793 }));
2794 self.request_autoindent(cx, Duration::from_micros(300));
2795 }
2796
2797     /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2798     /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2799 pub fn insert_empty_line(
2800 &mut self,
2801 position: impl ToPoint,
2802 space_above: bool,
2803 space_below: bool,
2804 cx: &mut Context<Self>,
2805 ) -> Point {
2806 let mut position = position.to_point(self);
2807
2808 self.start_transaction();
2809
2810 self.edit(
2811 [(position..position, "\n")],
2812 Some(AutoindentMode::EachLine),
2813 cx,
2814 );
2815
2816 if position.column > 0 {
2817 position += Point::new(1, 0);
2818 }
2819
2820 if !self.is_line_blank(position.row) {
2821 self.edit(
2822 [(position..position, "\n")],
2823 Some(AutoindentMode::EachLine),
2824 cx,
2825 );
2826 }
2827
2828 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2829 self.edit(
2830 [(position..position, "\n")],
2831 Some(AutoindentMode::EachLine),
2832 cx,
2833 );
2834 position.row += 1;
2835 }
2836
2837 if space_below
2838 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2839 {
2840 self.edit(
2841 [(position..position, "\n")],
2842 Some(AutoindentMode::EachLine),
2843 cx,
2844 );
2845 }
2846
2847 self.end_transaction(cx);
2848
2849 position
2850 }
2851
2852 /// Applies the given remote operations to the buffer.
2853 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2854 self.pending_autoindent.take();
2855 let was_dirty = self.is_dirty();
2856 let old_version = self.version.clone();
2857 let mut deferred_ops = Vec::new();
2858 let buffer_ops = ops
2859 .into_iter()
2860 .filter_map(|op| match op {
2861 Operation::Buffer(op) => Some(op),
2862 _ => {
2863 if self.can_apply_op(&op) {
2864 self.apply_op(op, cx);
2865 } else {
2866 deferred_ops.push(op);
2867 }
2868 None
2869 }
2870 })
2871 .collect::<Vec<_>>();
2872 for operation in buffer_ops.iter() {
2873 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2874 }
2875 self.text.apply_ops(buffer_ops);
2876 self.deferred_ops.insert(deferred_ops);
2877 self.flush_deferred_ops(cx);
2878 self.did_edit(&old_version, was_dirty, cx);
2879 // Notify independently of whether the buffer was edited as the operations could include a
2880 // selection update.
2881 cx.notify();
2882 }
2883
2884 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2885 let mut deferred_ops = Vec::new();
2886 for op in self.deferred_ops.drain().iter().cloned() {
2887 if self.can_apply_op(&op) {
2888 self.apply_op(op, cx);
2889 } else {
2890 deferred_ops.push(op);
2891 }
2892 }
2893 self.deferred_ops.insert(deferred_ops);
2894 }
2895
2896 pub fn has_deferred_ops(&self) -> bool {
2897 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2898 }
2899
2900 fn can_apply_op(&self, operation: &Operation) -> bool {
2901 match operation {
2902 Operation::Buffer(_) => {
2903 unreachable!("buffer operations should never be applied at this layer")
2904 }
2905 Operation::UpdateDiagnostics {
2906 diagnostics: diagnostic_set,
2907 ..
2908 } => diagnostic_set.iter().all(|diagnostic| {
2909 self.text.can_resolve(&diagnostic.range.start)
2910 && self.text.can_resolve(&diagnostic.range.end)
2911 }),
2912 Operation::UpdateSelections { selections, .. } => selections
2913 .iter()
2914 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2915 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2916 }
2917 }
2918
2919 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2920 match operation {
2921 Operation::Buffer(_) => {
2922 unreachable!("buffer operations should never be applied at this layer")
2923 }
2924 Operation::UpdateDiagnostics {
2925 server_id,
2926 diagnostics: diagnostic_set,
2927 lamport_timestamp,
2928 } => {
2929 let snapshot = self.snapshot();
2930 self.apply_diagnostic_update(
2931 server_id,
2932 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2933 lamport_timestamp,
2934 cx,
2935 );
2936 }
2937 Operation::UpdateSelections {
2938 selections,
2939 lamport_timestamp,
2940 line_mode,
2941 cursor_shape,
2942 } => {
2943 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2944 && set.lamport_timestamp > lamport_timestamp
2945 {
2946 return;
2947 }
2948
2949 self.remote_selections.insert(
2950 lamport_timestamp.replica_id,
2951 SelectionSet {
2952 selections,
2953 lamport_timestamp,
2954 line_mode,
2955 cursor_shape,
2956 },
2957 );
2958 self.text.lamport_clock.observe(lamport_timestamp);
2959 self.non_text_state_update_count += 1;
2960 }
2961 Operation::UpdateCompletionTriggers {
2962 triggers,
2963 lamport_timestamp,
2964 server_id,
2965 } => {
2966 if triggers.is_empty() {
2967 self.completion_triggers_per_language_server
2968 .remove(&server_id);
2969 self.completion_triggers = self
2970 .completion_triggers_per_language_server
2971 .values()
2972 .flat_map(|triggers| triggers.iter().cloned())
2973 .collect();
2974 } else {
2975 self.completion_triggers_per_language_server
2976 .insert(server_id, triggers.iter().cloned().collect());
2977 self.completion_triggers.extend(triggers);
2978 }
2979 self.text.lamport_clock.observe(lamport_timestamp);
2980 }
2981 Operation::UpdateLineEnding {
2982 line_ending,
2983 lamport_timestamp,
2984 } => {
2985 self.text.set_line_ending(line_ending);
2986 self.text.lamport_clock.observe(lamport_timestamp);
2987 }
2988 }
2989 }
2990
2991 fn apply_diagnostic_update(
2992 &mut self,
2993 server_id: LanguageServerId,
2994 diagnostics: DiagnosticSet,
2995 lamport_timestamp: clock::Lamport,
2996 cx: &mut Context<Self>,
2997 ) {
2998 if lamport_timestamp > self.diagnostics_timestamp {
2999 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
3000 if diagnostics.is_empty() {
3001 if let Ok(ix) = ix {
3002 self.diagnostics.remove(ix);
3003 }
3004 } else {
3005 match ix {
3006 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
3007 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
3008 };
3009 }
3010 self.diagnostics_timestamp = lamport_timestamp;
3011 self.non_text_state_update_count += 1;
3012 self.text.lamport_clock.observe(lamport_timestamp);
3013 cx.notify();
3014 cx.emit(BufferEvent::DiagnosticsUpdated);
3015 }
3016 }
3017
3018 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
3019 self.was_changed();
3020 cx.emit(BufferEvent::Operation {
3021 operation,
3022 is_local,
3023 });
3024 }
3025
3026 /// Removes the selections for a given peer.
3027 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
3028 self.remote_selections.remove(&replica_id);
3029 cx.notify();
3030 }
3031
3032 /// Undoes the most recent transaction.
3033 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3034 let was_dirty = self.is_dirty();
3035 let old_version = self.version.clone();
3036
3037 if let Some((transaction_id, operation)) = self.text.undo() {
3038 self.send_operation(Operation::Buffer(operation), true, cx);
3039 self.did_edit(&old_version, was_dirty, cx);
3040 Some(transaction_id)
3041 } else {
3042 None
3043 }
3044 }
3045
3046 /// Manually undoes a specific transaction in the buffer's undo history.
3047 pub fn undo_transaction(
3048 &mut self,
3049 transaction_id: TransactionId,
3050 cx: &mut Context<Self>,
3051 ) -> bool {
3052 let was_dirty = self.is_dirty();
3053 let old_version = self.version.clone();
3054 if let Some(operation) = self.text.undo_transaction(transaction_id) {
3055 self.send_operation(Operation::Buffer(operation), true, cx);
3056 self.did_edit(&old_version, was_dirty, cx);
3057 true
3058 } else {
3059 false
3060 }
3061 }
3062
3063 /// Manually undoes all changes after a given transaction in the buffer's undo history.
3064 pub fn undo_to_transaction(
3065 &mut self,
3066 transaction_id: TransactionId,
3067 cx: &mut Context<Self>,
3068 ) -> bool {
3069 let was_dirty = self.is_dirty();
3070 let old_version = self.version.clone();
3071
3072 let operations = self.text.undo_to_transaction(transaction_id);
3073 let undone = !operations.is_empty();
3074 for operation in operations {
3075 self.send_operation(Operation::Buffer(operation), true, cx);
3076 }
3077 if undone {
3078 self.did_edit(&old_version, was_dirty, cx)
3079 }
3080 undone
3081 }
3082
3083 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3084 let was_dirty = self.is_dirty();
3085 let operation = self.text.undo_operations(counts);
3086 let old_version = self.version.clone();
3087 self.send_operation(Operation::Buffer(operation), true, cx);
3088 self.did_edit(&old_version, was_dirty, cx);
3089 }
3090
3091     /// Redoes the most recent transaction.
3092 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3093 let was_dirty = self.is_dirty();
3094 let old_version = self.version.clone();
3095
3096 if let Some((transaction_id, operation)) = self.text.redo() {
3097 self.send_operation(Operation::Buffer(operation), true, cx);
3098 self.did_edit(&old_version, was_dirty, cx);
3099 Some(transaction_id)
3100 } else {
3101 None
3102 }
3103 }
3104
3105     /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3106 pub fn redo_to_transaction(
3107 &mut self,
3108 transaction_id: TransactionId,
3109 cx: &mut Context<Self>,
3110 ) -> bool {
3111 let was_dirty = self.is_dirty();
3112 let old_version = self.version.clone();
3113
3114 let operations = self.text.redo_to_transaction(transaction_id);
3115 let redone = !operations.is_empty();
3116 for operation in operations {
3117 self.send_operation(Operation::Buffer(operation), true, cx);
3118 }
3119 if redone {
3120 self.did_edit(&old_version, was_dirty, cx)
3121 }
3122 redone
3123 }
3124
3125     /// Overrides the current completion triggers with the user-provided completion triggers.
3126 pub fn set_completion_triggers(
3127 &mut self,
3128 server_id: LanguageServerId,
3129 triggers: BTreeSet<String>,
3130 cx: &mut Context<Self>,
3131 ) {
3132 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3133 if triggers.is_empty() {
3134 self.completion_triggers_per_language_server
3135 .remove(&server_id);
3136 self.completion_triggers = self
3137 .completion_triggers_per_language_server
3138 .values()
3139 .flat_map(|triggers| triggers.iter().cloned())
3140 .collect();
3141 } else {
3142 self.completion_triggers_per_language_server
3143 .insert(server_id, triggers.clone());
3144 self.completion_triggers.extend(triggers.iter().cloned());
3145 }
3146 self.send_operation(
3147 Operation::UpdateCompletionTriggers {
3148 triggers: triggers.into_iter().collect(),
3149 lamport_timestamp: self.completion_triggers_timestamp,
3150 server_id,
3151 },
3152 true,
3153 cx,
3154 );
3155 cx.notify();
3156 }
3157
3158     /// Returns the set of strings that trigger a completion menu for this language.
3159     /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
3160 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3161 &self.completion_triggers
3162 }
3163
3164 /// Call this directly after performing edits to prevent the preview tab
3165 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3166 /// to return false until there are additional edits.
3167 pub fn refresh_preview(&mut self) {
3168 self.preview_version = self.version.clone();
3169 }
3170
3171 /// Whether we should preserve the preview status of a tab containing this buffer.
3172 pub fn preserve_preview(&self) -> bool {
3173 !self.has_edits_since(&self.preview_version)
3174 }
3175}
3176
3177#[doc(hidden)]
3178#[cfg(any(test, feature = "test-support"))]
3179impl Buffer {
3180 pub fn edit_via_marked_text(
3181 &mut self,
3182 marked_string: &str,
3183 autoindent_mode: Option<AutoindentMode>,
3184 cx: &mut Context<Self>,
3185 ) {
3186 let edits = self.edits_for_marked_text(marked_string);
3187 self.edit(edits, autoindent_mode, cx);
3188 }
3189
3190 pub fn set_group_interval(&mut self, group_interval: Duration) {
3191 self.text.set_group_interval(group_interval);
3192 }
3193
3194 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3195 where
3196 T: rand::Rng,
3197 {
3198 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3199 let mut last_end = None;
3200 for _ in 0..old_range_count {
3201 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3202 break;
3203 }
3204
3205 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3206 let mut range = self.random_byte_range(new_start, rng);
3207 if rng.random_bool(0.2) {
3208 mem::swap(&mut range.start, &mut range.end);
3209 }
3210 last_end = Some(range.end);
3211
3212 let new_text_len = rng.random_range(0..10);
3213 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3214 new_text = new_text.to_uppercase();
3215
3216 edits.push((range, new_text));
3217 }
3218 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3219 self.edit(edits, None, cx);
3220 }
3221
3222 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3223 let was_dirty = self.is_dirty();
3224 let old_version = self.version.clone();
3225
3226 let ops = self.text.randomly_undo_redo(rng);
3227 if !ops.is_empty() {
3228 for op in ops {
3229 self.send_operation(Operation::Buffer(op), true, cx);
3230 self.did_edit(&old_version, was_dirty, cx);
3231 }
3232 }
3233 }
3234}
3235
3236impl EventEmitter<BufferEvent> for Buffer {}
3237
3238impl Deref for Buffer {
3239 type Target = TextBuffer;
3240
3241 fn deref(&self) -> &Self::Target {
3242 &self.text
3243 }
3244}
3245
3246impl BufferSnapshot {
3247 /// Returns [`IndentSize`] for a given line that respects user settings and
3248 /// language preferences.
3249 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3250 indent_size_for_line(self, row)
3251 }
3252
3253 /// Returns [`IndentSize`] for a given position that respects user settings
3254 /// and language preferences.
3255 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3256 let settings = language_settings(
3257 self.language_at(position).map(|l| l.name()),
3258 self.file(),
3259 cx,
3260 );
3261 if settings.hard_tabs {
3262 IndentSize::tab()
3263 } else {
3264 IndentSize::spaces(settings.tab_size.get())
3265 }
3266 }
3267
3268 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3269 /// is passed in as `single_indent_size`.
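    ///
    /// A sketch with an illustrative row range and indent unit:
    ///
    /// ```ignore
    /// let sizes = snapshot.suggested_indents(0..10, IndentSize::spaces(4));
    /// for (row, size) in sizes {
    ///     println!("row {row}: indent to {} columns", size.len);
    /// }
    /// ```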
3270 pub fn suggested_indents(
3271 &self,
3272 rows: impl Iterator<Item = u32>,
3273 single_indent_size: IndentSize,
3274 ) -> BTreeMap<u32, IndentSize> {
3275 let mut result = BTreeMap::new();
3276
3277 for row_range in contiguous_ranges(rows, 10) {
3278 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3279 Some(suggestions) => suggestions,
3280 _ => break,
3281 };
3282
3283 for (row, suggestion) in row_range.zip(suggestions) {
3284 let indent_size = if let Some(suggestion) = suggestion {
3285 result
3286 .get(&suggestion.basis_row)
3287 .copied()
3288 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3289 .with_delta(suggestion.delta, single_indent_size)
3290 } else {
3291 self.indent_size_for_line(row)
3292 };
3293
3294 result.insert(row, indent_size);
3295 }
3296 }
3297
3298 result
3299 }
3300
3301 fn suggest_autoindents(
3302 &self,
3303 row_range: Range<u32>,
3304 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3305 let config = &self.language.as_ref()?.config;
3306 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3307
3308 #[derive(Debug, Clone)]
3309 struct StartPosition {
3310 start: Point,
3311 suffix: SharedString,
3312 language: Arc<Language>,
3313 }
3314
3315 // Find the suggested indentation ranges based on the syntax tree.
3316 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3317 let end = Point::new(row_range.end, 0);
3318 let range = (start..end).to_offset(&self.text);
3319 let mut matches = self.syntax.matches_with_options(
3320 range.clone(),
3321 &self.text,
3322 TreeSitterOptions {
3323 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3324 max_start_depth: None,
3325 },
3326 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3327 );
3328 let indent_configs = matches
3329 .grammars()
3330 .iter()
3331 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3332 .collect::<Vec<_>>();
3333
3334 let mut indent_ranges = Vec::<Range<Point>>::new();
3335 let mut start_positions = Vec::<StartPosition>::new();
3336 let mut outdent_positions = Vec::<Point>::new();
3337 while let Some(mat) = matches.peek() {
3338 let mut start: Option<Point> = None;
3339 let mut end: Option<Point> = None;
3340
3341 let config = indent_configs[mat.grammar_index];
3342 for capture in mat.captures {
3343 if capture.index == config.indent_capture_ix {
3344 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3345 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3346 } else if Some(capture.index) == config.start_capture_ix {
3347 start = Some(Point::from_ts_point(capture.node.end_position()));
3348 } else if Some(capture.index) == config.end_capture_ix {
3349 end = Some(Point::from_ts_point(capture.node.start_position()));
3350 } else if Some(capture.index) == config.outdent_capture_ix {
3351 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3352 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3353 start_positions.push(StartPosition {
3354 start: Point::from_ts_point(capture.node.start_position()),
3355 suffix: suffix.clone(),
3356 language: mat.language.clone(),
3357 });
3358 }
3359 }
3360
3361 matches.advance();
3362 if let Some((start, end)) = start.zip(end) {
3363 if start.row == end.row {
3364 continue;
3365 }
3366 let range = start..end;
3367 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3368 Err(ix) => indent_ranges.insert(ix, range),
3369 Ok(ix) => {
3370 let prev_range = &mut indent_ranges[ix];
3371 prev_range.end = prev_range.end.max(range.end);
3372 }
3373 }
3374 }
3375 }
3376
3377 let mut error_ranges = Vec::<Range<Point>>::new();
3378 let mut matches = self
3379 .syntax
3380 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3381 while let Some(mat) = matches.peek() {
3382 let node = mat.captures[0].node;
3383 let start = Point::from_ts_point(node.start_position());
3384 let end = Point::from_ts_point(node.end_position());
3385 let range = start..end;
3386 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3387 Ok(ix) | Err(ix) => ix,
3388 };
3389 let mut end_ix = ix;
3390 while let Some(existing_range) = error_ranges.get(end_ix) {
3391 if existing_range.end < end {
3392 end_ix += 1;
3393 } else {
3394 break;
3395 }
3396 }
3397 error_ranges.splice(ix..end_ix, [range]);
3398 matches.advance();
3399 }
3400
3401 outdent_positions.sort();
3402 for outdent_position in outdent_positions {
3403             // Find the innermost indent range containing this outdent position
3404             // and truncate its end to that position.
3405 if let Some(range_to_truncate) = indent_ranges
3406 .iter_mut()
3407 .rfind(|indent_range| indent_range.contains(&outdent_position))
3408 {
3409 range_to_truncate.end = outdent_position;
3410 }
3411 }
3412
3413 start_positions.sort_by_key(|b| b.start);
3414
3415         // Find the suggested indentation increases and decreases based on regexes.
3416 let mut regex_outdent_map = HashMap::default();
3417 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3418 let mut start_positions_iter = start_positions.iter().peekable();
3419
3420 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3421 self.for_each_line(
3422 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3423 ..Point::new(row_range.end, 0),
3424 |row, line| {
3425 let indent_len = self.indent_size_for_line(row).len;
3426 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3427 let row_language_config = row_language
3428 .as_ref()
3429 .map(|lang| lang.config())
3430 .unwrap_or(config);
3431
3432 if row_language_config
3433 .decrease_indent_pattern
3434 .as_ref()
3435 .is_some_and(|regex| regex.is_match(line))
3436 {
3437 indent_change_rows.push((row, Ordering::Less));
3438 }
3439 if row_language_config
3440 .increase_indent_pattern
3441 .as_ref()
3442 .is_some_and(|regex| regex.is_match(line))
3443 {
3444 indent_change_rows.push((row + 1, Ordering::Greater));
3445 }
3446 while let Some(pos) = start_positions_iter.peek() {
3447 if pos.start.row < row {
3448 let pos = start_positions_iter.next().unwrap().clone();
3449 last_seen_suffix
3450 .entry(pos.suffix.to_string())
3451 .or_default()
3452 .push(pos);
3453 } else {
3454 break;
3455 }
3456 }
3457 for rule in &row_language_config.decrease_indent_patterns {
3458 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3459 let row_start_column = self.indent_size_for_line(row).len;
3460 let basis_row = rule
3461 .valid_after
3462 .iter()
3463 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3464 .flatten()
3465 .filter(|pos| {
3466 row_language
3467 .as_ref()
3468 .or(self.language.as_ref())
3469 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3470 })
3471 .filter(|pos| pos.start.column <= row_start_column)
3472 .max_by_key(|pos| pos.start.row);
3473 if let Some(outdent_to) = basis_row {
3474 regex_outdent_map.insert(row, outdent_to.start.row);
3475 }
3476 break;
3477 }
3478 }
3479 },
3480 );
3481
3482 let mut indent_changes = indent_change_rows.into_iter().peekable();
3483 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3484 prev_non_blank_row.unwrap_or(0)
3485 } else {
3486 row_range.start.saturating_sub(1)
3487 };
3488
3489 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3490 Some(row_range.map(move |row| {
3491 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3492
3493 let mut indent_from_prev_row = false;
3494 let mut outdent_from_prev_row = false;
3495 let mut outdent_to_row = u32::MAX;
3496 let mut from_regex = false;
3497
3498 while let Some((indent_row, delta)) = indent_changes.peek() {
3499 match indent_row.cmp(&row) {
3500 Ordering::Equal => match delta {
3501 Ordering::Less => {
3502 from_regex = true;
3503 outdent_from_prev_row = true
3504 }
3505 Ordering::Greater => {
3506 indent_from_prev_row = true;
3507 from_regex = true
3508 }
3509 _ => {}
3510 },
3511
3512 Ordering::Greater => break,
3513 Ordering::Less => {}
3514 }
3515
3516 indent_changes.next();
3517 }
3518
3519 for range in &indent_ranges {
3520 if range.start.row >= row {
3521 break;
3522 }
3523 if range.start.row == prev_row && range.end > row_start {
3524 indent_from_prev_row = true;
3525 }
3526 if range.end > prev_row_start && range.end <= row_start {
3527 outdent_to_row = outdent_to_row.min(range.start.row);
3528 }
3529 }
3530
3531 if let Some(basis_row) = regex_outdent_map.get(&row) {
3532 indent_from_prev_row = false;
3533 outdent_to_row = *basis_row;
3534 from_regex = true;
3535 }
3536
3537 let within_error = error_ranges
3538 .iter()
3539 .any(|e| e.start.row < row && e.end > row_start);
3540
3541 let suggestion = if outdent_to_row == prev_row
3542 || (outdent_from_prev_row && indent_from_prev_row)
3543 {
3544 Some(IndentSuggestion {
3545 basis_row: prev_row,
3546 delta: Ordering::Equal,
3547 within_error: within_error && !from_regex,
3548 })
3549 } else if indent_from_prev_row {
3550 Some(IndentSuggestion {
3551 basis_row: prev_row,
3552 delta: Ordering::Greater,
3553 within_error: within_error && !from_regex,
3554 })
3555 } else if outdent_to_row < prev_row {
3556 Some(IndentSuggestion {
3557 basis_row: outdent_to_row,
3558 delta: Ordering::Equal,
3559 within_error: within_error && !from_regex,
3560 })
3561 } else if outdent_from_prev_row {
3562 Some(IndentSuggestion {
3563 basis_row: prev_row,
3564 delta: Ordering::Less,
3565 within_error: within_error && !from_regex,
3566 })
3567 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3568 {
3569 Some(IndentSuggestion {
3570 basis_row: prev_row,
3571 delta: Ordering::Equal,
3572 within_error: within_error && !from_regex,
3573 })
3574 } else {
3575 None
3576 };
3577
3578 prev_row = row;
3579 prev_row_start = row_start;
3580 suggestion
3581 }))
3582 }
3583
3584 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3585 while row > 0 {
3586 row -= 1;
3587 if !self.is_line_blank(row) {
3588 return Some(row);
3589 }
3590 }
3591 None
3592 }
3593
3594 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3595 let captures = self.syntax.captures(range, &self.text, |grammar| {
3596 grammar
3597 .highlights_config
3598 .as_ref()
3599 .map(|config| &config.query)
3600 });
3601 let highlight_maps = captures
3602 .grammars()
3603 .iter()
3604 .map(|grammar| grammar.highlight_map())
3605 .collect();
3606 (captures, highlight_maps)
3607 }
3608
3609 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3610 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3611 /// returned in chunks where each chunk has a single syntax highlighting style and
3612 /// diagnostic status.
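    ///
    /// A minimal usage sketch (not compiled here), assuming `snapshot` is a parsed
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// // Concatenate the buffer's text without syntax information.
    /// let text: String = snapshot
    ///     .chunks(0..snapshot.len(), false)
    ///     .map(|chunk| chunk.text)
    ///     .collect();
    ///
    /// // Language-aware chunks additionally carry a highlight id and diagnostic status.
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     let _ = (chunk.text, chunk.syntax_highlight_id);
    /// }
    /// ```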
3613 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3614 let range = range.start.to_offset(self)..range.end.to_offset(self);
3615
3616 let mut syntax = None;
3617 if language_aware {
3618 syntax = Some(self.get_highlights(range.clone()));
3619 }
3620 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3621 let diagnostics = language_aware;
3622 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3623 }
3624
3625 pub fn highlighted_text_for_range<T: ToOffset>(
3626 &self,
3627 range: Range<T>,
3628 override_style: Option<HighlightStyle>,
3629 syntax_theme: &SyntaxTheme,
3630 ) -> HighlightedText {
3631 HighlightedText::from_buffer_range(
3632 range,
3633 &self.text,
3634 &self.syntax,
3635 override_style,
3636 syntax_theme,
3637 )
3638 }
3639
3640 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
3642 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3643 let mut line = String::new();
3644 let mut row = range.start.row;
3645 for chunk in self
3646 .as_rope()
3647 .chunks_in_range(range.to_offset(self))
3648 .chain(["\n"])
3649 {
3650 for (newline_ix, text) in chunk.split('\n').enumerate() {
3651 if newline_ix > 0 {
3652 callback(row, &line);
3653 row += 1;
3654 line.clear();
3655 }
3656 line.push_str(text);
3657 }
3658 }
3659 }
3660
3661 /// Iterates over every [`SyntaxLayer`] in the buffer.
3662 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3663 self.syntax_layers_for_range(0..self.len(), true)
3664 }
3665
3666 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3667 let offset = position.to_offset(self);
3668 self.syntax_layers_for_range(offset..offset, false)
3669 .filter(|l| {
3670 if let Some(ranges) = l.included_sub_ranges {
3671 ranges.iter().any(|range| {
3672 let start = range.start.to_offset(self);
3673 start <= offset && {
3674 let end = range.end.to_offset(self);
3675 offset < end
3676 }
3677 })
3678 } else {
3679 l.node().start_byte() <= offset && l.node().end_byte() > offset
3680 }
3681 })
3682 .last()
3683 }
3684
3685 pub fn syntax_layers_for_range<D: ToOffset>(
3686 &self,
3687 range: Range<D>,
3688 include_hidden: bool,
3689 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3690 self.syntax
3691 .layers_for_range(range, &self.text, include_hidden)
3692 }
3693
3694 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3695 &self,
3696 range: Range<D>,
3697 ) -> Option<SyntaxLayer<'_>> {
3698 let range = range.to_offset(self);
3699 self.syntax
3700 .layers_for_range(range, &self.text, false)
3701 .max_by(|a, b| {
3702 if a.depth != b.depth {
3703 a.depth.cmp(&b.depth)
3704 } else if a.offset.0 != b.offset.0 {
3705 a.offset.0.cmp(&b.offset.0)
3706 } else {
3707 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3708 }
3709 })
3710 }
3711
3712 /// Returns the main [`Language`].
3713 pub fn language(&self) -> Option<&Arc<Language>> {
3714 self.language.as_ref()
3715 }
3716
3717 /// Returns the [`Language`] at the given location.
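    ///
    /// A minimal sketch (not compiled here), assuming `snapshot` is a parsed
    /// `BufferSnapshot` and `offset` is a byte offset within it:
    ///
    /// ```ignore
    /// // Inside an injected layer (e.g. a fenced code block), this returns the injected
    /// // language rather than the buffer's primary language.
    /// let language_name = snapshot.language_at(offset).map(|language| language.name());
    /// ```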
3718 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3719 self.syntax_layer_at(position)
3720 .map(|info| info.language)
3721 .or(self.language.as_ref())
3722 }
3723
3724 /// Returns the settings for the language at the given location.
3725 pub fn settings_at<'a, D: ToOffset>(
3726 &'a self,
3727 position: D,
3728 cx: &'a App,
3729 ) -> Cow<'a, LanguageSettings> {
3730 language_settings(
3731 self.language_at(position).map(|l| l.name()),
3732 self.file.as_ref(),
3733 cx,
3734 )
3735 }
3736
3737 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3738 CharClassifier::new(self.language_scope_at(point))
3739 }
3740
3741 /// Returns the [`LanguageScope`] at the given location.
3742 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3743 let offset = position.to_offset(self);
3744 let mut scope = None;
3745 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3746
3747 // Use the layer that has the smallest node intersecting the given point.
3748 for layer in self
3749 .syntax
3750 .layers_for_range(offset..offset, &self.text, false)
3751 {
3752 let mut cursor = layer.node().walk();
3753
3754 let mut range = None;
3755 loop {
3756 let child_range = cursor.node().byte_range();
3757 if !child_range.contains(&offset) {
3758 break;
3759 }
3760
3761 range = Some(child_range);
3762 if cursor.goto_first_child_for_byte(offset).is_none() {
3763 break;
3764 }
3765 }
3766
3767 if let Some(range) = range
3768 && smallest_range_and_depth.as_ref().is_none_or(
3769 |(smallest_range, smallest_range_depth)| {
3770 if layer.depth > *smallest_range_depth {
3771 true
3772 } else if layer.depth == *smallest_range_depth {
3773 range.len() < smallest_range.len()
3774 } else {
3775 false
3776 }
3777 },
3778 )
3779 {
3780 smallest_range_and_depth = Some((range, layer.depth));
3781 scope = Some(LanguageScope {
3782 language: layer.language.clone(),
3783 override_id: layer.override_id(offset, &self.text),
3784 });
3785 }
3786 }
3787
3788 scope.or_else(|| {
3789 self.language.clone().map(|language| LanguageScope {
3790 language,
3791 override_id: None,
3792 })
3793 })
3794 }
3795
3796 /// Returns a tuple of the range and character kind of the word
3797 /// surrounding the given position.
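    ///
    /// A minimal sketch (not compiled here), assuming `snapshot` is a `BufferSnapshot`
    /// and `cursor_offset` is a byte offset within it:
    ///
    /// ```ignore
    /// let (word_range, _kind) = snapshot.surrounding_word(cursor_offset, None);
    /// let word: String = snapshot.text_for_range(word_range).collect();
    /// ```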
3798 pub fn surrounding_word<T: ToOffset>(
3799 &self,
3800 start: T,
3801 scope_context: Option<CharScopeContext>,
3802 ) -> (Range<usize>, Option<CharKind>) {
3803 let mut start = start.to_offset(self);
3804 let mut end = start;
3805 let mut next_chars = self.chars_at(start).take(128).peekable();
3806 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3807
3808 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3809 let word_kind = cmp::max(
3810 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3811 next_chars.peek().copied().map(|c| classifier.kind(c)),
3812 );
3813
3814 for ch in prev_chars {
3815 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3816 start -= ch.len_utf8();
3817 } else {
3818 break;
3819 }
3820 }
3821
3822 for ch in next_chars {
3823 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3824 end += ch.len_utf8();
3825 } else {
3826 break;
3827 }
3828 }
3829
3830 (start..end, word_kind)
3831 }
3832
    /// Moves the `TreeCursor` to the smallest descendant or ancestor syntax node enclosing the
    /// given range. When `require_larger` is `true`, the node found must be strictly larger than
    /// the query range.
    ///
    /// Returns `true` if a node was found, and `false` otherwise. In the `false` case, the cursor
    /// is left at the root of the tree.
3838 fn goto_node_enclosing_range(
3839 cursor: &mut tree_sitter::TreeCursor,
3840 query_range: &Range<usize>,
3841 require_larger: bool,
3842 ) -> bool {
3843 let mut ascending = false;
3844 loop {
3845 let mut range = cursor.node().byte_range();
3846 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3849 if range.start > query_range.start {
3850 cursor.goto_previous_sibling();
3851 range = cursor.node().byte_range();
3852 }
3853 } else {
3854 // When the query range is non-empty and the current node ends exactly at the start,
3855 // move to the next sibling to find a node that extends beyond the start.
3856 if range.end == query_range.start {
3857 cursor.goto_next_sibling();
3858 range = cursor.node().byte_range();
3859 }
3860 }
3861
3862 let encloses = range.contains_inclusive(query_range)
3863 && (!require_larger || range.len() > query_range.len());
3864 if !encloses {
3865 ascending = true;
3866 if !cursor.goto_parent() {
3867 return false;
3868 }
3869 continue;
3870 } else if ascending {
3871 return true;
3872 }
3873
3874 // Descend into the current node.
3875 if cursor
3876 .goto_first_child_for_byte(query_range.start)
3877 .is_none()
3878 {
3879 return true;
3880 }
3881 }
3882 }
3883
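    /// Returns the smallest syntax node that encloses the given range and is strictly larger
    /// than it, considering every syntax layer overlapping the range. For an empty range that
    /// sits exactly between two nodes, a named node on the right is preferred over an anonymous
    /// token on the left.
    ///
    /// A minimal sketch (not compiled here), assuming `snapshot` is a parsed `BufferSnapshot`
    /// and `selection` is a byte range:
    ///
    /// ```ignore
    /// // Grow a selection to the nearest enclosing syntax node, if any.
    /// if let Some(node) = snapshot.syntax_ancestor(selection.clone()) {
    ///     let expanded = node.byte_range();
    ///     assert!(expanded.start <= selection.start && selection.end <= expanded.end);
    /// }
    /// ```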
3884 pub fn syntax_ancestor<'a, T: ToOffset>(
3885 &'a self,
3886 range: Range<T>,
3887 ) -> Option<tree_sitter::Node<'a>> {
3888 let range = range.start.to_offset(self)..range.end.to_offset(self);
3889 let mut result: Option<tree_sitter::Node<'a>> = None;
3890 for layer in self
3891 .syntax
3892 .layers_for_range(range.clone(), &self.text, true)
3893 {
3894 let mut cursor = layer.node().walk();
3895
3896 // Find the node that both contains the range and is larger than it.
3897 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3898 continue;
3899 }
3900
3901 let left_node = cursor.node();
3902 let mut layer_result = left_node;
3903
3904 // For an empty range, try to find another node immediately to the right of the range.
3905 if left_node.end_byte() == range.start {
3906 let mut right_node = None;
3907 while !cursor.goto_next_sibling() {
3908 if !cursor.goto_parent() {
3909 break;
3910 }
3911 }
3912
3913 while cursor.node().start_byte() == range.start {
3914 right_node = Some(cursor.node());
3915 if !cursor.goto_first_child() {
3916 break;
3917 }
3918 }
3919
3920 // If there is a candidate node on both sides of the (empty) range, then
3921 // decide between the two by favoring a named node over an anonymous token.
3922 // If both nodes are the same in that regard, favor the right one.
3923 if let Some(right_node) = right_node
3924 && (right_node.is_named() || !left_node.is_named())
3925 {
3926 layer_result = right_node;
3927 }
3928 }
3929
3930 if let Some(previous_result) = &result
3931 && previous_result.byte_range().len() < layer_result.byte_range().len()
3932 {
3933 continue;
3934 }
3935 result = Some(layer_result);
3936 }
3937
3938 result
3939 }
3940
3941 /// Find the previous sibling syntax node at the given range.
3942 ///
3943 /// This function locates the syntax node that precedes the node containing
3944 /// the given range. It searches hierarchically by:
3945 /// 1. Finding the node that contains the given range
3946 /// 2. Looking for the previous sibling at the same tree level
3947 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3948 ///
3949 /// Returns `None` if there is no previous sibling at any ancestor level.
3950 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3951 &'a self,
3952 range: Range<T>,
3953 ) -> Option<tree_sitter::Node<'a>> {
3954 let range = range.start.to_offset(self)..range.end.to_offset(self);
3955 let mut result: Option<tree_sitter::Node<'a>> = None;
3956
3957 for layer in self
3958 .syntax
3959 .layers_for_range(range.clone(), &self.text, true)
3960 {
3961 let mut cursor = layer.node().walk();
3962
3963 // Find the node that contains the range
3964 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3965 continue;
3966 }
3967
3968 // Look for the previous sibling, moving up ancestor levels if needed
3969 loop {
3970 if cursor.goto_previous_sibling() {
3971 let layer_result = cursor.node();
3972
3973 if let Some(previous_result) = &result {
3974 if previous_result.byte_range().end < layer_result.byte_range().end {
3975 continue;
3976 }
3977 }
3978 result = Some(layer_result);
3979 break;
3980 }
3981
3982 // No sibling found at this level, try moving up to parent
3983 if !cursor.goto_parent() {
3984 break;
3985 }
3986 }
3987 }
3988
3989 result
3990 }
3991
3992 /// Find the next sibling syntax node at the given range.
3993 ///
3994 /// This function locates the syntax node that follows the node containing
3995 /// the given range. It searches hierarchically by:
3996 /// 1. Finding the node that contains the given range
3997 /// 2. Looking for the next sibling at the same tree level
3998 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3999 ///
4000 /// Returns `None` if there is no next sibling at any ancestor level.
4001 pub fn syntax_next_sibling<'a, T: ToOffset>(
4002 &'a self,
4003 range: Range<T>,
4004 ) -> Option<tree_sitter::Node<'a>> {
4005 let range = range.start.to_offset(self)..range.end.to_offset(self);
4006 let mut result: Option<tree_sitter::Node<'a>> = None;
4007
4008 for layer in self
4009 .syntax
4010 .layers_for_range(range.clone(), &self.text, true)
4011 {
4012 let mut cursor = layer.node().walk();
4013
4014 // Find the node that contains the range
4015 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
4016 continue;
4017 }
4018
4019 // Look for the next sibling, moving up ancestor levels if needed
4020 loop {
4021 if cursor.goto_next_sibling() {
4022 let layer_result = cursor.node();
4023
4024 if let Some(previous_result) = &result {
4025 if previous_result.byte_range().start > layer_result.byte_range().start {
4026 continue;
4027 }
4028 }
4029 result = Some(layer_result);
4030 break;
4031 }
4032
4033 // No sibling found at this level, try moving up to parent
4034 if !cursor.goto_parent() {
4035 break;
4036 }
4037 }
4038 }
4039
4040 result
4041 }
4042
4043 /// Returns the root syntax node within the given row
4044 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
4045 let start_offset = position.to_offset(self);
4046
4047 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
4048
4049 let layer = self
4050 .syntax
4051 .layers_for_range(start_offset..start_offset, &self.text, true)
4052 .next()?;
4053
4054 let mut cursor = layer.node().walk();
4055
4056 // Descend to the first leaf that touches the start of the range.
4057 while cursor.goto_first_child_for_byte(start_offset).is_some() {
4058 if cursor.node().end_byte() == start_offset {
4059 cursor.goto_next_sibling();
4060 }
4061 }
4062
4063 // Ascend to the root node within the same row.
4064 while cursor.goto_parent() {
4065 if cursor.node().start_position().row != row {
4066 break;
4067 }
4068 }
4069
4070 Some(cursor.node())
4071 }
4072
4073 /// Returns the outline for the buffer.
4074 ///
4075 /// This method allows passing an optional [`SyntaxTheme`] to
4076 /// syntax-highlight the returned symbols.
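    ///
    /// A minimal sketch (not compiled here) using the related
    /// `outline_items_as_points_containing` to walk the symbols with their nesting depth,
    /// assuming `snapshot` is a parsed `BufferSnapshot`:
    ///
    /// ```ignore
    /// for item in snapshot.outline_items_as_points_containing(0..snapshot.len(), true, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```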
4077 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4078 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4079 }
4080
4081 /// Returns all the symbols that contain the given position.
4082 ///
4083 /// This method allows passing an optional [`SyntaxTheme`] to
4084 /// syntax-highlight the returned symbols.
4085 pub fn symbols_containing<T: ToOffset>(
4086 &self,
4087 position: T,
4088 theme: Option<&SyntaxTheme>,
4089 ) -> Vec<OutlineItem<Anchor>> {
4090 let position = position.to_offset(self);
4091 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4092 let end = self.clip_offset(position + 1, Bias::Right);
4093 let mut items = self.outline_items_containing(start..end, false, theme);
4094 let mut prev_depth = None;
4095 items.retain(|item| {
4096 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4097 prev_depth = Some(item.depth);
4098 result
4099 });
4100 items
4101 }
4102
4103 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4104 let range = range.to_offset(self);
4105 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4106 grammar.outline_config.as_ref().map(|c| &c.query)
4107 });
4108 let configs = matches
4109 .grammars()
4110 .iter()
4111 .map(|g| g.outline_config.as_ref().unwrap())
4112 .collect::<Vec<_>>();
4113
4114 while let Some(mat) = matches.peek() {
4115 let config = &configs[mat.grammar_index];
4116 let containing_item_node = maybe!({
4117 let item_node = mat.captures.iter().find_map(|cap| {
4118 if cap.index == config.item_capture_ix {
4119 Some(cap.node)
4120 } else {
4121 None
4122 }
4123 })?;
4124
4125 let item_byte_range = item_node.byte_range();
4126 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4127 None
4128 } else {
4129 Some(item_node)
4130 }
4131 });
4132
4133 if let Some(item_node) = containing_item_node {
4134 return Some(
4135 Point::from_ts_point(item_node.start_position())
4136 ..Point::from_ts_point(item_node.end_position()),
4137 );
4138 }
4139
4140 matches.advance();
4141 }
4142 None
4143 }
4144
4145 pub fn outline_items_containing<T: ToOffset>(
4146 &self,
4147 range: Range<T>,
4148 include_extra_context: bool,
4149 theme: Option<&SyntaxTheme>,
4150 ) -> Vec<OutlineItem<Anchor>> {
4151 self.outline_items_containing_internal(
4152 range,
4153 include_extra_context,
4154 theme,
4155 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4156 )
4157 }
4158
4159 pub fn outline_items_as_points_containing<T: ToOffset>(
4160 &self,
4161 range: Range<T>,
4162 include_extra_context: bool,
4163 theme: Option<&SyntaxTheme>,
4164 ) -> Vec<OutlineItem<Point>> {
4165 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4166 range
4167 })
4168 }
4169
4170 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4171 &self,
4172 range: Range<T>,
4173 include_extra_context: bool,
4174 theme: Option<&SyntaxTheme>,
4175 ) -> Vec<OutlineItem<usize>> {
4176 self.outline_items_containing_internal(
4177 range,
4178 include_extra_context,
4179 theme,
4180 |buffer, range| range.to_offset(buffer),
4181 )
4182 }
4183
4184 fn outline_items_containing_internal<T: ToOffset, U>(
4185 &self,
4186 range: Range<T>,
4187 include_extra_context: bool,
4188 theme: Option<&SyntaxTheme>,
4189 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4190 ) -> Vec<OutlineItem<U>> {
4191 let range = range.to_offset(self);
4192 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4193 grammar.outline_config.as_ref().map(|c| &c.query)
4194 });
4195
4196 let mut items = Vec::new();
4197 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4198 while let Some(mat) = matches.peek() {
4199 let config = matches.grammars()[mat.grammar_index]
4200 .outline_config
4201 .as_ref()
4202 .unwrap();
4203 if let Some(item) =
4204 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4205 {
4206 items.push(item);
4207 } else if let Some(capture) = mat
4208 .captures
4209 .iter()
4210 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4211 {
4212 let capture_range = capture.node.start_position()..capture.node.end_position();
4213 let mut capture_row_range =
4214 capture_range.start.row as u32..capture_range.end.row as u32;
4215 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4216 {
4217 capture_row_range.end -= 1;
4218 }
4219 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4220 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4221 last_row_range.end = capture_row_range.end;
4222 } else {
4223 annotation_row_ranges.push(capture_row_range);
4224 }
4225 } else {
4226 annotation_row_ranges.push(capture_row_range);
4227 }
4228 }
4229 matches.advance();
4230 }
4231
4232 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4233
4234 // Assign depths based on containment relationships and convert to anchors.
4235 let mut item_ends_stack = Vec::<Point>::new();
4236 let mut anchor_items = Vec::new();
4237 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4238 for item in items {
4239 while let Some(last_end) = item_ends_stack.last().copied() {
4240 if last_end < item.range.end {
4241 item_ends_stack.pop();
4242 } else {
4243 break;
4244 }
4245 }
4246
4247 let mut annotation_row_range = None;
4248 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4249 let row_preceding_item = item.range.start.row.saturating_sub(1);
4250 if next_annotation_row_range.end < row_preceding_item {
4251 annotation_row_ranges.next();
4252 } else {
4253 if next_annotation_row_range.end == row_preceding_item {
4254 annotation_row_range = Some(next_annotation_row_range.clone());
4255 annotation_row_ranges.next();
4256 }
4257 break;
4258 }
4259 }
4260
4261 anchor_items.push(OutlineItem {
4262 depth: item_ends_stack.len(),
4263 range: range_callback(self, item.range.clone()),
4264 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4265 text: item.text,
4266 highlight_ranges: item.highlight_ranges,
4267 name_ranges: item.name_ranges,
4268 body_range: item.body_range.map(|r| range_callback(self, r)),
4269 annotation_range: annotation_row_range.map(|annotation_range| {
4270 let point_range = Point::new(annotation_range.start, 0)
4271 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4272 range_callback(self, point_range)
4273 }),
4274 });
4275 item_ends_stack.push(item.range.end);
4276 }
4277
4278 anchor_items
4279 }
4280
4281 fn next_outline_item(
4282 &self,
4283 config: &OutlineConfig,
4284 mat: &SyntaxMapMatch,
4285 range: &Range<usize>,
4286 include_extra_context: bool,
4287 theme: Option<&SyntaxTheme>,
4288 ) -> Option<OutlineItem<Point>> {
4289 let item_node = mat.captures.iter().find_map(|cap| {
4290 if cap.index == config.item_capture_ix {
4291 Some(cap.node)
4292 } else {
4293 None
4294 }
4295 })?;
4296
4297 let item_byte_range = item_node.byte_range();
4298 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4299 return None;
4300 }
4301 let item_point_range = Point::from_ts_point(item_node.start_position())
4302 ..Point::from_ts_point(item_node.end_position());
4303
4304 let mut open_point = None;
4305 let mut close_point = None;
4306
4307 let mut buffer_ranges = Vec::new();
4308 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4309 let mut range = node.start_byte()..node.end_byte();
4310 let start = node.start_position();
4311 if node.end_position().row > start.row {
4312 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4313 }
4314
4315 if !range.is_empty() {
4316 buffer_ranges.push((range, node_is_name));
4317 }
4318 };
4319
4320 for capture in mat.captures {
4321 if capture.index == config.name_capture_ix {
4322 add_to_buffer_ranges(capture.node, true);
4323 } else if Some(capture.index) == config.context_capture_ix
4324 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4325 {
4326 add_to_buffer_ranges(capture.node, false);
4327 } else {
4328 if Some(capture.index) == config.open_capture_ix {
4329 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4330 } else if Some(capture.index) == config.close_capture_ix {
4331 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4332 }
4333 }
4334 }
4335
4336 if buffer_ranges.is_empty() {
4337 return None;
4338 }
4339 let source_range_for_text =
4340 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4341
4342 let mut text = String::new();
4343 let mut highlight_ranges = Vec::new();
4344 let mut name_ranges = Vec::new();
4345 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4346 let mut last_buffer_range_end = 0;
4347 for (buffer_range, is_name) in buffer_ranges {
4348 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4349 if space_added {
4350 text.push(' ');
4351 }
4352 let before_append_len = text.len();
4353 let mut offset = buffer_range.start;
4354 chunks.seek(buffer_range.clone());
4355 for mut chunk in chunks.by_ref() {
4356 if chunk.text.len() > buffer_range.end - offset {
4357 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4358 offset = buffer_range.end;
4359 } else {
4360 offset += chunk.text.len();
4361 }
4362 let style = chunk
4363 .syntax_highlight_id
4364 .zip(theme)
4365 .and_then(|(highlight, theme)| highlight.style(theme));
4366 if let Some(style) = style {
4367 let start = text.len();
4368 let end = start + chunk.text.len();
4369 highlight_ranges.push((start..end, style));
4370 }
4371 text.push_str(chunk.text);
4372 if offset >= buffer_range.end {
4373 break;
4374 }
4375 }
4376 if is_name {
4377 let after_append_len = text.len();
4378 let start = if space_added && !name_ranges.is_empty() {
4379 before_append_len - 1
4380 } else {
4381 before_append_len
4382 };
4383 name_ranges.push(start..after_append_len);
4384 }
4385 last_buffer_range_end = buffer_range.end;
4386 }
4387
4388 Some(OutlineItem {
4389 depth: 0, // We'll calculate the depth later
4390 range: item_point_range,
4391 source_range_for_text: source_range_for_text.to_point(self),
4392 text,
4393 highlight_ranges,
4394 name_ranges,
4395 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4396 annotation_range: None,
4397 })
4398 }
4399
4400 pub fn function_body_fold_ranges<T: ToOffset>(
4401 &self,
4402 within: Range<T>,
4403 ) -> impl Iterator<Item = Range<usize>> + '_ {
4404 self.text_object_ranges(within, TreeSitterOptions::default())
4405 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4406 }
4407
4408 /// For each grammar in the language, runs the provided
4409 /// [`tree_sitter::Query`] against the given range.
4410 pub fn matches(
4411 &self,
4412 range: Range<usize>,
4413 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4414 ) -> SyntaxMapMatches<'_> {
4415 self.syntax.matches(range, self, query)
4416 }
4417
4418 /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
    /// Hence, it may return more bracket pairs than the range itself contains.
    ///
    /// Chunks present in `known_chunks` are skipped.
4422 /// The resulting bracket match collections are not ordered.
4423 pub fn fetch_bracket_ranges(
4424 &self,
4425 range: Range<usize>,
4426 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4427 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4428 let mut all_bracket_matches = HashMap::default();
4429
4430 for chunk in self
4431 .tree_sitter_data
4432 .chunks
4433 .applicable_chunks(&[range.to_point(self)])
4434 {
4435 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4436 continue;
4437 }
4438 let chunk_range = chunk.anchor_range();
4439 let chunk_range = chunk_range.to_offset(&self);
4440
4441 if let Some(cached_brackets) =
4442 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4443 {
4444 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4445 continue;
4446 }
4447
4448 let mut all_brackets = Vec::new();
4449 let mut opens = Vec::new();
4450 let mut color_pairs = Vec::new();
4451
4452 let mut matches = self.syntax.matches_with_options(
4453 chunk_range.clone(),
4454 &self.text,
4455 TreeSitterOptions {
4456 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4457 max_start_depth: None,
4458 },
4459 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4460 );
4461 let configs = matches
4462 .grammars()
4463 .iter()
4464 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4465 .collect::<Vec<_>>();
4466
4467 while let Some(mat) = matches.peek() {
4468 let mut open = None;
4469 let mut close = None;
4470 let syntax_layer_depth = mat.depth;
4471 let config = configs[mat.grammar_index];
4472 let pattern = &config.patterns[mat.pattern_index];
4473 for capture in mat.captures {
4474 if capture.index == config.open_capture_ix {
4475 open = Some(capture.node.byte_range());
4476 } else if capture.index == config.close_capture_ix {
4477 close = Some(capture.node.byte_range());
4478 }
4479 }
4480
4481 matches.advance();
4482
4483 let Some((open_range, close_range)) = open.zip(close) else {
4484 continue;
4485 };
4486
4487 let bracket_range = open_range.start..=close_range.end;
4488 if !bracket_range.overlaps(&chunk_range) {
4489 continue;
4490 }
4491
4492 let index = all_brackets.len();
4493 all_brackets.push(BracketMatch {
4494 open_range: open_range.clone(),
4495 close_range: close_range.clone(),
4496 newline_only: pattern.newline_only,
4497 syntax_layer_depth,
4498 color_index: None,
4499 });
4500
                // Certain languages have "brackets" that are not really brackets, e.g. tags, and
                // such a bracket pair will match the entire tag with all of the text inside.
                // For now, avoid coloring any pair where both brackets are longer than a single
                // character. We still need to colorize `<Element/>` bracket pairs, so this check
                // cannot be made stricter.
4505 let should_color =
4506 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4507 if should_color {
4508 opens.push(open_range.clone());
4509 color_pairs.push((open_range, close_range, index));
4510 }
4511 }
4512
4513 opens.sort_by_key(|r| (r.start, r.end));
4514 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4515 color_pairs.sort_by_key(|(_, close, _)| close.end);
4516
4517 let mut open_stack = Vec::new();
4518 let mut open_index = 0;
4519 for (open, close, index) in color_pairs {
4520 while open_index < opens.len() && opens[open_index].start < close.start {
4521 open_stack.push(opens[open_index].clone());
4522 open_index += 1;
4523 }
4524
4525 if open_stack.last() == Some(&open) {
4526 let depth_index = open_stack.len() - 1;
4527 all_brackets[index].color_index = Some(depth_index);
4528 open_stack.pop();
4529 }
4530 }
4531
4532 all_brackets.sort_by_key(|bracket_match| {
4533 (bracket_match.open_range.start, bracket_match.open_range.end)
4534 });
4535
4536 if let empty_slot @ None =
4537 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4538 {
4539 *empty_slot = Some(all_brackets.clone());
4540 }
4541 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4542 }
4543
4544 all_bracket_matches
4545 }
4546
4547 pub fn all_bracket_ranges(
4548 &self,
4549 range: Range<usize>,
4550 ) -> impl Iterator<Item = BracketMatch<usize>> {
4551 self.fetch_bracket_ranges(range.clone(), None)
4552 .into_values()
4553 .flatten()
4554 .filter(move |bracket_match| {
4555 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4556 bracket_range.overlaps(&range)
4557 })
4558 }
4559
    /// Returns bracket range pairs overlapping or adjacent to `range`.
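    ///
    /// A minimal sketch (not compiled here), assuming `snapshot` is a parsed `BufferSnapshot`
    /// and `selection` is a byte range:
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(selection.clone()) {
    ///     // `pair.open_range` and `pair.close_range` are byte ranges of the two brackets.
    ///     let open: String = snapshot.text_for_range(pair.open_range.clone()).collect();
    ///     let close: String = snapshot.text_for_range(pair.close_range.clone()).collect();
    /// }
    /// ```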
4561 pub fn bracket_ranges<T: ToOffset>(
4562 &self,
4563 range: Range<T>,
4564 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4565 // Find bracket pairs that *inclusively* contain the given range.
4566 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4567 self.all_bracket_ranges(range)
4568 .filter(|pair| !pair.newline_only)
4569 }
4570
4571 pub fn debug_variables_query<T: ToOffset>(
4572 &self,
4573 range: Range<T>,
4574 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4575 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4576
4577 let mut matches = self.syntax.matches_with_options(
4578 range.clone(),
4579 &self.text,
4580 TreeSitterOptions::default(),
4581 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4582 );
4583
4584 let configs = matches
4585 .grammars()
4586 .iter()
4587 .map(|grammar| grammar.debug_variables_config.as_ref())
4588 .collect::<Vec<_>>();
4589
4590 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4591
4592 iter::from_fn(move || {
4593 loop {
4594 while let Some(capture) = captures.pop() {
4595 if capture.0.overlaps(&range) {
4596 return Some(capture);
4597 }
4598 }
4599
4600 let mat = matches.peek()?;
4601
4602 let Some(config) = configs[mat.grammar_index].as_ref() else {
4603 matches.advance();
4604 continue;
4605 };
4606
4607 for capture in mat.captures {
4608 let Some(ix) = config
4609 .objects_by_capture_ix
4610 .binary_search_by_key(&capture.index, |e| e.0)
4611 .ok()
4612 else {
4613 continue;
4614 };
4615 let text_object = config.objects_by_capture_ix[ix].1;
4616 let byte_range = capture.node.byte_range();
4617
4618 let mut found = false;
4619 for (range, existing) in captures.iter_mut() {
4620 if existing == &text_object {
4621 range.start = range.start.min(byte_range.start);
4622 range.end = range.end.max(byte_range.end);
4623 found = true;
4624 break;
4625 }
4626 }
4627
4628 if !found {
4629 captures.push((byte_range, text_object));
4630 }
4631 }
4632
4633 matches.advance();
4634 }
4635 })
4636 }
4637
4638 pub fn text_object_ranges<T: ToOffset>(
4639 &self,
4640 range: Range<T>,
4641 options: TreeSitterOptions,
4642 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4643 let range =
4644 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4645
4646 let mut matches =
4647 self.syntax
4648 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4649 grammar.text_object_config.as_ref().map(|c| &c.query)
4650 });
4651
4652 let configs = matches
4653 .grammars()
4654 .iter()
4655 .map(|grammar| grammar.text_object_config.as_ref())
4656 .collect::<Vec<_>>();
4657
4658 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4659
4660 iter::from_fn(move || {
4661 loop {
4662 while let Some(capture) = captures.pop() {
4663 if capture.0.overlaps(&range) {
4664 return Some(capture);
4665 }
4666 }
4667
4668 let mat = matches.peek()?;
4669
4670 let Some(config) = configs[mat.grammar_index].as_ref() else {
4671 matches.advance();
4672 continue;
4673 };
4674
4675 for capture in mat.captures {
4676 let Some(ix) = config
4677 .text_objects_by_capture_ix
4678 .binary_search_by_key(&capture.index, |e| e.0)
4679 .ok()
4680 else {
4681 continue;
4682 };
4683 let text_object = config.text_objects_by_capture_ix[ix].1;
4684 let byte_range = capture.node.byte_range();
4685
4686 let mut found = false;
4687 for (range, existing) in captures.iter_mut() {
4688 if existing == &text_object {
4689 range.start = range.start.min(byte_range.start);
4690 range.end = range.end.max(byte_range.end);
4691 found = true;
4692 break;
4693 }
4694 }
4695
4696 if !found {
4697 captures.push((byte_range, text_object));
4698 }
4699 }
4700
4701 matches.advance();
4702 }
4703 })
4704 }
4705
4706 /// Returns enclosing bracket ranges containing the given range
4707 pub fn enclosing_bracket_ranges<T: ToOffset>(
4708 &self,
4709 range: Range<T>,
4710 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4711 let range = range.start.to_offset(self)..range.end.to_offset(self);
4712
4713 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4714 let max_depth = result
4715 .iter()
4716 .map(|mat| mat.syntax_layer_depth)
4717 .max()
4718 .unwrap_or(0);
4719 result.into_iter().filter(move |pair| {
4720 pair.open_range.start <= range.start
4721 && pair.close_range.end >= range.end
4722 && pair.syntax_layer_depth == max_depth
4723 })
4724 }
4725
    /// Returns the smallest pair of enclosing bracket ranges containing the given range, or
    /// `None` if no bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket pairs are considered.
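    ///
    /// A minimal sketch (not compiled here), assuming `snapshot` is a parsed `BufferSnapshot`
    /// and `cursor` is a byte offset:
    ///
    /// ```ignore
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(cursor..cursor, None) {
    ///     // `open` and `close` are the byte ranges of the innermost surrounding bracket pair.
    /// }
    /// ```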
4729 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4730 &self,
4731 range: Range<T>,
4732 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4733 ) -> Option<(Range<usize>, Range<usize>)> {
4734 let range = range.start.to_offset(self)..range.end.to_offset(self);
4735
4736 // Get the ranges of the innermost pair of brackets.
4737 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4738
4739 for pair in self.enclosing_bracket_ranges(range) {
4740 if let Some(range_filter) = range_filter
4741 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4742 {
4743 continue;
4744 }
4745
4746 let len = pair.close_range.end - pair.open_range.start;
4747
4748 if let Some((existing_open, existing_close)) = &result {
4749 let existing_len = existing_close.end - existing_open.start;
4750 if len > existing_len {
4751 continue;
4752 }
4753 }
4754
4755 result = Some((pair.open_range, pair.close_range));
4756 }
4757
4758 result
4759 }
4760
    /// Returns byte offset ranges for any matches of the redaction query.
4762 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4763 /// will be run on the relevant section of the buffer.
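    ///
    /// A minimal sketch (not compiled here), assuming `snapshot` is a parsed `BufferSnapshot`:
    ///
    /// ```ignore
    /// // Byte ranges that should be rendered obscured (for example, secret values).
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```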
4764 pub fn redacted_ranges<T: ToOffset>(
4765 &self,
4766 range: Range<T>,
4767 ) -> impl Iterator<Item = Range<usize>> + '_ {
4768 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4769 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4770 grammar
4771 .redactions_config
4772 .as_ref()
4773 .map(|config| &config.query)
4774 });
4775
4776 let configs = syntax_matches
4777 .grammars()
4778 .iter()
4779 .map(|grammar| grammar.redactions_config.as_ref())
4780 .collect::<Vec<_>>();
4781
4782 iter::from_fn(move || {
4783 let redacted_range = syntax_matches
4784 .peek()
4785 .and_then(|mat| {
4786 configs[mat.grammar_index].and_then(|config| {
4787 mat.captures
4788 .iter()
4789 .find(|capture| capture.index == config.redaction_capture_ix)
4790 })
4791 })
4792 .map(|mat| mat.node.byte_range());
4793 syntax_matches.advance();
4794 redacted_range
4795 })
4796 }
4797
4798 pub fn injections_intersecting_range<T: ToOffset>(
4799 &self,
4800 range: Range<T>,
4801 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4802 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4803
4804 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4805 grammar
4806 .injection_config
4807 .as_ref()
4808 .map(|config| &config.query)
4809 });
4810
4811 let configs = syntax_matches
4812 .grammars()
4813 .iter()
4814 .map(|grammar| grammar.injection_config.as_ref())
4815 .collect::<Vec<_>>();
4816
4817 iter::from_fn(move || {
4818 let ranges = syntax_matches.peek().and_then(|mat| {
4819 let config = &configs[mat.grammar_index]?;
4820 let content_capture_range = mat.captures.iter().find_map(|capture| {
4821 if capture.index == config.content_capture_ix {
4822 Some(capture.node.byte_range())
4823 } else {
4824 None
4825 }
4826 })?;
4827 let language = self.language_at(content_capture_range.start)?;
4828 Some((content_capture_range, language))
4829 });
4830 syntax_matches.advance();
4831 ranges
4832 })
4833 }
4834
4835 pub fn runnable_ranges(
4836 &self,
4837 offset_range: Range<usize>,
4838 ) -> impl Iterator<Item = RunnableRange> + '_ {
4839 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4840 grammar.runnable_config.as_ref().map(|config| &config.query)
4841 });
4842
4843 let test_configs = syntax_matches
4844 .grammars()
4845 .iter()
4846 .map(|grammar| grammar.runnable_config.as_ref())
4847 .collect::<Vec<_>>();
4848
4849 iter::from_fn(move || {
4850 loop {
4851 let mat = syntax_matches.peek()?;
4852
4853 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4854 let mut run_range = None;
4855 let full_range = mat.captures.iter().fold(
4856 Range {
4857 start: usize::MAX,
4858 end: 0,
4859 },
4860 |mut acc, next| {
4861 let byte_range = next.node.byte_range();
4862 if acc.start > byte_range.start {
4863 acc.start = byte_range.start;
4864 }
4865 if acc.end < byte_range.end {
4866 acc.end = byte_range.end;
4867 }
4868 acc
4869 },
4870 );
4871 if full_range.start > full_range.end {
4872 // We did not find a full spanning range of this match.
4873 return None;
4874 }
4875 let extra_captures: SmallVec<[_; 1]> =
4876 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4877 test_configs
4878 .extra_captures
4879 .get(capture.index as usize)
4880 .cloned()
4881 .and_then(|tag_name| match tag_name {
4882 RunnableCapture::Named(name) => {
4883 Some((capture.node.byte_range(), name))
4884 }
4885 RunnableCapture::Run => {
4886 let _ = run_range.insert(capture.node.byte_range());
4887 None
4888 }
4889 })
4890 }));
4891 let run_range = run_range?;
4892 let tags = test_configs
4893 .query
4894 .property_settings(mat.pattern_index)
4895 .iter()
4896 .filter_map(|property| {
4897 if *property.key == *"tag" {
4898 property
4899 .value
4900 .as_ref()
4901 .map(|value| RunnableTag(value.to_string().into()))
4902 } else {
4903 None
4904 }
4905 })
4906 .collect();
4907 let extra_captures = extra_captures
4908 .into_iter()
4909 .map(|(range, name)| {
4910 (
4911 name.to_string(),
4912 self.text_for_range(range).collect::<String>(),
4913 )
4914 })
4915 .collect();
4916 // All tags should have the same range.
4917 Some(RunnableRange {
4918 run_range,
4919 full_range,
4920 runnable: Runnable {
4921 tags,
4922 language: mat.language,
4923 buffer: self.remote_id(),
4924 },
4925 extra_captures,
4926 buffer_id: self.remote_id(),
4927 })
4928 });
4929
4930 syntax_matches.advance();
4931 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. But we don't want
                    // to end this iterator just because a match lacked a run marker, so in that
                    // case we simply loop around to the next match.
4934 return test_range;
4935 }
4936 }
4937 })
4938 }
4939
    /// Returns the selections of each peer that intersect the given range, optionally including
    /// the local replica's selections.
4941 #[allow(clippy::type_complexity)]
4942 pub fn selections_in_range(
4943 &self,
4944 range: Range<Anchor>,
4945 include_local: bool,
4946 ) -> impl Iterator<
4947 Item = (
4948 ReplicaId,
4949 bool,
4950 CursorShape,
4951 impl Iterator<Item = &Selection<Anchor>> + '_,
4952 ),
4953 > + '_ {
4954 self.remote_selections
4955 .iter()
4956 .filter(move |(replica_id, set)| {
4957 (include_local || **replica_id != self.text.replica_id())
4958 && !set.selections.is_empty()
4959 })
4960 .map(move |(replica_id, set)| {
4961 let start_ix = match set.selections.binary_search_by(|probe| {
4962 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4963 }) {
4964 Ok(ix) | Err(ix) => ix,
4965 };
4966 let end_ix = match set.selections.binary_search_by(|probe| {
4967 probe.start.cmp(&range.end, self).then(Ordering::Less)
4968 }) {
4969 Ok(ix) | Err(ix) => ix,
4970 };
4971
4972 (
4973 *replica_id,
4974 set.line_mode,
4975 set.cursor_shape,
4976 set.selections[start_ix..end_ix].iter(),
4977 )
4978 })
4979 }
4980
    /// Returns whether the buffer contains any diagnostics.
4982 pub fn has_diagnostics(&self) -> bool {
4983 !self.diagnostics.is_empty()
4984 }
4985
4986 /// Returns all the diagnostics intersecting the given range.
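    ///
    /// A minimal sketch (not compiled here), assuming `snapshot` is a `BufferSnapshot` with
    /// diagnostics already set:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     // Entries are yielded in order of their start offset, then severity.
    ///     let _ = (entry.range.clone(), entry.diagnostic.severity);
    /// }
    /// ```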
4987 pub fn diagnostics_in_range<'a, T, O>(
4988 &'a self,
4989 search_range: Range<T>,
4990 reversed: bool,
4991 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4992 where
4993 T: 'a + Clone + ToOffset,
4994 O: 'a + FromAnchor,
4995 {
4996 let mut iterators: Vec<_> = self
4997 .diagnostics
4998 .iter()
4999 .map(|(_, collection)| {
5000 collection
5001 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
5002 .peekable()
5003 })
5004 .collect();
5005
5006 std::iter::from_fn(move || {
5007 let (next_ix, _) = iterators
5008 .iter_mut()
5009 .enumerate()
5010 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
5011 .min_by(|(_, a), (_, b)| {
5012 let cmp = a
5013 .range
5014 .start
5015 .cmp(&b.range.start, self)
5016 // when range is equal, sort by diagnostic severity
5017 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
5018 // and stabilize order with group_id
5019 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
5020 if reversed { cmp.reverse() } else { cmp }
5021 })?;
5022 iterators[next_ix]
5023 .next()
5024 .map(
5025 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
5026 diagnostic,
5027 range: FromAnchor::from_anchor(&range.start, self)
5028 ..FromAnchor::from_anchor(&range.end, self),
5029 },
5030 )
5031 })
5032 }
5033
5034 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
5035 /// should be used instead.
5036 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
5037 &self.diagnostics
5038 }
5039
5040 /// Returns all the diagnostic groups associated with the given
5041 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
5043 pub fn diagnostic_groups(
5044 &self,
5045 language_server_id: Option<LanguageServerId>,
5046 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
5047 let mut groups = Vec::new();
5048
5049 if let Some(language_server_id) = language_server_id {
5050 if let Ok(ix) = self
5051 .diagnostics
5052 .binary_search_by_key(&language_server_id, |e| e.0)
5053 {
5054 self.diagnostics[ix]
5055 .1
5056 .groups(language_server_id, &mut groups, self);
5057 }
5058 } else {
5059 for (language_server_id, diagnostics) in self.diagnostics.iter() {
5060 diagnostics.groups(*language_server_id, &mut groups, self);
5061 }
5062 }
5063
5064 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
5065 let a_start = &group_a.entries[group_a.primary_ix].range.start;
5066 let b_start = &group_b.entries[group_b.primary_ix].range.start;
5067 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5068 });
5069
5070 groups
5071 }
5072
5073 /// Returns an iterator over the diagnostics for the given group.
5074 pub fn diagnostic_group<O>(
5075 &self,
5076 group_id: usize,
5077 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5078 where
5079 O: FromAnchor + 'static,
5080 {
5081 self.diagnostics
5082 .iter()
5083 .flat_map(move |(_, set)| set.group(group_id, self))
5084 }
5085
5086 /// An integer version number that accounts for all updates besides
5087 /// the buffer's text itself (which is versioned via a version vector).
5088 pub fn non_text_state_update_count(&self) -> usize {
5089 self.non_text_state_update_count
5090 }
5091
5092 /// An integer version that changes when the buffer's syntax changes.
5093 pub fn syntax_update_count(&self) -> usize {
5094 self.syntax.update_count()
5095 }
5096
5097 /// Returns a snapshot of underlying file.
5098 pub fn file(&self) -> Option<&Arc<dyn File>> {
5099 self.file.as_ref()
5100 }
5101
5102 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5103 if let Some(file) = self.file() {
5104 if file.path().file_name().is_none() || include_root {
5105 Some(file.full_path(cx).to_string_lossy().into_owned())
5106 } else {
5107 Some(file.path().display(file.path_style(cx)).to_string())
5108 }
5109 } else {
5110 None
5111 }
5112 }
5113
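    /// Collects the words in the given offset range into a map from word text to its anchor
    /// range.
    ///
    /// A minimal sketch (not compiled here), assuming `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// // Gather words containing the characters of "cfg" (in order), skipping words that
    /// // start with a digit.
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```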
5114 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5115 let query_str = query.fuzzy_contents;
5116 if query_str.is_some_and(|query| query.is_empty()) {
5117 return BTreeMap::default();
5118 }
5119
5120 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5121 language,
5122 override_id: None,
5123 }));
5124
5125 let mut query_ix = 0;
5126 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5127 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5128
5129 let mut words = BTreeMap::default();
5130 let mut current_word_start_ix = None;
5131 let mut chunk_ix = query.range.start;
5132 for chunk in self.chunks(query.range, false) {
5133 for (i, c) in chunk.text.char_indices() {
5134 let ix = chunk_ix + i;
5135 if classifier.is_word(c) {
5136 if current_word_start_ix.is_none() {
5137 current_word_start_ix = Some(ix);
5138 }
5139
5140 if let Some(query_chars) = &query_chars
5141 && query_ix < query_len
5142 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5143 {
5144 query_ix += 1;
5145 }
5146 continue;
5147 } else if let Some(word_start) = current_word_start_ix.take()
5148 && query_ix == query_len
5149 {
5150 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5151 let mut word_text = self.text_for_range(word_start..ix).peekable();
5152 let first_char = word_text
5153 .peek()
5154 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words and
                    // "words" that start with a digit.
5156 if !query.skip_digits
5157 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5158 {
5159 words.insert(word_text.collect(), word_range);
5160 }
5161 }
5162 query_ix = 0;
5163 }
5164 chunk_ix += chunk.text.len();
5165 }
5166
5167 words
5168 }
5169}
5170
5171pub struct WordsQuery<'a> {
    /// Only return words that contain all of the characters of this fuzzy string, in order,
    /// case-insensitively.
5173 pub fuzzy_contents: Option<&'a str>,
5174 /// Skips words that start with a digit.
5175 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
5177 pub range: Range<usize>,
5178}
5179
5180fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5181 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5182}
5183
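/// Measures the leading run of spaces or tabs in the given character stream; the kind is taken
/// from the first whitespace character seen.
///
/// A minimal sketch (not compiled here):
///
/// ```ignore
/// let indent = indent_size_for_text("    let x = 1;".chars());
/// assert_eq!(indent.len, 4);
/// assert!(matches!(indent.kind, IndentKind::Space));
/// ```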
5184fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5185 let mut result = IndentSize::spaces(0);
5186 for c in text {
5187 let kind = match c {
5188 ' ' => IndentKind::Space,
5189 '\t' => IndentKind::Tab,
5190 _ => break,
5191 };
5192 if result.len == 0 {
5193 result.kind = kind;
5194 }
5195 result.len += 1;
5196 }
5197 result
5198}
5199
5200impl Clone for BufferSnapshot {
5201 fn clone(&self) -> Self {
5202 Self {
5203 text: self.text.clone(),
5204 syntax: self.syntax.clone(),
5205 file: self.file.clone(),
5206 remote_selections: self.remote_selections.clone(),
5207 diagnostics: self.diagnostics.clone(),
5208 language: self.language.clone(),
5209 tree_sitter_data: self.tree_sitter_data.clone(),
5210 non_text_state_update_count: self.non_text_state_update_count,
5211 capability: self.capability,
5212 }
5213 }
5214}
5215
5216impl Deref for BufferSnapshot {
5217 type Target = text::BufferSnapshot;
5218
5219 fn deref(&self) -> &Self::Target {
5220 &self.text
5221 }
5222}
5223
5224unsafe impl Send for BufferChunks<'_> {}
5225
5226impl<'a> BufferChunks<'a> {
5227 pub(crate) fn new(
5228 text: &'a Rope,
5229 range: Range<usize>,
5230 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5231 diagnostics: bool,
5232 buffer_snapshot: Option<&'a BufferSnapshot>,
5233 ) -> Self {
5234 let mut highlights = None;
5235 if let Some((captures, highlight_maps)) = syntax {
5236 highlights = Some(BufferChunkHighlights {
5237 captures,
5238 next_capture: None,
5239 stack: Default::default(),
5240 highlight_maps,
5241 })
5242 }
5243
5244 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5245 let chunks = text.chunks_in_range(range.clone());
5246
5247 let mut this = BufferChunks {
5248 range,
5249 buffer_snapshot,
5250 chunks,
5251 diagnostic_endpoints,
5252 error_depth: 0,
5253 warning_depth: 0,
5254 information_depth: 0,
5255 hint_depth: 0,
5256 unnecessary_depth: 0,
5257 underline: true,
5258 highlights,
5259 };
5260 this.initialize_diagnostic_endpoints();
5261 this
5262 }
5263
    /// Seeks to the given byte range in the buffer.
5265 pub fn seek(&mut self, range: Range<usize>) {
5266 let old_range = std::mem::replace(&mut self.range, range.clone());
5267 self.chunks.set_range(self.range.clone());
5268 if let Some(highlights) = self.highlights.as_mut() {
5269 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5270 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5271 highlights
5272 .stack
5273 .retain(|(end_offset, _)| *end_offset > range.start);
5274 if let Some(capture) = &highlights.next_capture
5275 && range.start >= capture.node.start_byte()
5276 {
5277 let next_capture_end = capture.node.end_byte();
5278 if range.start < next_capture_end {
5279 highlights.stack.push((
5280 next_capture_end,
5281 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5282 ));
5283 }
5284 highlights.next_capture.take();
5285 }
5286 } else if let Some(snapshot) = self.buffer_snapshot {
5287 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5288 *highlights = BufferChunkHighlights {
5289 captures,
5290 next_capture: None,
5291 stack: Default::default(),
5292 highlight_maps,
5293 };
5294 } else {
5295 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5296 // Seeking such BufferChunks is not supported.
5297 debug_assert!(
5298 false,
5299 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5300 );
5301 }
5302
5303 highlights.captures.set_byte_range(self.range.clone());
5304 self.initialize_diagnostic_endpoints();
5305 }
5306 }
5307
    fn initialize_diagnostic_endpoints(&mut self) {
        if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
            && let Some(buffer) = self.buffer_snapshot
        {
            let mut diagnostic_endpoints = Vec::new();
            for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start,
                    is_start: true,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end,
                    is_start: false,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
            }
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
            *diagnostics = diagnostic_endpoints.into_iter().peekable();
            self.hint_depth = 0;
            self.error_depth = 0;
            self.warning_depth = 0;
            self.information_depth = 0;
        }
    }

    /// The current byte offset in the buffer.
    pub fn offset(&self) -> usize {
        self.range.start
    }

    pub fn range(&self) -> Range<usize> {
        self.range.clone()
    }

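    /// Adjusts the nesting-depth counter for the endpoint's severity (and the
    /// "unnecessary" counter) as a diagnostic start or end point is crossed.
    /// Overlapping diagnostics are handled by counting depth rather than by
    /// tracking individual ranges.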
    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }

        if endpoint.is_unnecessary {
            if endpoint.is_start {
                self.unnecessary_depth += 1;
            } else {
                self.unnecessary_depth -= 1;
            }
        }
    }

    fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }

    fn current_code_is_unnecessary(&self) -> bool {
        self.unnecessary_depth > 0
    }
}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}
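
// A minimal, self-contained sketch of the bitmap-slicing arithmetic used in
// `BufferChunks::next` above: the per-chunk `tabs`/`chars` bitmaps are shifted
// and masked down to the sub-range being emitted. The chunk text and bit
// positions below are illustrative values, not taken from a real buffer.
#[cfg(test)]
#[test]
fn bitmap_slice_mask_sketch() {
    // Pretend the current chunk is "a\tb\tc", so the tab bitmap has bits 1 and 3 set.
    let tabs: u128 = 0b01010;
    // Emit only the sub-range covering bytes 1..4 of the chunk ("\tb\t").
    let (bit_start, bit_end) = (1usize, 4u32);
    let mask = 1u128.unbounded_shl(bit_end).wrapping_sub(1);
    let sliced = (tabs >> bit_start) & mask;
    // Within the emitted slice, the tabs sit at relative positions 0 and 2.
    assert_eq!(sliced & 0b111, 0b101);
}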

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been shrunk or enlarged by the given size along the given direction.
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}
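
// A small illustrative test of `IndentSize` arithmetic. This is a sketch; the
// sizes below are arbitrary examples, not drawn from real language settings.
#[cfg(test)]
#[test]
fn indent_size_examples() {
    // Growing an indent by another step of the same kind adds the lengths.
    let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    assert_eq!(indent.chars().count(), 8);
    assert_eq!(indent.char(), ' ');

    // Shrinking by more than the current length leaves the indent unchanged.
    let indent = IndentSize::spaces(2).with_delta(Ordering::Less, IndentSize::spaces(4));
    assert_eq!(indent.chars().count(), 2);

    // A tab expands to the configured tab size when measured in columns.
    assert_eq!(
        IndentSize::tab().len_with_expanded_tabs(NonZeroU32::new(4).unwrap()),
        4
    );
}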

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}
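
// An illustrative test of the coalescing behavior of `contiguous_ranges`:
// consecutive values merge into ranges capped at `max_len`, and a gap starts a
// new range. (A sketch; the input values are arbitrary.)
#[cfg(test)]
#[test]
fn contiguous_ranges_example() {
    let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 2).collect();
    assert_eq!(ranges, vec![1..3, 3..4, 5..7, 9..10]);
}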

#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}
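
// An illustrative test of `CharClassifier` with no language scope attached.
// This is a sketch; with a scope, the language's configured word or completion
// characters would also be classified as word characters.
#[cfg(test)]
#[test]
fn char_classifier_example() {
    let classifier = CharClassifier::new(None);
    assert!(classifier.is_word('x'));
    assert!(classifier.is_word('_'));
    assert!(classifier.is_whitespace(' '));
    assert!(classifier.is_punctuation('-'));

    // With `ignore_punctuation`, punctuation is folded into the word class.
    let classifier = classifier.ignore_punctuation(true);
    assert!(classifier.is_word('-'));
}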

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}
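
// An illustrative test of `trailing_whitespace_ranges`. This is a sketch; it
// assumes `Rope` can be built from a `&str` via `From`, as it is elsewhere in
// this crate.
#[cfg(test)]
#[test]
fn trailing_whitespace_ranges_example() {
    let rope = Rope::from("fn main() {  \n    let x = 1;\t\n}\n");
    // The two spaces after `{` and the tab after `1;` are the only trailing whitespace.
    assert_eq!(trailing_whitespace_ranges(&rope), vec![11..13, 28..29]);
}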