1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::channel::oneshot;
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TaskLabel, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::{Arc, LazyLock},
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
79/// A label for the background task spawned by the buffer to compute
80/// a diff against the contents of its file.
81pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
82
/// Indicates whether a [`Buffer`] has permission to edit.
84#[derive(PartialEq, Clone, Copy, Debug)]
85pub enum Capability {
86 /// The buffer is a mutable replica.
87 ReadWrite,
88 /// The buffer is a read-only replica.
89 ReadOnly,
90}
91
92pub type BufferRow = u32;
93
94/// An in-memory representation of a source code file, including its text,
95/// syntax trees, git status, and diagnostics.
96pub struct Buffer {
97 text: TextBuffer,
98 branch_state: Option<BufferBranchState>,
99 /// Filesystem state, `None` when there is no path.
100 file: Option<Arc<dyn File>>,
101 /// The mtime of the file when this buffer was last loaded from
102 /// or saved to disk.
103 saved_mtime: Option<MTime>,
104 /// The version vector when this buffer was last loaded from
105 /// or saved to disk.
106 saved_version: clock::Global,
107 preview_version: clock::Global,
108 transaction_depth: usize,
109 was_dirty_before_starting_transaction: Option<bool>,
110 reload_task: Option<Task<Result<()>>>,
111 language: Option<Arc<Language>>,
112 autoindent_requests: Vec<Arc<AutoindentRequest>>,
113 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
114 pending_autoindent: Option<Task<()>>,
115 sync_parse_timeout: Duration,
116 syntax_map: Mutex<SyntaxMap>,
117 reparse: Option<Task<()>>,
118 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
119 non_text_state_update_count: usize,
120 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
121 remote_selections: TreeMap<ReplicaId, SelectionSet>,
122 diagnostics_timestamp: clock::Lamport,
123 completion_triggers: BTreeSet<String>,
124 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
125 completion_triggers_timestamp: clock::Lamport,
126 deferred_ops: OperationQueue<Operation>,
127 capability: Capability,
128 has_conflict: bool,
    /// Memoizes the result of `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` as of the last call.
131 has_unsaved_edits: Cell<(clock::Global, bool)>,
132 change_bits: Vec<rc::Weak<Cell<bool>>>,
133 _subscriptions: Vec<gpui::Subscription>,
134 tree_sitter_data: Arc<TreeSitterData>,
135 encoding: &'static Encoding,
136 has_bom: bool,
137}
138
139#[derive(Debug)]
140pub struct TreeSitterData {
141 chunks: RowChunks,
142 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
143}
144
145const MAX_ROWS_IN_A_CHUNK: u32 = 50;
146
147impl TreeSitterData {
148 fn clear(&mut self, snapshot: text::BufferSnapshot) {
149 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
150 self.brackets_by_chunks.get_mut().clear();
151 self.brackets_by_chunks
152 .get_mut()
153 .resize(self.chunks.len(), None);
154 }
155
156 fn new(snapshot: text::BufferSnapshot) -> Self {
157 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
158 Self {
159 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
160 chunks,
161 }
162 }
163
164 fn version(&self) -> &clock::Global {
165 self.chunks.version()
166 }
167}
168
169#[derive(Copy, Clone, Debug, PartialEq, Eq)]
170pub enum ParseStatus {
171 Idle,
172 Parsing,
173}
174
175struct BufferBranchState {
176 base_buffer: Entity<Buffer>,
177 merged_operations: Vec<Lamport>,
178}
179
180/// An immutable, cheaply cloneable representation of a fixed
181/// state of a buffer.
182pub struct BufferSnapshot {
183 pub text: text::BufferSnapshot,
184 pub syntax: SyntaxSnapshot,
185 file: Option<Arc<dyn File>>,
186 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
187 remote_selections: TreeMap<ReplicaId, SelectionSet>,
188 language: Option<Arc<Language>>,
189 non_text_state_update_count: usize,
190 tree_sitter_data: Arc<TreeSitterData>,
191}
192
193/// The kind and amount of indentation in a particular line. For now,
194/// assumes that indentation is all the same character.
195#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
196pub struct IndentSize {
197 /// The number of bytes that comprise the indentation.
198 pub len: u32,
199 /// The kind of whitespace used for indentation.
200 pub kind: IndentKind,
201}
202
203/// A whitespace character that's used for indentation.
204#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
205pub enum IndentKind {
206 /// An ASCII space character.
207 #[default]
208 Space,
209 /// An ASCII tab character.
210 Tab,
211}
212
213/// The shape of a selection cursor.
214#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
215pub enum CursorShape {
216 /// A vertical bar
217 #[default]
218 Bar,
219 /// A block that surrounds the following character
220 Block,
221 /// An underline that runs along the following character
222 Underline,
223 /// A box drawn around the following character
224 Hollow,
225}
226
227impl From<settings::CursorShape> for CursorShape {
228 fn from(shape: settings::CursorShape) -> Self {
229 match shape {
230 settings::CursorShape::Bar => CursorShape::Bar,
231 settings::CursorShape::Block => CursorShape::Block,
232 settings::CursorShape::Underline => CursorShape::Underline,
233 settings::CursorShape::Hollow => CursorShape::Hollow,
234 }
235 }
236}
237
238#[derive(Clone, Debug)]
239struct SelectionSet {
240 line_mode: bool,
241 cursor_shape: CursorShape,
242 selections: Arc<[Selection<Anchor>]>,
243 lamport_timestamp: clock::Lamport,
244}
245
246/// A diagnostic associated with a certain range of a buffer.
247#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
248pub struct Diagnostic {
249 /// The name of the service that produced this diagnostic.
250 pub source: Option<String>,
251 /// The ID provided by the dynamic registration that produced this diagnostic.
252 pub registration_id: Option<SharedString>,
253 /// A machine-readable code that identifies this diagnostic.
254 pub code: Option<NumberOrString>,
    /// A URI to documentation with more information about this diagnostic's code.
    pub code_description: Option<lsp::Uri>,
256 /// Whether this diagnostic is a hint, warning, or error.
257 pub severity: DiagnosticSeverity,
258 /// The human-readable message associated with this diagnostic.
259 pub message: String,
    /// The human-readable message in Markdown format, if any.
261 pub markdown: Option<String>,
262 /// An id that identifies the group to which this diagnostic belongs.
263 ///
264 /// When a language server produces a diagnostic with
265 /// one or more associated diagnostics, those diagnostics are all
266 /// assigned a single group ID.
267 pub group_id: usize,
268 /// Whether this diagnostic is the primary diagnostic for its group.
269 ///
270 /// In a given group, the primary diagnostic is the top-level diagnostic
271 /// returned by the language server. The non-primary diagnostics are the
272 /// associated diagnostics.
273 pub is_primary: bool,
274 /// Whether this diagnostic is considered to originate from an analysis of
275 /// files on disk, as opposed to any unsaved buffer contents. This is a
276 /// property of a given diagnostic source, and is configured for a given
277 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
278 /// for the language server.
279 pub is_disk_based: bool,
280 /// Whether this diagnostic marks unnecessary code.
281 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups based on their source.
283 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back
    /// to the server when code actions are requested for this diagnostic.
285 pub data: Option<Value>,
286 /// Whether to underline the corresponding text range in the editor.
287 pub underline: bool,
288}
289
290#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
291pub enum DiagnosticSourceKind {
292 Pulled,
293 Pushed,
294 Other,
295}
296
297/// An operation used to synchronize this buffer with its other replicas.
298#[derive(Clone, Debug, PartialEq)]
299pub enum Operation {
300 /// A text operation.
301 Buffer(text::Operation),
302
303 /// An update to the buffer's diagnostics.
304 UpdateDiagnostics {
305 /// The id of the language server that produced the new diagnostics.
306 server_id: LanguageServerId,
307 /// The diagnostics.
308 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
309 /// The buffer's lamport timestamp.
310 lamport_timestamp: clock::Lamport,
311 },
312
313 /// An update to the most recent selections in this buffer.
314 UpdateSelections {
315 /// The selections.
316 selections: Arc<[Selection<Anchor>]>,
317 /// The buffer's lamport timestamp.
318 lamport_timestamp: clock::Lamport,
319 /// Whether the selections are in 'line mode'.
320 line_mode: bool,
321 /// The [`CursorShape`] associated with these selections.
322 cursor_shape: CursorShape,
323 },
324
325 /// An update to the characters that should trigger autocompletion
326 /// for this buffer.
327 UpdateCompletionTriggers {
328 /// The characters that trigger autocompletion.
329 triggers: Vec<String>,
330 /// The buffer's lamport timestamp.
331 lamport_timestamp: clock::Lamport,
332 /// The language server ID.
333 server_id: LanguageServerId,
334 },
335
336 /// An update to the line ending type of this buffer.
337 UpdateLineEnding {
338 /// The line ending type.
339 line_ending: LineEnding,
340 /// The buffer's lamport timestamp.
341 lamport_timestamp: clock::Lamport,
342 },
343}
344
345/// An event that occurs in a buffer.
346#[derive(Clone, Debug, PartialEq)]
347pub enum BufferEvent {
348 /// The buffer was changed in a way that must be
349 /// propagated to its other replicas.
350 Operation {
351 operation: Operation,
352 is_local: bool,
353 },
354 /// The buffer was edited.
355 Edited,
356 /// The buffer's `dirty` bit changed.
357 DirtyChanged,
358 /// The buffer was saved.
359 Saved,
360 /// The buffer's file was changed on disk.
361 FileHandleChanged,
362 /// The buffer was reloaded.
363 Reloaded,
364 /// The buffer is in need of a reload
365 ReloadNeeded,
366 /// The buffer's language was changed.
367 /// The boolean indicates whether this buffer did not have a language before, but does now.
368 LanguageChanged(bool),
369 /// The buffer's syntax trees were updated.
370 Reparsed,
371 /// The buffer's diagnostics were updated.
372 DiagnosticsUpdated,
373 /// The buffer gained or lost editing capabilities.
374 CapabilityChanged,
375}
376
377/// The file associated with a buffer.
378pub trait File: Send + Sync + Any {
379 /// Returns the [`LocalFile`] associated with this file, if the
380 /// file is local.
381 fn as_local(&self) -> Option<&dyn LocalFile>;
382
383 /// Returns whether this file is local.
384 fn is_local(&self) -> bool {
385 self.as_local().is_some()
386 }
387
388 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
389 /// only available in some states, such as modification time.
390 fn disk_state(&self) -> DiskState;
391
392 /// Returns the path of this file relative to the worktree's root directory.
393 fn path(&self) -> &Arc<RelPath>;
394
395 /// Returns the path of this file relative to the worktree's parent directory (this means it
396 /// includes the name of the worktree's root folder).
397 fn full_path(&self, cx: &App) -> PathBuf;
398
399 /// Returns the path style of this file.
400 fn path_style(&self, cx: &App) -> PathStyle;
401
402 /// Returns the last component of this handle's absolute path. If this handle refers to the root
403 /// of its worktree, then this method will return the name of the worktree itself.
404 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
405
406 /// Returns the id of the worktree to which this file belongs.
407 ///
408 /// This is needed for looking up project-specific settings.
409 fn worktree_id(&self, cx: &App) -> WorktreeId;
410
411 /// Converts this file into a protobuf message.
412 fn to_proto(&self, cx: &App) -> rpc::proto::File;
413
414 /// Return whether Zed considers this to be a private file.
415 fn is_private(&self) -> bool;
416}
417
418/// The file's storage status - whether it's stored (`Present`), and if so when it was last
419/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
420/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
421/// indicator for new files.
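///
/// A sketch of how a caller might branch on this state (hypothetical usage,
/// not tied to any specific call site):
///
/// ```ignore
/// match file.disk_state() {
///     DiskState::New => println!("never saved"),
///     DiskState::Present { mtime } => println!("on disk, modified at {mtime:?}"),
///     DiskState::Deleted => println!("deleted from disk"),
///     DiskState::Historic { was_deleted } => println!("historical (deleted: {was_deleted})"),
/// }
/// ```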
422#[derive(Copy, Clone, Debug, PartialEq)]
423pub enum DiskState {
424 /// File created in Zed that has not been saved.
425 New,
426 /// File present on the filesystem.
427 Present { mtime: MTime },
428 /// Deleted file that was previously present.
429 Deleted,
    /// An old version of a file that was previously present,
    /// usually from a version control system (e.g., a Git blob).
432 Historic { was_deleted: bool },
433}
434
435impl DiskState {
436 /// Returns the file's last known modification time on disk.
437 pub fn mtime(self) -> Option<MTime> {
438 match self {
439 DiskState::New => None,
440 DiskState::Present { mtime } => Some(mtime),
441 DiskState::Deleted => None,
442 DiskState::Historic { .. } => None,
443 }
444 }
445
446 pub fn exists(&self) -> bool {
447 match self {
448 DiskState::New => false,
449 DiskState::Present { .. } => true,
450 DiskState::Deleted => false,
451 DiskState::Historic { .. } => false,
452 }
453 }
454
455 /// Returns true if this state represents a deleted file.
456 pub fn is_deleted(&self) -> bool {
457 match self {
458 DiskState::Deleted => true,
459 DiskState::Historic { was_deleted } => *was_deleted,
460 _ => false,
461 }
462 }
463}
464
465/// The file associated with a buffer, in the case where the file is on the local disk.
466pub trait LocalFile: File {
467 /// Returns the absolute path of this file
468 fn abs_path(&self, cx: &App) -> PathBuf;
469
470 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
471 fn load(&self, cx: &App) -> Task<Result<String>>;
472
473 /// Loads the file's contents from disk.
474 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
475}
476
477/// The auto-indent behavior associated with an editing operation.
478/// For some editing operations, each affected line of text has its
479/// indentation recomputed. For other operations, the entire block
480/// of edited text is adjusted uniformly.
481#[derive(Clone, Debug)]
482pub enum AutoindentMode {
483 /// Indent each line of inserted text.
484 EachLine,
485 /// Apply the same indentation adjustment to all of the lines
486 /// in a given insertion.
487 Block {
488 /// The original indentation column of the first line of each
489 /// insertion, if it has been copied.
490 ///
491 /// Knowing this makes it possible to preserve the relative indentation
492 /// of every line in the insertion from when it was copied.
493 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is then auto-indented to column `b`, then every other line
        /// of the insertion has its indentation adjusted by `b - a`.
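        ///
        /// A small worked illustration (hypothetical snippet, the language is
        /// irrelevant): a block copied from column `a = 4` whose first line is
        /// auto-indented to column `b = 8` has every line shifted by `b - a = 4`:
        ///
        /// ```text
        /// // copied, first line at column 4:
        ///     if x {
        ///         y();
        ///     }
        /// // after block auto-indent to column 8:
        ///         if x {
        ///             y();
        ///         }
        /// ```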
497 original_indent_columns: Vec<Option<u32>>,
498 },
499}
500
501#[derive(Clone)]
502struct AutoindentRequest {
503 before_edit: BufferSnapshot,
504 entries: Vec<AutoindentRequestEntry>,
505 is_block_mode: bool,
506 ignore_empty_lines: bool,
507}
508
509#[derive(Debug, Clone)]
510struct AutoindentRequestEntry {
511 /// A range of the buffer whose indentation should be adjusted.
512 range: Range<Anchor>,
513 /// Whether or not these lines should be considered brand new, for the
514 /// purpose of auto-indent. When text is not new, its indentation will
515 /// only be adjusted if the suggested indentation level has *changed*
516 /// since the edit was made.
517 first_line_is_new: bool,
518 indent_size: IndentSize,
519 original_indent_column: Option<u32>,
520}
521
522#[derive(Debug)]
523struct IndentSuggestion {
524 basis_row: u32,
525 delta: Ordering,
526 within_error: bool,
527}
528
529struct BufferChunkHighlights<'a> {
530 captures: SyntaxMapCaptures<'a>,
531 next_capture: Option<SyntaxMapCapture<'a>>,
532 stack: Vec<(usize, HighlightId)>,
533 highlight_maps: Vec<HighlightMap>,
534}
535
536/// An iterator that yields chunks of a buffer's text, along with their
537/// syntax highlights and diagnostic status.
538pub struct BufferChunks<'a> {
539 buffer_snapshot: Option<&'a BufferSnapshot>,
540 range: Range<usize>,
541 chunks: text::Chunks<'a>,
542 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
543 error_depth: usize,
544 warning_depth: usize,
545 information_depth: usize,
546 hint_depth: usize,
547 unnecessary_depth: usize,
548 underline: bool,
549 highlights: Option<BufferChunkHighlights<'a>>,
550}
551
552/// A chunk of a buffer's text, along with its syntax highlight and
553/// diagnostic status.
554#[derive(Clone, Debug, Default)]
555pub struct Chunk<'a> {
556 /// The text of the chunk.
557 pub text: &'a str,
558 /// The syntax highlighting style of the chunk.
559 pub syntax_highlight_id: Option<HighlightId>,
560 /// The highlight style that has been applied to this chunk in
561 /// the editor.
562 pub highlight_style: Option<HighlightStyle>,
563 /// The severity of diagnostic associated with this chunk, if any.
564 pub diagnostic_severity: Option<DiagnosticSeverity>,
565 /// A bitset of which characters are tabs in this string.
566 pub tabs: u128,
567 /// Bitmap of character indices in this chunk
568 pub chars: u128,
569 /// Whether this chunk of text is marked as unnecessary.
570 pub is_unnecessary: bool,
571 /// Whether this chunk of text was originally a tab character.
572 pub is_tab: bool,
573 /// Whether this chunk of text was originally an inlay.
574 pub is_inlay: bool,
575 /// Whether to underline the corresponding text range in the editor.
576 pub underline: bool,
577}
578
579/// A set of edits to a given version of a buffer, computed asynchronously.
580#[derive(Debug)]
581pub struct Diff {
582 pub base_version: clock::Global,
583 pub line_ending: LineEnding,
584 pub edits: Vec<(Range<usize>, Arc<str>)>,
585}
586
587#[derive(Debug, Clone, Copy)]
588pub(crate) struct DiagnosticEndpoint {
589 offset: usize,
590 is_start: bool,
591 underline: bool,
592 severity: DiagnosticSeverity,
593 is_unnecessary: bool,
594}
595
596/// A class of characters, used for characterizing a run of text.
597#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
598pub enum CharKind {
599 /// Whitespace.
600 Whitespace,
601 /// Punctuation.
602 Punctuation,
603 /// Word.
604 Word,
605}
606
607/// Context for character classification within a specific scope.
608#[derive(Copy, Clone, Eq, PartialEq, Debug)]
609pub enum CharScopeContext {
610 /// Character classification for completion queries.
611 ///
612 /// This context treats certain characters as word constituents that would
613 /// normally be considered punctuation, such as '-' in Tailwind classes
614 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
615 Completion,
616 /// Character classification for linked edits.
617 ///
618 /// This context handles characters that should be treated as part of
619 /// identifiers during linked editing operations, such as '.' in JSX
620 /// component names like `<Animated.View>`.
621 LinkedEdit,
622}
623
624/// A runnable is a set of data about a region that could be resolved into a task
625pub struct Runnable {
626 pub tags: SmallVec<[RunnableTag; 1]>,
627 pub language: Arc<Language>,
628 pub buffer: BufferId,
629}
630
631#[derive(Default, Clone, Debug)]
632pub struct HighlightedText {
633 pub text: SharedString,
634 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
635}
636
637#[derive(Default, Debug)]
638struct HighlightedTextBuilder {
639 pub text: String,
640 highlights: Vec<(Range<usize>, HighlightStyle)>,
641}
642
643impl HighlightedText {
644 pub fn from_buffer_range<T: ToOffset>(
645 range: Range<T>,
646 snapshot: &text::BufferSnapshot,
647 syntax_snapshot: &SyntaxSnapshot,
648 override_style: Option<HighlightStyle>,
649 syntax_theme: &SyntaxTheme,
650 ) -> Self {
651 let mut highlighted_text = HighlightedTextBuilder::default();
652 highlighted_text.add_text_from_buffer_range(
653 range,
654 snapshot,
655 syntax_snapshot,
656 override_style,
657 syntax_theme,
658 );
659 highlighted_text.build()
660 }
661
662 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
663 gpui::StyledText::new(self.text.clone())
664 .with_default_highlights(default_style, self.highlights.iter().cloned())
665 }
666
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within that whitespace, and a boolean indicating whether more lines
    /// follow.
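    ///
    /// A minimal sketch of the behavior (both fields are public, so this
    /// assumes direct construction; the highlight style is a placeholder):
    ///
    /// ```ignore
    /// let text = HighlightedText {
    ///     text: "    let x = 1;\nlet y = 2;".into(),
    ///     highlights: vec![(8..9, HighlightStyle::default())],
    /// };
    /// let (preview, has_more) = text.first_line_preview();
    /// assert_eq!(&*preview.text, "let x = 1;");
    /// assert!(has_more);
    /// ```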
669 pub fn first_line_preview(self) -> (Self, bool) {
670 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
671 let first_line = &self.text[..newline_ix];
672
673 // Trim leading whitespace, unless an edit starts prior to it.
674 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
675 if let Some((first_highlight_range, _)) = self.highlights.first() {
676 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
677 }
678
679 let preview_text = &first_line[preview_start_ix..];
680 let preview_highlights = self
681 .highlights
682 .into_iter()
683 .skip_while(|(range, _)| range.end <= preview_start_ix)
684 .take_while(|(range, _)| range.start < newline_ix)
685 .filter_map(|(mut range, highlight)| {
686 range.start = range.start.saturating_sub(preview_start_ix);
687 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
688 if range.is_empty() {
689 None
690 } else {
691 Some((range, highlight))
692 }
693 });
694
695 let preview = Self {
696 text: SharedString::new(preview_text),
697 highlights: preview_highlights.collect(),
698 };
699
700 (preview, self.text.len() > newline_ix)
701 }
702}
703
704impl HighlightedTextBuilder {
705 pub fn build(self) -> HighlightedText {
706 HighlightedText {
707 text: self.text.into(),
708 highlights: self.highlights,
709 }
710 }
711
712 pub fn add_text_from_buffer_range<T: ToOffset>(
713 &mut self,
714 range: Range<T>,
715 snapshot: &text::BufferSnapshot,
716 syntax_snapshot: &SyntaxSnapshot,
717 override_style: Option<HighlightStyle>,
718 syntax_theme: &SyntaxTheme,
719 ) {
720 let range = range.to_offset(snapshot);
721 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
722 let start = self.text.len();
723 self.text.push_str(chunk.text);
724 let end = self.text.len();
725
726 if let Some(highlight_style) = chunk
727 .syntax_highlight_id
728 .and_then(|id| id.style(syntax_theme))
729 {
730 let highlight_style = override_style.map_or(highlight_style, |override_style| {
731 highlight_style.highlight(override_style)
732 });
733 self.highlights.push((start..end, highlight_style));
734 } else if let Some(override_style) = override_style {
735 self.highlights.push((start..end, override_style));
736 }
737 }
738 }
739
740 fn highlighted_chunks<'a>(
741 range: Range<usize>,
742 snapshot: &'a text::BufferSnapshot,
743 syntax_snapshot: &'a SyntaxSnapshot,
744 ) -> BufferChunks<'a> {
745 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
746 grammar
747 .highlights_config
748 .as_ref()
749 .map(|config| &config.query)
750 });
751
752 let highlight_maps = captures
753 .grammars()
754 .iter()
755 .map(|grammar| grammar.highlight_map())
756 .collect();
757
758 BufferChunks::new(
759 snapshot.as_rope(),
760 range,
761 Some((captures, highlight_maps)),
762 false,
763 None,
764 )
765 }
766}
767
768#[derive(Clone)]
769pub struct EditPreview {
770 old_snapshot: text::BufferSnapshot,
771 applied_edits_snapshot: text::BufferSnapshot,
772 syntax_snapshot: SyntaxSnapshot,
773}
774
775impl EditPreview {
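    /// Renders the previewed edits as a unified diff against the old snapshot,
    /// returning `None` if `edits` is empty.
    ///
    /// A usage sketch (assuming `edits` are the same anchor ranges passed to
    /// [`Buffer::preview_edits`]):
    ///
    /// ```ignore
    /// let preview = buffer.read(cx).preview_edits(edits.clone(), cx).await;
    /// if let Some(diff) = preview.as_unified_diff(&edits) {
    ///     println!("{diff}");
    /// }
    /// ```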
776 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
777 let (first, _) = edits.first()?;
778 let (last, _) = edits.last()?;
779
780 let start = first.start.to_point(&self.old_snapshot);
781 let old_end = last.end.to_point(&self.old_snapshot);
782 let new_end = last
783 .end
784 .bias_right(&self.old_snapshot)
785 .to_point(&self.applied_edits_snapshot);
786
787 let start = Point::new(start.row.saturating_sub(3), 0);
788 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
789 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
790
791 Some(unified_diff(
792 &self
793 .old_snapshot
794 .text_for_range(start..old_end)
795 .collect::<String>(),
796 &self
797 .applied_edits_snapshot
798 .text_for_range(start..new_end)
799 .collect::<String>(),
800 ))
801 }
802
803 pub fn highlight_edits(
804 &self,
805 current_snapshot: &BufferSnapshot,
806 edits: &[(Range<Anchor>, impl AsRef<str>)],
807 include_deletions: bool,
808 cx: &App,
809 ) -> HighlightedText {
810 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
811 return HighlightedText::default();
812 };
813
814 let mut highlighted_text = HighlightedTextBuilder::default();
815
816 let visible_range_in_preview_snapshot =
817 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
818 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
819
820 let insertion_highlight_style = HighlightStyle {
821 background_color: Some(cx.theme().status().created_background),
822 ..Default::default()
823 };
824 let deletion_highlight_style = HighlightStyle {
825 background_color: Some(cx.theme().status().deleted_background),
826 ..Default::default()
827 };
828 let syntax_theme = cx.theme().syntax();
829
830 for (range, edit_text) in edits {
831 let edit_new_end_in_preview_snapshot = range
832 .end
833 .bias_right(&self.old_snapshot)
834 .to_offset(&self.applied_edits_snapshot);
835 let edit_start_in_preview_snapshot =
836 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
837
838 let unchanged_range_in_preview_snapshot =
839 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
840 if !unchanged_range_in_preview_snapshot.is_empty() {
841 highlighted_text.add_text_from_buffer_range(
842 unchanged_range_in_preview_snapshot,
843 &self.applied_edits_snapshot,
844 &self.syntax_snapshot,
845 None,
846 syntax_theme,
847 );
848 }
849
850 let range_in_current_snapshot = range.to_offset(current_snapshot);
851 if include_deletions && !range_in_current_snapshot.is_empty() {
852 highlighted_text.add_text_from_buffer_range(
853 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
856 Some(deletion_highlight_style),
857 syntax_theme,
858 );
859 }
860
861 if !edit_text.as_ref().is_empty() {
862 highlighted_text.add_text_from_buffer_range(
863 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
864 &self.applied_edits_snapshot,
865 &self.syntax_snapshot,
866 Some(insertion_highlight_style),
867 syntax_theme,
868 );
869 }
870
871 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
872 }
873
874 highlighted_text.add_text_from_buffer_range(
875 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
876 &self.applied_edits_snapshot,
877 &self.syntax_snapshot,
878 None,
879 syntax_theme,
880 );
881
882 highlighted_text.build()
883 }
884
885 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
886 cx.new(|cx| {
887 let mut buffer = Buffer::local_normalized(
888 self.applied_edits_snapshot.as_rope().clone(),
889 self.applied_edits_snapshot.line_ending(),
890 cx,
891 );
892 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
893 buffer
894 })
895 }
896
897 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
898 let (first, _) = edits.first()?;
899 let (last, _) = edits.last()?;
900
901 let start = first
902 .start
903 .bias_left(&self.old_snapshot)
904 .to_point(&self.applied_edits_snapshot);
905 let end = last
906 .end
907 .bias_right(&self.old_snapshot)
908 .to_point(&self.applied_edits_snapshot);
909
910 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
911 let range = Point::new(start.row, 0)
912 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
913
914 Some(range)
915 }
916}
917
918#[derive(Clone, Debug, PartialEq, Eq)]
919pub struct BracketMatch<T> {
920 pub open_range: Range<T>,
921 pub close_range: Range<T>,
922 pub newline_only: bool,
923 pub syntax_layer_depth: usize,
924 pub color_index: Option<usize>,
925}
926
927impl<T> BracketMatch<T> {
928 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
929 (self.open_range, self.close_range)
930 }
931}
932
933impl Buffer {
934 /// Create a new buffer with the given base text.
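    ///
    /// A usage sketch (assuming a GPUI `App` context, as in a test):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("hello world", cx));
    /// ```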
935 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
936 Self::build(
937 TextBuffer::new(
938 ReplicaId::LOCAL,
939 cx.entity_id().as_non_zero_u64().into(),
940 base_text.into(),
941 ),
942 None,
943 Capability::ReadWrite,
944 )
945 }
946
947 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
948 pub fn local_normalized(
949 base_text_normalized: Rope,
950 line_ending: LineEnding,
951 cx: &Context<Self>,
952 ) -> Self {
953 Self::build(
954 TextBuffer::new_normalized(
955 ReplicaId::LOCAL,
956 cx.entity_id().as_non_zero_u64().into(),
957 line_ending,
958 base_text_normalized,
959 ),
960 None,
961 Capability::ReadWrite,
962 )
963 }
964
965 /// Create a new buffer that is a replica of a remote buffer.
966 pub fn remote(
967 remote_id: BufferId,
968 replica_id: ReplicaId,
969 capability: Capability,
970 base_text: impl Into<String>,
971 ) -> Self {
972 Self::build(
973 TextBuffer::new(replica_id, remote_id, base_text.into()),
974 None,
975 capability,
976 )
977 }
978
979 /// Create a new buffer that is a replica of a remote buffer, populating its
980 /// state from the given protobuf message.
981 pub fn from_proto(
982 replica_id: ReplicaId,
983 capability: Capability,
984 message: proto::BufferState,
985 file: Option<Arc<dyn File>>,
986 ) -> Result<Self> {
987 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
988 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
989 let mut this = Self::build(buffer, file, capability);
990 this.text.set_line_ending(proto::deserialize_line_ending(
991 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
992 ));
993 this.saved_version = proto::deserialize_version(&message.saved_version);
994 this.saved_mtime = message.saved_mtime.map(|time| time.into());
995 Ok(this)
996 }
997
998 /// Serialize the buffer's state to a protobuf message.
999 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
1000 proto::BufferState {
1001 id: self.remote_id().into(),
1002 file: self.file.as_ref().map(|f| f.to_proto(cx)),
1003 base_text: self.base_text().to_string(),
1004 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
1005 saved_version: proto::serialize_version(&self.saved_version),
1006 saved_mtime: self.saved_mtime.map(|time| time.into()),
1007 }
1008 }
1009
1010 /// Serialize as protobufs all of the changes to the buffer since the given version.
1011 pub fn serialize_ops(
1012 &self,
1013 since: Option<clock::Global>,
1014 cx: &App,
1015 ) -> Task<Vec<proto::Operation>> {
1016 let mut operations = Vec::new();
1017 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1018
1019 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1020 proto::serialize_operation(&Operation::UpdateSelections {
1021 selections: set.selections.clone(),
1022 lamport_timestamp: set.lamport_timestamp,
1023 line_mode: set.line_mode,
1024 cursor_shape: set.cursor_shape,
1025 })
1026 }));
1027
1028 for (server_id, diagnostics) in &self.diagnostics {
1029 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1030 lamport_timestamp: self.diagnostics_timestamp,
1031 server_id: *server_id,
1032 diagnostics: diagnostics.iter().cloned().collect(),
1033 }));
1034 }
1035
1036 for (server_id, completions) in &self.completion_triggers_per_language_server {
1037 operations.push(proto::serialize_operation(
1038 &Operation::UpdateCompletionTriggers {
1039 triggers: completions.iter().cloned().collect(),
1040 lamport_timestamp: self.completion_triggers_timestamp,
1041 server_id: *server_id,
1042 },
1043 ));
1044 }
1045
1046 let text_operations = self.text.operations().clone();
1047 cx.background_spawn(async move {
1048 let since = since.unwrap_or_default();
1049 operations.extend(
1050 text_operations
1051 .iter()
1052 .filter(|(_, op)| !since.observed(op.timestamp()))
1053 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1054 );
1055 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1056 operations
1057 })
1058 }
1059
1060 /// Assign a language to the buffer, returning the buffer.
1061 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1062 self.set_language_async(Some(language), cx);
1063 self
1064 }
1065
1066 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
1067 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1068 self.set_language(Some(language), cx);
1069 self
1070 }
1071
1072 /// Returns the [`Capability`] of this buffer.
1073 pub fn capability(&self) -> Capability {
1074 self.capability
1075 }
1076
1077 /// Whether this buffer can only be read.
1078 pub fn read_only(&self) -> bool {
1079 self.capability == Capability::ReadOnly
1080 }
1081
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1083 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1084 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1085 let snapshot = buffer.snapshot();
1086 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1087 let tree_sitter_data = TreeSitterData::new(snapshot);
1088 Self {
1089 saved_mtime,
1090 tree_sitter_data: Arc::new(tree_sitter_data),
1091 saved_version: buffer.version(),
1092 preview_version: buffer.version(),
1093 reload_task: None,
1094 transaction_depth: 0,
1095 was_dirty_before_starting_transaction: None,
1096 has_unsaved_edits: Cell::new((buffer.version(), false)),
1097 text: buffer,
1098 branch_state: None,
1099 file,
1100 capability,
1101 syntax_map,
1102 reparse: None,
1103 non_text_state_update_count: 0,
1104 sync_parse_timeout: Duration::from_millis(1),
1105 parse_status: watch::channel(ParseStatus::Idle),
1106 autoindent_requests: Default::default(),
1107 wait_for_autoindent_txs: Default::default(),
1108 pending_autoindent: Default::default(),
1109 language: None,
1110 remote_selections: Default::default(),
1111 diagnostics: Default::default(),
1112 diagnostics_timestamp: Lamport::MIN,
1113 completion_triggers: Default::default(),
1114 completion_triggers_per_language_server: Default::default(),
1115 completion_triggers_timestamp: Lamport::MIN,
1116 deferred_ops: OperationQueue::new(),
1117 has_conflict: false,
1118 change_bits: Default::default(),
1119 _subscriptions: Vec::new(),
1120 encoding: encoding_rs::UTF_8,
1121 has_bom: false,
1122 }
1123 }
1124
1125 pub fn build_snapshot(
1126 text: Rope,
1127 language: Option<Arc<Language>>,
1128 language_registry: Option<Arc<LanguageRegistry>>,
1129 cx: &mut App,
1130 ) -> impl Future<Output = BufferSnapshot> + use<> {
1131 let entity_id = cx.reserve_entity::<Self>().entity_id();
1132 let buffer_id = entity_id.as_non_zero_u64().into();
1133 async move {
1134 let text =
1135 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1136 .snapshot();
1137 let mut syntax = SyntaxMap::new(&text).snapshot();
1138 if let Some(language) = language.clone() {
1139 let language_registry = language_registry.clone();
1140 syntax.reparse(&text, language_registry, language);
1141 }
1142 let tree_sitter_data = TreeSitterData::new(text.clone());
1143 BufferSnapshot {
1144 text,
1145 syntax,
1146 file: None,
1147 diagnostics: Default::default(),
1148 remote_selections: Default::default(),
1149 tree_sitter_data: Arc::new(tree_sitter_data),
1150 language,
1151 non_text_state_update_count: 0,
1152 }
1153 }
1154 }
1155
1156 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1157 let entity_id = cx.reserve_entity::<Self>().entity_id();
1158 let buffer_id = entity_id.as_non_zero_u64().into();
1159 let text = TextBuffer::new_normalized(
1160 ReplicaId::LOCAL,
1161 buffer_id,
1162 Default::default(),
1163 Rope::new(),
1164 )
1165 .snapshot();
1166 let syntax = SyntaxMap::new(&text).snapshot();
1167 let tree_sitter_data = TreeSitterData::new(text.clone());
1168 BufferSnapshot {
1169 text,
1170 syntax,
1171 tree_sitter_data: Arc::new(tree_sitter_data),
1172 file: None,
1173 diagnostics: Default::default(),
1174 remote_selections: Default::default(),
1175 language: None,
1176 non_text_state_update_count: 0,
1177 }
1178 }
1179
1180 #[cfg(any(test, feature = "test-support"))]
1181 pub fn build_snapshot_sync(
1182 text: Rope,
1183 language: Option<Arc<Language>>,
1184 language_registry: Option<Arc<LanguageRegistry>>,
1185 cx: &mut App,
1186 ) -> BufferSnapshot {
1187 let entity_id = cx.reserve_entity::<Self>().entity_id();
1188 let buffer_id = entity_id.as_non_zero_u64().into();
1189 let text =
1190 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1191 .snapshot();
1192 let mut syntax = SyntaxMap::new(&text).snapshot();
1193 if let Some(language) = language.clone() {
1194 syntax.reparse(&text, language_registry, language);
1195 }
1196 let tree_sitter_data = TreeSitterData::new(text.clone());
1197 BufferSnapshot {
1198 text,
1199 syntax,
1200 tree_sitter_data: Arc::new(tree_sitter_data),
1201 file: None,
1202 diagnostics: Default::default(),
1203 remote_selections: Default::default(),
1204 language,
1205 non_text_state_update_count: 0,
1206 }
1207 }
1208
1209 /// Retrieve a snapshot of the buffer's current state. This is computationally
1210 /// cheap, and allows reading from the buffer on a background thread.
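    ///
    /// A sketch of reading from a snapshot off the main thread (assuming a
    /// GPUI context with a background executor):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is immutable, so it can be read freely here.
    ///     let line_count = snapshot.max_point().row + 1;
    ///     // ...
    /// })
    /// .detach();
    /// ```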
1211 pub fn snapshot(&self) -> BufferSnapshot {
1212 let text = self.text.snapshot();
1213 let mut syntax_map = self.syntax_map.lock();
1214 syntax_map.interpolate(&text);
1215 let syntax = syntax_map.snapshot();
1216
1217 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1218 Arc::new(TreeSitterData::new(text.clone()))
1219 } else {
1220 self.tree_sitter_data.clone()
1221 };
1222
1223 BufferSnapshot {
1224 text,
1225 syntax,
1226 tree_sitter_data,
1227 file: self.file.clone(),
1228 remote_selections: self.remote_selections.clone(),
1229 diagnostics: self.diagnostics.clone(),
1230 language: self.language.clone(),
1231 non_text_state_update_count: self.non_text_state_update_count,
1232 }
1233 }
1234
1235 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1236 let this = cx.entity();
1237 cx.new(|cx| {
1238 let mut branch = Self {
1239 branch_state: Some(BufferBranchState {
1240 base_buffer: this.clone(),
1241 merged_operations: Default::default(),
1242 }),
1243 language: self.language.clone(),
1244 has_conflict: self.has_conflict,
1245 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1246 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1247 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1248 };
1249 if let Some(language_registry) = self.language_registry() {
1250 branch.set_language_registry(language_registry);
1251 }
1252
1253 // Reparse the branch buffer so that we get syntax highlighting immediately.
1254 branch.reparse(cx, true);
1255
1256 branch
1257 })
1258 }
1259
1260 pub fn preview_edits(
1261 &self,
1262 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1263 cx: &App,
1264 ) -> Task<EditPreview> {
1265 let registry = self.language_registry();
1266 let language = self.language().cloned();
1267 let old_snapshot = self.text.snapshot();
1268 let mut branch_buffer = self.text.branch();
1269 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1270 cx.background_spawn(async move {
1271 if !edits.is_empty() {
1272 if let Some(language) = language.clone() {
1273 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1274 }
1275
1276 branch_buffer.edit(edits.iter().cloned());
1277 let snapshot = branch_buffer.snapshot();
1278 syntax_snapshot.interpolate(&snapshot);
1279
1280 if let Some(language) = language {
1281 syntax_snapshot.reparse(&snapshot, registry, language);
1282 }
1283 }
1284 EditPreview {
1285 old_snapshot,
1286 applied_edits_snapshot: branch_buffer.snapshot(),
1287 syntax_snapshot,
1288 }
1289 })
1290 }
1291
1292 /// Applies all of the changes in this buffer that intersect any of the
1293 /// given `ranges` to its base buffer.
1294 ///
1295 /// If `ranges` is empty, then all changes will be applied. This buffer must
1296 /// be a branch buffer to call this method.
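    ///
    /// A usage sketch (assuming `branch` was created with [`Buffer::branch`]):
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     // An empty range list applies every change back to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```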
1297 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1298 let Some(base_buffer) = self.base_buffer() else {
1299 debug_panic!("not a branch buffer");
1300 return;
1301 };
1302
1303 let mut ranges = if ranges.is_empty() {
1304 &[0..usize::MAX]
1305 } else {
1306 ranges.as_slice()
1307 }
1308 .iter()
1309 .peekable();
1310
1311 let mut edits = Vec::new();
1312 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1313 let mut is_included = false;
1314 while let Some(range) = ranges.peek() {
1315 if range.end < edit.new.start {
1316 ranges.next().unwrap();
1317 } else {
1318 if range.start <= edit.new.end {
1319 is_included = true;
1320 }
1321 break;
1322 }
1323 }
1324
1325 if is_included {
1326 edits.push((
1327 edit.old.clone(),
1328 self.text_for_range(edit.new.clone()).collect::<String>(),
1329 ));
1330 }
1331 }
1332
1333 let operation = base_buffer.update(cx, |base_buffer, cx| {
1334 // cx.emit(BufferEvent::DiffBaseChanged);
1335 base_buffer.edit(edits, None, cx)
1336 });
1337
1338 if let Some(operation) = operation
1339 && let Some(BufferBranchState {
1340 merged_operations, ..
1341 }) = &mut self.branch_state
1342 {
1343 merged_operations.push(operation);
1344 }
1345 }
1346
1347 fn on_base_buffer_event(
1348 &mut self,
1349 _: Entity<Buffer>,
1350 event: &BufferEvent,
1351 cx: &mut Context<Self>,
1352 ) {
1353 let BufferEvent::Operation { operation, .. } = event else {
1354 return;
1355 };
1356 let Some(BufferBranchState {
1357 merged_operations, ..
1358 }) = &mut self.branch_state
1359 else {
1360 return;
1361 };
1362
1363 let mut operation_to_undo = None;
1364 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1365 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1366 {
1367 merged_operations.remove(ix);
1368 operation_to_undo = Some(operation.timestamp);
1369 }
1370
1371 self.apply_ops([operation.clone()], cx);
1372
1373 if let Some(timestamp) = operation_to_undo {
1374 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1375 self.undo_operations(counts, cx);
1376 }
1377 }
1378
1379 #[cfg(test)]
1380 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1381 &self.text
1382 }
1383
1384 /// Retrieve a snapshot of the buffer's raw text, without any
1385 /// language-related state like the syntax tree or diagnostics.
1386 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1387 self.text.snapshot()
1388 }
1389
1390 /// The file associated with the buffer, if any.
1391 pub fn file(&self) -> Option<&Arc<dyn File>> {
1392 self.file.as_ref()
1393 }
1394
1395 /// The version of the buffer that was last saved or reloaded from disk.
1396 pub fn saved_version(&self) -> &clock::Global {
1397 &self.saved_version
1398 }
1399
1400 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1401 pub fn saved_mtime(&self) -> Option<MTime> {
1402 self.saved_mtime
1403 }
1404
1405 /// Returns the character encoding of the buffer's file.
1406 pub fn encoding(&self) -> &'static Encoding {
1407 self.encoding
1408 }
1409
1410 /// Sets the character encoding of the buffer.
1411 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1412 self.encoding = encoding;
1413 }
1414
1415 /// Returns whether the buffer has a Byte Order Mark.
1416 pub fn has_bom(&self) -> bool {
1417 self.has_bom
1418 }
1419
1420 /// Sets whether the buffer has a Byte Order Mark.
1421 pub fn set_has_bom(&mut self, has_bom: bool) {
1422 self.has_bom = has_bom;
1423 }
1424
1425 /// Assign a language to the buffer.
1426 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1427 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1428 }
1429
1430 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1431 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1432 self.set_language_(language, true, cx);
1433 }
1434
1435 fn set_language_(
1436 &mut self,
1437 language: Option<Arc<Language>>,
1438 may_block: bool,
1439 cx: &mut Context<Self>,
1440 ) {
1441 self.non_text_state_update_count += 1;
1442 self.syntax_map.lock().clear(&self.text);
1443 let old_language = std::mem::replace(&mut self.language, language);
1444 self.was_changed();
1445 self.reparse(cx, may_block);
1446 let has_fresh_language =
1447 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1448 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1449 }
1450
1451 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1452 /// other languages if parts of the buffer are written in different languages.
1453 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1454 self.syntax_map
1455 .lock()
1456 .set_language_registry(language_registry);
1457 }
1458
1459 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1460 self.syntax_map.lock().language_registry()
1461 }
1462
1463 /// Assign the line ending type to the buffer.
1464 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1465 self.text.set_line_ending(line_ending);
1466
1467 let lamport_timestamp = self.text.lamport_clock.tick();
1468 self.send_operation(
1469 Operation::UpdateLineEnding {
1470 line_ending,
1471 lamport_timestamp,
1472 },
1473 true,
1474 cx,
1475 );
1476 }
1477
1478 /// Assign the buffer a new [`Capability`].
1479 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1480 if self.capability != capability {
1481 self.capability = capability;
1482 cx.emit(BufferEvent::CapabilityChanged)
1483 }
1484 }
1485
1486 /// This method is called to signal that the buffer has been saved.
1487 pub fn did_save(
1488 &mut self,
1489 version: clock::Global,
1490 mtime: Option<MTime>,
1491 cx: &mut Context<Self>,
1492 ) {
1493 self.saved_version = version.clone();
1494 self.has_unsaved_edits.set((version, false));
1495 self.has_conflict = false;
1496 self.saved_mtime = mtime;
1497 self.was_changed();
1498 cx.emit(BufferEvent::Saved);
1499 cx.notify();
1500 }
1501
1502 /// Reloads the contents of the buffer from disk.
1503 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1504 let (tx, rx) = futures::channel::oneshot::channel();
1505 let prev_version = self.text.version();
1506 self.reload_task = Some(cx.spawn(async move |this, cx| {
1507 let Some((new_mtime, load_bytes_task, encoding)) = this.update(cx, |this, cx| {
1508 let file = this.file.as_ref()?.as_local()?;
1509 Some((
1510 file.disk_state().mtime(),
1511 file.load_bytes(cx),
1512 this.encoding,
1513 ))
1514 })?
1515 else {
1516 return Ok(());
1517 };
1518
1519 let bytes = load_bytes_task.await?;
1520 let (cow, _encoding_used, _has_errors) = encoding.decode(&bytes);
1521 let new_text = cow.into_owned();
1522
1523 let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await;
1524 this.update(cx, |this, cx| {
1525 if this.version() == diff.base_version {
1526 this.finalize_last_transaction();
1527 this.apply_diff(diff, cx);
1528 tx.send(this.finalize_last_transaction().cloned()).ok();
1529 this.has_conflict = false;
1530 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1531 } else {
1532 if !diff.edits.is_empty()
1533 || this
1534 .edits_since::<usize>(&diff.base_version)
1535 .next()
1536 .is_some()
1537 {
1538 this.has_conflict = true;
1539 }
1540
1541 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1542 }
1543
1544 this.reload_task.take();
1545 })
1546 }));
1547 rx
1548 }
1549
1550 /// This method is called to signal that the buffer has been reloaded.
1551 pub fn did_reload(
1552 &mut self,
1553 version: clock::Global,
1554 line_ending: LineEnding,
1555 mtime: Option<MTime>,
1556 cx: &mut Context<Self>,
1557 ) {
1558 self.saved_version = version;
1559 self.has_unsaved_edits
1560 .set((self.saved_version.clone(), false));
1561 self.text.set_line_ending(line_ending);
1562 self.saved_mtime = mtime;
1563 cx.emit(BufferEvent::Reloaded);
1564 cx.notify();
1565 }
1566
1567 /// Updates the [`File`] backing this buffer. This should be called when
1568 /// the file has changed or has been deleted.
1569 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1570 let was_dirty = self.is_dirty();
1571 let mut file_changed = false;
1572
1573 if let Some(old_file) = self.file.as_ref() {
1574 if new_file.path() != old_file.path() {
1575 file_changed = true;
1576 }
1577
1578 let old_state = old_file.disk_state();
1579 let new_state = new_file.disk_state();
1580 if old_state != new_state {
1581 file_changed = true;
1582 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1583 cx.emit(BufferEvent::ReloadNeeded)
1584 }
1585 }
1586 } else {
1587 file_changed = true;
1588 };
1589
1590 self.file = Some(new_file);
1591 if file_changed {
1592 self.was_changed();
1593 self.non_text_state_update_count += 1;
1594 if was_dirty != self.is_dirty() {
1595 cx.emit(BufferEvent::DirtyChanged);
1596 }
1597 cx.emit(BufferEvent::FileHandleChanged);
1598 cx.notify();
1599 }
1600 }
1601
1602 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1603 Some(self.branch_state.as_ref()?.base_buffer.clone())
1604 }
1605
1606 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1607 pub fn language(&self) -> Option<&Arc<Language>> {
1608 self.language.as_ref()
1609 }
1610
1611 /// Returns the [`Language`] at the given location.
1612 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1613 let offset = position.to_offset(self);
1614 let mut is_first = true;
1615 let start_anchor = self.anchor_before(offset);
1616 let end_anchor = self.anchor_after(offset);
1617 self.syntax_map
1618 .lock()
1619 .layers_for_range(offset..offset, &self.text, false)
1620 .filter(|layer| {
1621 if is_first {
1622 is_first = false;
1623 return true;
1624 }
1625
1626 layer
1627 .included_sub_ranges
1628 .map(|sub_ranges| {
1629 sub_ranges.iter().any(|sub_range| {
1630 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1631 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1632 !is_before_start && !is_after_end
1633 })
1634 })
1635 .unwrap_or(true)
1636 })
1637 .last()
1638 .map(|info| info.language.clone())
1639 .or_else(|| self.language.clone())
1640 }
1641
1642 /// Returns each [`Language`] for the active syntax layers at the given location.
1643 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1644 let offset = position.to_offset(self);
1645 let mut languages: Vec<Arc<Language>> = self
1646 .syntax_map
1647 .lock()
1648 .layers_for_range(offset..offset, &self.text, false)
1649 .map(|info| info.language.clone())
1650 .collect();
1651
1652 if languages.is_empty()
1653 && let Some(buffer_language) = self.language()
1654 {
1655 languages.push(buffer_language.clone());
1656 }
1657
1658 languages
1659 }
1660
1661 /// An integer version number that accounts for all updates besides
1662 /// the buffer's text itself (which is versioned via a version vector).
1663 pub fn non_text_state_update_count(&self) -> usize {
1664 self.non_text_state_update_count
1665 }
1666
1667 /// Whether the buffer is being parsed in the background.
1668 #[cfg(any(test, feature = "test-support"))]
1669 pub fn is_parsing(&self) -> bool {
1670 self.reparse.is_some()
1671 }
1672
1673 /// Indicates whether the buffer contains any regions that may be
1674 /// written in a language that hasn't been loaded yet.
1675 pub fn contains_unknown_injections(&self) -> bool {
1676 self.syntax_map.lock().contains_unknown_injections()
1677 }
1678
1679 #[cfg(any(test, feature = "test-support"))]
1680 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1681 self.sync_parse_timeout = timeout;
1682 }
1683
1684 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1685 match Arc::get_mut(&mut self.tree_sitter_data) {
1686 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1687 None => {
1688 let tree_sitter_data = TreeSitterData::new(snapshot);
1689 self.tree_sitter_data = Arc::new(tree_sitter_data)
1690 }
1691 }
1692 }
1693
1694 /// Called after an edit to synchronize the buffer's main parse tree with
1695 /// the buffer's new underlying state.
1696 ///
1697 /// Locks the syntax map and interpolates the edits since the last reparse
1698 /// into the foreground syntax tree.
1699 ///
1700 /// Then takes a stable snapshot of the syntax map before unlocking it.
1701 /// The snapshot with the interpolated edits is sent to a background thread,
1702 /// where we ask Tree-sitter to perform an incremental parse.
1703 ///
    /// Meanwhile, in the foreground, if `may_block` is true, we block the main
    /// thread for up to 1ms waiting on the parse to complete. If it completes
    /// within that timeout, we proceed synchronously with the new syntax tree.
    ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, it calls
    /// back into the main thread and assigns the new parse state to the
    /// foreground syntax map.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
1717 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1718 if self.text.version() != *self.tree_sitter_data.version() {
1719 self.invalidate_tree_sitter_data(self.text.snapshot());
1720 }
1721 if self.reparse.is_some() {
1722 return;
1723 }
1724 let language = if let Some(language) = self.language.clone() {
1725 language
1726 } else {
1727 return;
1728 };
1729
1730 let text = self.text_snapshot();
1731 let parsed_version = self.version();
1732
1733 let mut syntax_map = self.syntax_map.lock();
1734 syntax_map.interpolate(&text);
1735 let language_registry = syntax_map.language_registry();
1736 let mut syntax_snapshot = syntax_map.snapshot();
1737 drop(syntax_map);
1738
1739 let parse_task = cx.background_spawn({
1740 let language = language.clone();
1741 let language_registry = language_registry.clone();
1742 async move {
1743 syntax_snapshot.reparse(&text, language_registry, language);
1744 syntax_snapshot
1745 }
1746 });
1747
1748 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1749 if may_block {
1750 match cx
1751 .background_executor()
1752 .block_with_timeout(self.sync_parse_timeout, parse_task)
1753 {
1754 Ok(new_syntax_snapshot) => {
1755 self.did_finish_parsing(new_syntax_snapshot, cx);
1756 self.reparse = None;
1757 }
1758 Err(parse_task) => {
1759 self.reparse = Some(cx.spawn(async move |this, cx| {
1760 let new_syntax_map = cx.background_spawn(parse_task).await;
1761 this.update(cx, move |this, cx| {
1762 let grammar_changed = || {
1763 this.language.as_ref().is_none_or(|current_language| {
1764 !Arc::ptr_eq(&language, current_language)
1765 })
1766 };
1767 let language_registry_changed = || {
1768 new_syntax_map.contains_unknown_injections()
1769 && language_registry.is_some_and(|registry| {
1770 registry.version()
1771 != new_syntax_map.language_registry_version()
1772 })
1773 };
1774 let parse_again = this.version.changed_since(&parsed_version)
1775 || language_registry_changed()
1776 || grammar_changed();
1777 this.did_finish_parsing(new_syntax_map, cx);
1778 this.reparse = None;
1779 if parse_again {
1780 this.reparse(cx, false);
1781 }
1782 })
1783 .ok();
1784 }));
1785 }
1786 }
1787 } else {
1788 self.reparse = Some(cx.spawn(async move |this, cx| {
1789 let new_syntax_map = cx.background_spawn(parse_task).await;
1790 this.update(cx, move |this, cx| {
1791 let grammar_changed = || {
1792 this.language.as_ref().is_none_or(|current_language| {
1793 !Arc::ptr_eq(&language, current_language)
1794 })
1795 };
1796 let language_registry_changed = || {
1797 new_syntax_map.contains_unknown_injections()
1798 && language_registry.is_some_and(|registry| {
1799 registry.version() != new_syntax_map.language_registry_version()
1800 })
1801 };
1802 let parse_again = this.version.changed_since(&parsed_version)
1803 || language_registry_changed()
1804 || grammar_changed();
1805 this.did_finish_parsing(new_syntax_map, cx);
1806 this.reparse = None;
1807 if parse_again {
1808 this.reparse(cx, false);
1809 }
1810 })
1811 .ok();
1812 }));
1813 }
1814 }
1815
1816 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1817 self.was_changed();
1818 self.non_text_state_update_count += 1;
1819 self.syntax_map.lock().did_parse(syntax_snapshot);
1820 self.request_autoindent(cx);
1821 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1822 self.invalidate_tree_sitter_data(self.text.snapshot());
1823 cx.emit(BufferEvent::Reparsed);
1824 cx.notify();
1825 }
1826
1827 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1828 self.parse_status.1.clone()
1829 }
1830
    /// Waits until the buffer is no longer parsing.
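    ///
    /// A minimal sketch (typically useful in tests), assuming `buffer` is an
    /// `Entity<Buffer>` and `cx` provides access to the app context:
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, _| buffer.parsing_idle()).await;
    /// ```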
1832 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1833 let mut parse_status = self.parse_status();
1834 async move {
1835 while *parse_status.borrow() != ParseStatus::Idle {
1836 if parse_status.changed().await.is_err() {
1837 break;
1838 }
1839 }
1840 }
1841 }
1842
1843 /// Assign to the buffer a set of diagnostics created by a given language server.
1844 pub fn update_diagnostics(
1845 &mut self,
1846 server_id: LanguageServerId,
1847 diagnostics: DiagnosticSet,
1848 cx: &mut Context<Self>,
1849 ) {
1850 let lamport_timestamp = self.text.lamport_clock.tick();
1851 let op = Operation::UpdateDiagnostics {
1852 server_id,
1853 diagnostics: diagnostics.iter().cloned().collect(),
1854 lamport_timestamp,
1855 };
1856
1857 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1858 self.send_operation(op, true, cx);
1859 }
1860
1861 pub fn buffer_diagnostics(
1862 &self,
1863 for_server: Option<LanguageServerId>,
1864 ) -> Vec<&DiagnosticEntry<Anchor>> {
1865 match for_server {
1866 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1867 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1868 Err(_) => Vec::new(),
1869 },
1870 None => self
1871 .diagnostics
1872 .iter()
1873 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1874 .collect(),
1875 }
1876 }
1877
1878 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1879 if let Some(indent_sizes) = self.compute_autoindents() {
1880 let indent_sizes = cx.background_spawn(indent_sizes);
1881 match cx
1882 .background_executor()
1883 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1884 {
1885 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1886 Err(indent_sizes) => {
1887 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1888 let indent_sizes = indent_sizes.await;
1889 this.update(cx, |this, cx| {
1890 this.apply_autoindents(indent_sizes, cx);
1891 })
1892 .ok();
1893 }));
1894 }
1895 }
1896 } else {
1897 self.autoindent_requests.clear();
1898 for tx in self.wait_for_autoindent_txs.drain(..) {
1899 tx.send(()).ok();
1900 }
1901 }
1902 }
1903
1904 fn compute_autoindents(
1905 &self,
1906 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1907 let max_rows_between_yields = 100;
1908 let snapshot = self.snapshot();
1909 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1910 return None;
1911 }
1912
1913 let autoindent_requests = self.autoindent_requests.clone();
1914 Some(async move {
1915 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1916 for request in autoindent_requests {
1917 // Resolve each edited range to its row in the current buffer and in the
1918 // buffer before this batch of edits.
1919 let mut row_ranges = Vec::new();
1920 let mut old_to_new_rows = BTreeMap::new();
1921 let mut language_indent_sizes_by_new_row = Vec::new();
1922 for entry in &request.entries {
1923 let position = entry.range.start;
1924 let new_row = position.to_point(&snapshot).row;
1925 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1926 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1927
1928 if !entry.first_line_is_new {
1929 let old_row = position.to_point(&request.before_edit).row;
1930 old_to_new_rows.insert(old_row, new_row);
1931 }
1932 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1933 }
1934
1935 // Build a map containing the suggested indentation for each of the edited lines
1936 // with respect to the state of the buffer before these edits. This map is keyed
1937 // by the rows for these lines in the current state of the buffer.
1938 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1939 let old_edited_ranges =
1940 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1941 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1942 let mut language_indent_size = IndentSize::default();
1943 for old_edited_range in old_edited_ranges {
1944 let suggestions = request
1945 .before_edit
1946 .suggest_autoindents(old_edited_range.clone())
1947 .into_iter()
1948 .flatten();
1949 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1950 if let Some(suggestion) = suggestion {
1951 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1952
1953 // Find the indent size based on the language for this row.
1954 while let Some((row, size)) = language_indent_sizes.peek() {
1955 if *row > new_row {
1956 break;
1957 }
1958 language_indent_size = *size;
1959 language_indent_sizes.next();
1960 }
1961
1962 let suggested_indent = old_to_new_rows
1963 .get(&suggestion.basis_row)
1964 .and_then(|from_row| {
1965 Some(old_suggestions.get(from_row).copied()?.0)
1966 })
1967 .unwrap_or_else(|| {
1968 request
1969 .before_edit
1970 .indent_size_for_line(suggestion.basis_row)
1971 })
1972 .with_delta(suggestion.delta, language_indent_size);
1973 old_suggestions
1974 .insert(new_row, (suggested_indent, suggestion.within_error));
1975 }
1976 }
1977 yield_now().await;
1978 }
1979
1980 // Compute new suggestions for each line, but only include them in the result
1981 // if they differ from the old suggestion for that line.
1982 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1983 let mut language_indent_size = IndentSize::default();
1984 for (row_range, original_indent_column) in row_ranges {
1985 let new_edited_row_range = if request.is_block_mode {
1986 row_range.start..row_range.start + 1
1987 } else {
1988 row_range.clone()
1989 };
1990
1991 let suggestions = snapshot
1992 .suggest_autoindents(new_edited_row_range.clone())
1993 .into_iter()
1994 .flatten();
1995 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1996 if let Some(suggestion) = suggestion {
1997 // Find the indent size based on the language for this row.
1998 while let Some((row, size)) = language_indent_sizes.peek() {
1999 if *row > new_row {
2000 break;
2001 }
2002 language_indent_size = *size;
2003 language_indent_sizes.next();
2004 }
2005
2006 let suggested_indent = indent_sizes
2007 .get(&suggestion.basis_row)
2008 .copied()
2009 .map(|e| e.0)
2010 .unwrap_or_else(|| {
2011 snapshot.indent_size_for_line(suggestion.basis_row)
2012 })
2013 .with_delta(suggestion.delta, language_indent_size);
2014
2015 if old_suggestions.get(&new_row).is_none_or(
2016 |(old_indentation, was_within_error)| {
2017 suggested_indent != *old_indentation
2018 && (!suggestion.within_error || *was_within_error)
2019 },
2020 ) {
2021 indent_sizes.insert(
2022 new_row,
2023 (suggested_indent, request.ignore_empty_lines),
2024 );
2025 }
2026 }
2027 }
2028
2029 if let (true, Some(original_indent_column)) =
2030 (request.is_block_mode, original_indent_column)
2031 {
2032 let new_indent =
2033 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2034 *indent
2035 } else {
2036 snapshot.indent_size_for_line(row_range.start)
2037 };
2038 let delta = new_indent.len as i64 - original_indent_column as i64;
2039 if delta != 0 {
2040 for row in row_range.skip(1) {
2041 indent_sizes.entry(row).or_insert_with(|| {
2042 let mut size = snapshot.indent_size_for_line(row);
2043 if size.kind == new_indent.kind {
2044 match delta.cmp(&0) {
2045 Ordering::Greater => size.len += delta as u32,
2046 Ordering::Less => {
2047 size.len = size.len.saturating_sub(-delta as u32)
2048 }
2049 Ordering::Equal => {}
2050 }
2051 }
2052 (size, request.ignore_empty_lines)
2053 });
2054 }
2055 }
2056 }
2057
2058 yield_now().await;
2059 }
2060 }
2061
2062 indent_sizes
2063 .into_iter()
2064 .filter_map(|(row, (indent, ignore_empty_lines))| {
2065 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2066 None
2067 } else {
2068 Some((row, indent))
2069 }
2070 })
2071 .collect()
2072 })
2073 }
2074
2075 fn apply_autoindents(
2076 &mut self,
2077 indent_sizes: BTreeMap<u32, IndentSize>,
2078 cx: &mut Context<Self>,
2079 ) {
2080 self.autoindent_requests.clear();
2081 for tx in self.wait_for_autoindent_txs.drain(..) {
2082 tx.send(()).ok();
2083 }
2084
2085 let edits: Vec<_> = indent_sizes
2086 .into_iter()
2087 .filter_map(|(row, indent_size)| {
2088 let current_size = indent_size_for_line(self, row);
2089 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2090 })
2091 .collect();
2092
2093 let preserve_preview = self.preserve_preview();
2094 self.edit(edits, None, cx);
2095 if preserve_preview {
2096 self.refresh_preview();
2097 }
2098 }
2099
2100 /// Create a minimal edit that will cause the given row to be indented
2101 /// with the given size. After applying this edit, the length of the line
2102 /// will always be at least `new_size.len`.
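    ///
    /// A minimal sketch (not run as a doctest) of growing a line's indentation
    /// from two spaces to four; the returned edit inserts the two missing
    /// spaces at the start of the row:
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// // `edit` is `Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))`.
    /// ```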
2103 pub fn edit_for_indent_size_adjustment(
2104 row: u32,
2105 current_size: IndentSize,
2106 new_size: IndentSize,
2107 ) -> Option<(Range<Point>, String)> {
2108 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
2110 Ordering::Greater => {
2111 let point = Point::new(row, 0);
2112 Some((
2113 point..point,
2114 iter::repeat(new_size.char())
2115 .take((new_size.len - current_size.len) as usize)
2116 .collect::<String>(),
2117 ))
2118 }
2119
2120 Ordering::Less => Some((
2121 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2122 String::new(),
2123 )),
2124
2125 Ordering::Equal => None,
2126 }
2127 } else {
2128 Some((
2129 Point::new(row, 0)..Point::new(row, current_size.len),
2130 iter::repeat(new_size.char())
2131 .take(new_size.len as usize)
2132 .collect::<String>(),
2133 ))
2134 }
2135 }
2136
2137 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2138 /// and the given new text.
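    ///
    /// A minimal sketch (assuming an async context with access to `cx` and a
    /// mutable `buffer`) of computing a diff and applying it with
    /// [`Buffer::apply_diff`]:
    ///
    /// ```ignore
    /// let diff_task = buffer.diff(new_text, cx);
    /// // Later, once the background task has finished:
    /// let diff = diff_task.await;
    /// buffer.apply_diff(diff, cx);
    /// ```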
2139 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2140 let old_text = self.as_rope().clone();
2141 let base_version = self.version();
2142 cx.background_executor()
2143 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2144 let old_text = old_text.to_string();
2145 let line_ending = LineEnding::detect(&new_text);
2146 LineEnding::normalize(&mut new_text);
2147 let edits = text_diff(&old_text, &new_text);
2148 Diff {
2149 base_version,
2150 line_ending,
2151 edits,
2152 }
2153 })
2154 }
2155
2156 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2158 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2159 let old_text = self.as_rope().clone();
2160 let line_ending = self.line_ending();
2161 let base_version = self.version();
2162 cx.background_spawn(async move {
2163 let ranges = trailing_whitespace_ranges(&old_text);
2164 let empty = Arc::<str>::from("");
2165 Diff {
2166 base_version,
2167 line_ending,
2168 edits: ranges
2169 .into_iter()
2170 .map(|range| (range, empty.clone()))
2171 .collect(),
2172 }
2173 })
2174 }
2175
2176 /// Ensures that the buffer ends with a single newline character, and
2177 /// no other whitespace. Skips if the buffer is empty.
2178 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2179 let len = self.len();
2180 if len == 0 {
2181 return;
2182 }
2183 let mut offset = len;
2184 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2185 let non_whitespace_len = chunk
2186 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2187 .len();
2188 offset -= chunk.len();
2189 offset += non_whitespace_len;
2190 if non_whitespace_len != 0 {
2191 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2192 return;
2193 }
2194 break;
2195 }
2196 }
2197 self.edit([(offset..len, "\n")], None, cx);
2198 }
2199
2200 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2201 /// calculated, then adjust the diff to account for those changes, and discard any
2202 /// parts of the diff that conflict with those changes.
2203 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2204 let snapshot = self.snapshot();
2205 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2206 let mut delta = 0;
2207 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2208 while let Some(edit_since) = edits_since.peek() {
2209 // If the edit occurs after a diff hunk, then it does not
2210 // affect that hunk.
2211 if edit_since.old.start > range.end {
2212 break;
2213 }
2214 // If the edit precedes the diff hunk, then adjust the hunk
2215 // to reflect the edit.
2216 else if edit_since.old.end < range.start {
2217 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2218 edits_since.next();
2219 }
2220 // If the edit intersects a diff hunk, then discard that hunk.
2221 else {
2222 return None;
2223 }
2224 }
2225
2226 let start = (range.start as i64 + delta) as usize;
2227 let end = (range.end as i64 + delta) as usize;
2228 Some((start..end, new_text))
2229 });
2230
2231 self.start_transaction();
2232 self.text.set_line_ending(diff.line_ending);
2233 self.edit(adjusted_edits, None, cx);
2234 self.end_transaction(cx)
2235 }
2236
2237 pub fn has_unsaved_edits(&self) -> bool {
2238 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2239
2240 if last_version == self.version {
2241 self.has_unsaved_edits
2242 .set((last_version, has_unsaved_edits));
2243 return has_unsaved_edits;
2244 }
2245
2246 let has_edits = self.has_edits_since(&self.saved_version);
2247 self.has_unsaved_edits
2248 .set((self.version.clone(), has_edits));
2249 has_edits
2250 }
2251
2252 /// Checks if the buffer has unsaved changes.
2253 pub fn is_dirty(&self) -> bool {
2254 if self.capability == Capability::ReadOnly {
2255 return false;
2256 }
2257 if self.has_conflict {
2258 return true;
2259 }
2260 match self.file.as_ref().map(|f| f.disk_state()) {
2261 Some(DiskState::New) | Some(DiskState::Deleted) => {
2262 !self.is_empty() && self.has_unsaved_edits()
2263 }
2264 _ => self.has_unsaved_edits(),
2265 }
2266 }
2267
2268 /// Marks the buffer as having a conflict regardless of current buffer state.
2269 pub fn set_conflict(&mut self) {
2270 self.has_conflict = true;
2271 }
2272
2273 /// Checks if the buffer and its file have both changed since the buffer
2274 /// was last saved or reloaded.
2275 pub fn has_conflict(&self) -> bool {
2276 if self.has_conflict {
2277 return true;
2278 }
2279 let Some(file) = self.file.as_ref() else {
2280 return false;
2281 };
2282 match file.disk_state() {
2283 DiskState::New => false,
2284 DiskState::Present { mtime } => match self.saved_mtime {
2285 Some(saved_mtime) => {
2286 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2287 }
2288 None => true,
2289 },
2290 DiskState::Deleted => false,
2291 DiskState::Historic { .. } => false,
2292 }
2293 }
2294
2295 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2296 pub fn subscribe(&mut self) -> Subscription<usize> {
2297 self.text.subscribe()
2298 }
2299
2300 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2301 ///
2302 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
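    ///
    /// A minimal sketch, assuming the caller keeps the `Rc` alive for as long
    /// as it wants to observe changes:
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...after some edits or other buffer updates...
    /// if changed.replace(false) {
    ///     // The buffer changed since the bit was registered.
    /// }
    /// ```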
2304 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2305 if let Err(ix) = self
2306 .change_bits
2307 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2308 {
2309 self.change_bits.insert(ix, bit);
2310 }
2311 }
2312
2313 /// Set the change bit for all "listeners".
2314 fn was_changed(&mut self) {
2315 self.change_bits.retain(|change_bit| {
2316 change_bit
2317 .upgrade()
2318 .inspect(|bit| {
2319 _ = bit.replace(true);
2320 })
2321 .is_some()
2322 });
2323 }
2324
2325 /// Starts a transaction, if one is not already in-progress. When undoing or
2326 /// redoing edits, all of the edits performed within a transaction are undone
2327 /// or redone together.
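    ///
    /// A minimal sketch of grouping two edits so that a single undo reverts
    /// both of them:
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// ```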
2328 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2329 self.start_transaction_at(Instant::now())
2330 }
2331
2332 /// Starts a transaction, providing the current time. Subsequent transactions
2333 /// that occur within a short period of time will be grouped together. This
2334 /// is controlled by the buffer's undo grouping duration.
2335 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2336 self.transaction_depth += 1;
2337 if self.was_dirty_before_starting_transaction.is_none() {
2338 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2339 }
2340 self.text.start_transaction_at(now)
2341 }
2342
2343 /// Terminates the current transaction, if this is the outermost transaction.
2344 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2345 self.end_transaction_at(Instant::now(), cx)
2346 }
2347
2348 /// Terminates the current transaction, providing the current time. Subsequent transactions
2349 /// that occur within a short period of time will be grouped together. This
2350 /// is controlled by the buffer's undo grouping duration.
2351 pub fn end_transaction_at(
2352 &mut self,
2353 now: Instant,
2354 cx: &mut Context<Self>,
2355 ) -> Option<TransactionId> {
2356 assert!(self.transaction_depth > 0);
2357 self.transaction_depth -= 1;
2358 let was_dirty = if self.transaction_depth == 0 {
2359 self.was_dirty_before_starting_transaction.take().unwrap()
2360 } else {
2361 false
2362 };
2363 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2364 self.did_edit(&start_version, was_dirty, cx);
2365 Some(transaction_id)
2366 } else {
2367 None
2368 }
2369 }
2370
2371 /// Manually add a transaction to the buffer's undo history.
2372 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2373 self.text.push_transaction(transaction, now);
2374 }
2375
2376 /// Differs from `push_transaction` in that it does not clear the redo
2377 /// stack. Intended to be used to create a parent transaction to merge
2378 /// potential child transactions into.
2379 ///
2380 /// The caller is responsible for removing it from the undo history using
2381 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2382 /// are merged into this transaction, the caller is responsible for ensuring
2383 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2384 /// cleared is to create transactions with the usual `start_transaction` and
2385 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
2387 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2388 self.text.push_empty_transaction(now)
2389 }
2390
2391 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2393 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2394 self.text.finalize_last_transaction()
2395 }
2396
2397 /// Manually group all changes since a given transaction.
2398 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2399 self.text.group_until_transaction(transaction_id);
2400 }
2401
2402 /// Manually remove a transaction from the buffer's undo history
2403 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2404 self.text.forget_transaction(transaction_id)
2405 }
2406
2407 /// Retrieve a transaction from the buffer's undo history
2408 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2409 self.text.get_transaction(transaction_id)
2410 }
2411
2412 /// Manually merge two transactions in the buffer's undo history.
2413 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2414 self.text.merge_transactions(transaction, destination);
2415 }
2416
2417 /// Waits for the buffer to receive operations with the given timestamps.
2418 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2419 &mut self,
2420 edit_ids: It,
2421 ) -> impl Future<Output = Result<()>> + use<It> {
2422 self.text.wait_for_edits(edit_ids)
2423 }
2424
2425 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2426 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2427 &mut self,
2428 anchors: It,
2429 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2430 self.text.wait_for_anchors(anchors)
2431 }
2432
2433 /// Waits for the buffer to receive operations up to the given version.
2434 pub fn wait_for_version(
2435 &mut self,
2436 version: clock::Global,
2437 ) -> impl Future<Output = Result<()>> + use<> {
2438 self.text.wait_for_version(version)
2439 }
2440
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2443 pub fn give_up_waiting(&mut self) {
2444 self.text.give_up_waiting();
2445 }
2446
2447 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2448 let mut rx = None;
2449 if !self.autoindent_requests.is_empty() {
2450 let channel = oneshot::channel();
2451 self.wait_for_autoindent_txs.push(channel.0);
2452 rx = Some(channel.1);
2453 }
2454 rx
2455 }
2456
2457 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2458 pub fn set_active_selections(
2459 &mut self,
2460 selections: Arc<[Selection<Anchor>]>,
2461 line_mode: bool,
2462 cursor_shape: CursorShape,
2463 cx: &mut Context<Self>,
2464 ) {
2465 let lamport_timestamp = self.text.lamport_clock.tick();
2466 self.remote_selections.insert(
2467 self.text.replica_id(),
2468 SelectionSet {
2469 selections: selections.clone(),
2470 lamport_timestamp,
2471 line_mode,
2472 cursor_shape,
2473 },
2474 );
2475 self.send_operation(
2476 Operation::UpdateSelections {
2477 selections,
2478 line_mode,
2479 lamport_timestamp,
2480 cursor_shape,
2481 },
2482 true,
2483 cx,
2484 );
2485 self.non_text_state_update_count += 1;
2486 cx.notify();
2487 }
2488
2489 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2490 /// this replica.
2491 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2492 if self
2493 .remote_selections
2494 .get(&self.text.replica_id())
2495 .is_none_or(|set| !set.selections.is_empty())
2496 {
2497 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2498 }
2499 }
2500
2501 pub fn set_agent_selections(
2502 &mut self,
2503 selections: Arc<[Selection<Anchor>]>,
2504 line_mode: bool,
2505 cursor_shape: CursorShape,
2506 cx: &mut Context<Self>,
2507 ) {
2508 let lamport_timestamp = self.text.lamport_clock.tick();
2509 self.remote_selections.insert(
2510 ReplicaId::AGENT,
2511 SelectionSet {
2512 selections,
2513 lamport_timestamp,
2514 line_mode,
2515 cursor_shape,
2516 },
2517 );
2518 self.non_text_state_update_count += 1;
2519 cx.notify();
2520 }
2521
2522 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2523 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2524 }
2525
2526 /// Replaces the buffer's entire text.
2527 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2528 where
2529 T: Into<Arc<str>>,
2530 {
2531 self.autoindent_requests.clear();
2532 self.edit([(0..self.len(), text)], None, cx)
2533 }
2534
2535 /// Appends the given text to the end of the buffer.
2536 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2537 where
2538 T: Into<Arc<str>>,
2539 {
2540 self.edit([(self.len()..self.len(), text)], None, cx)
2541 }
2542
2543 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2544 /// delete, and a string of text to insert at that location.
2545 ///
2546 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2547 /// request for the edited ranges, which will be processed when the buffer finishes
2548 /// parsing.
2549 ///
2550 /// Parsing takes place at the end of a transaction, and may compute synchronously
2551 /// or asynchronously, depending on the changes.
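    ///
    /// A minimal sketch (assuming `cx` is a `&mut Context<Buffer>`): replace the
    /// first five bytes and append a line, re-indenting the edited rows:
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [(0..5, "hello"), (buffer.len()..buffer.len(), "\nworld")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```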
2552 pub fn edit<I, S, T>(
2553 &mut self,
2554 edits_iter: I,
2555 autoindent_mode: Option<AutoindentMode>,
2556 cx: &mut Context<Self>,
2557 ) -> Option<clock::Lamport>
2558 where
2559 I: IntoIterator<Item = (Range<S>, T)>,
2560 S: ToOffset,
2561 T: Into<Arc<str>>,
2562 {
2563 // Skip invalid edits and coalesce contiguous ones.
2564 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2565
2566 for (range, new_text) in edits_iter {
2567 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2568
2569 if range.start > range.end {
2570 mem::swap(&mut range.start, &mut range.end);
2571 }
2572 let new_text = new_text.into();
2573 if !new_text.is_empty() || !range.is_empty() {
2574 if let Some((prev_range, prev_text)) = edits.last_mut()
2575 && prev_range.end >= range.start
2576 {
2577 prev_range.end = cmp::max(prev_range.end, range.end);
2578 *prev_text = format!("{prev_text}{new_text}").into();
2579 } else {
2580 edits.push((range, new_text));
2581 }
2582 }
2583 }
2584 if edits.is_empty() {
2585 return None;
2586 }
2587
2588 self.start_transaction();
2589 self.pending_autoindent.take();
2590 let autoindent_request = autoindent_mode
2591 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2592
2593 let edit_operation = self.text.edit(edits.iter().cloned());
2594 let edit_id = edit_operation.timestamp();
2595
2596 if let Some((before_edit, mode)) = autoindent_request {
2597 let mut delta = 0isize;
2598 let mut previous_setting = None;
2599 let entries: Vec<_> = edits
2600 .into_iter()
2601 .enumerate()
2602 .zip(&edit_operation.as_edit().unwrap().new_text)
2603 .filter(|((_, (range, _)), _)| {
2604 let language = before_edit.language_at(range.start);
2605 let language_id = language.map(|l| l.id());
2606 if let Some((cached_language_id, auto_indent)) = previous_setting
2607 && cached_language_id == language_id
2608 {
2609 auto_indent
2610 } else {
2611 // The auto-indent setting is not present in editorconfigs, hence
2612 // we can avoid passing the file here.
2613 let auto_indent =
2614 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2615 previous_setting = Some((language_id, auto_indent));
2616 auto_indent
2617 }
2618 })
2619 .map(|((ix, (range, _)), new_text)| {
2620 let new_text_length = new_text.len();
2621 let old_start = range.start.to_point(&before_edit);
2622 let new_start = (delta + range.start as isize) as usize;
2623 let range_len = range.end - range.start;
2624 delta += new_text_length as isize - range_len as isize;
2625
2626 // Decide what range of the insertion to auto-indent, and whether
2627 // the first line of the insertion should be considered a newly-inserted line
2628 // or an edit to an existing line.
2629 let mut range_of_insertion_to_indent = 0..new_text_length;
2630 let mut first_line_is_new = true;
2631
2632 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2633 let old_line_end = before_edit.line_len(old_start.row);
2634
2635 if old_start.column > old_line_start {
2636 first_line_is_new = false;
2637 }
2638
2639 if !new_text.contains('\n')
2640 && (old_start.column + (range_len as u32) < old_line_end
2641 || old_line_end == old_line_start)
2642 {
2643 first_line_is_new = false;
2644 }
2645
2646 // When inserting text starting with a newline, avoid auto-indenting the
2647 // previous line.
2648 if new_text.starts_with('\n') {
2649 range_of_insertion_to_indent.start += 1;
2650 first_line_is_new = true;
2651 }
2652
2653 let mut original_indent_column = None;
2654 if let AutoindentMode::Block {
2655 original_indent_columns,
2656 } = &mode
2657 {
2658 original_indent_column = Some(if new_text.starts_with('\n') {
2659 indent_size_for_text(
2660 new_text[range_of_insertion_to_indent.clone()].chars(),
2661 )
2662 .len
2663 } else {
2664 original_indent_columns
2665 .get(ix)
2666 .copied()
2667 .flatten()
2668 .unwrap_or_else(|| {
2669 indent_size_for_text(
2670 new_text[range_of_insertion_to_indent.clone()].chars(),
2671 )
2672 .len
2673 })
2674 });
2675
2676 // Avoid auto-indenting the line after the edit.
2677 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2678 range_of_insertion_to_indent.end -= 1;
2679 }
2680 }
2681
2682 AutoindentRequestEntry {
2683 first_line_is_new,
2684 original_indent_column,
2685 indent_size: before_edit.language_indent_size_at(range.start, cx),
2686 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2687 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2688 }
2689 })
2690 .collect();
2691
2692 if !entries.is_empty() {
2693 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2694 before_edit,
2695 entries,
2696 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2697 ignore_empty_lines: false,
2698 }));
2699 }
2700 }
2701
2702 self.end_transaction(cx);
2703 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2704 Some(edit_id)
2705 }
2706
2707 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2708 self.was_changed();
2709
2710 if self.edits_since::<usize>(old_version).next().is_none() {
2711 return;
2712 }
2713
2714 self.reparse(cx, true);
2715 cx.emit(BufferEvent::Edited);
2716 if was_dirty != self.is_dirty() {
2717 cx.emit(BufferEvent::DirtyChanged);
2718 }
2719 cx.notify();
2720 }
2721
2722 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2723 where
2724 I: IntoIterator<Item = Range<T>>,
2725 T: ToOffset + Copy,
2726 {
2727 let before_edit = self.snapshot();
2728 let entries = ranges
2729 .into_iter()
2730 .map(|range| AutoindentRequestEntry {
2731 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2732 first_line_is_new: true,
2733 indent_size: before_edit.language_indent_size_at(range.start, cx),
2734 original_indent_column: None,
2735 })
2736 .collect();
2737 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2738 before_edit,
2739 entries,
2740 is_block_mode: false,
2741 ignore_empty_lines: true,
2742 }));
2743 self.request_autoindent(cx);
2744 }
2745
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2748 pub fn insert_empty_line(
2749 &mut self,
2750 position: impl ToPoint,
2751 space_above: bool,
2752 space_below: bool,
2753 cx: &mut Context<Self>,
2754 ) -> Point {
2755 let mut position = position.to_point(self);
2756
2757 self.start_transaction();
2758
2759 self.edit(
2760 [(position..position, "\n")],
2761 Some(AutoindentMode::EachLine),
2762 cx,
2763 );
2764
2765 if position.column > 0 {
2766 position += Point::new(1, 0);
2767 }
2768
2769 if !self.is_line_blank(position.row) {
2770 self.edit(
2771 [(position..position, "\n")],
2772 Some(AutoindentMode::EachLine),
2773 cx,
2774 );
2775 }
2776
2777 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2778 self.edit(
2779 [(position..position, "\n")],
2780 Some(AutoindentMode::EachLine),
2781 cx,
2782 );
2783 position.row += 1;
2784 }
2785
2786 if space_below
2787 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2788 {
2789 self.edit(
2790 [(position..position, "\n")],
2791 Some(AutoindentMode::EachLine),
2792 cx,
2793 );
2794 }
2795
2796 self.end_transaction(cx);
2797
2798 position
2799 }
2800
2801 /// Applies the given remote operations to the buffer.
2802 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2803 self.pending_autoindent.take();
2804 let was_dirty = self.is_dirty();
2805 let old_version = self.version.clone();
2806 let mut deferred_ops = Vec::new();
2807 let buffer_ops = ops
2808 .into_iter()
2809 .filter_map(|op| match op {
2810 Operation::Buffer(op) => Some(op),
2811 _ => {
2812 if self.can_apply_op(&op) {
2813 self.apply_op(op, cx);
2814 } else {
2815 deferred_ops.push(op);
2816 }
2817 None
2818 }
2819 })
2820 .collect::<Vec<_>>();
2821 for operation in buffer_ops.iter() {
2822 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2823 }
2824 self.text.apply_ops(buffer_ops);
2825 self.deferred_ops.insert(deferred_ops);
2826 self.flush_deferred_ops(cx);
2827 self.did_edit(&old_version, was_dirty, cx);
2828 // Notify independently of whether the buffer was edited as the operations could include a
2829 // selection update.
2830 cx.notify();
2831 }
2832
2833 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2834 let mut deferred_ops = Vec::new();
2835 for op in self.deferred_ops.drain().iter().cloned() {
2836 if self.can_apply_op(&op) {
2837 self.apply_op(op, cx);
2838 } else {
2839 deferred_ops.push(op);
2840 }
2841 }
2842 self.deferred_ops.insert(deferred_ops);
2843 }
2844
2845 pub fn has_deferred_ops(&self) -> bool {
2846 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2847 }
2848
2849 fn can_apply_op(&self, operation: &Operation) -> bool {
2850 match operation {
2851 Operation::Buffer(_) => {
2852 unreachable!("buffer operations should never be applied at this layer")
2853 }
2854 Operation::UpdateDiagnostics {
2855 diagnostics: diagnostic_set,
2856 ..
2857 } => diagnostic_set.iter().all(|diagnostic| {
2858 self.text.can_resolve(&diagnostic.range.start)
2859 && self.text.can_resolve(&diagnostic.range.end)
2860 }),
2861 Operation::UpdateSelections { selections, .. } => selections
2862 .iter()
2863 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2864 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2865 }
2866 }
2867
2868 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2869 match operation {
2870 Operation::Buffer(_) => {
2871 unreachable!("buffer operations should never be applied at this layer")
2872 }
2873 Operation::UpdateDiagnostics {
2874 server_id,
2875 diagnostics: diagnostic_set,
2876 lamport_timestamp,
2877 } => {
2878 let snapshot = self.snapshot();
2879 self.apply_diagnostic_update(
2880 server_id,
2881 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2882 lamport_timestamp,
2883 cx,
2884 );
2885 }
2886 Operation::UpdateSelections {
2887 selections,
2888 lamport_timestamp,
2889 line_mode,
2890 cursor_shape,
2891 } => {
2892 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2893 && set.lamport_timestamp > lamport_timestamp
2894 {
2895 return;
2896 }
2897
2898 self.remote_selections.insert(
2899 lamport_timestamp.replica_id,
2900 SelectionSet {
2901 selections,
2902 lamport_timestamp,
2903 line_mode,
2904 cursor_shape,
2905 },
2906 );
2907 self.text.lamport_clock.observe(lamport_timestamp);
2908 self.non_text_state_update_count += 1;
2909 }
2910 Operation::UpdateCompletionTriggers {
2911 triggers,
2912 lamport_timestamp,
2913 server_id,
2914 } => {
2915 if triggers.is_empty() {
2916 self.completion_triggers_per_language_server
2917 .remove(&server_id);
2918 self.completion_triggers = self
2919 .completion_triggers_per_language_server
2920 .values()
2921 .flat_map(|triggers| triggers.iter().cloned())
2922 .collect();
2923 } else {
2924 self.completion_triggers_per_language_server
2925 .insert(server_id, triggers.iter().cloned().collect());
2926 self.completion_triggers.extend(triggers);
2927 }
2928 self.text.lamport_clock.observe(lamport_timestamp);
2929 }
2930 Operation::UpdateLineEnding {
2931 line_ending,
2932 lamport_timestamp,
2933 } => {
2934 self.text.set_line_ending(line_ending);
2935 self.text.lamport_clock.observe(lamport_timestamp);
2936 }
2937 }
2938 }
2939
2940 fn apply_diagnostic_update(
2941 &mut self,
2942 server_id: LanguageServerId,
2943 diagnostics: DiagnosticSet,
2944 lamport_timestamp: clock::Lamport,
2945 cx: &mut Context<Self>,
2946 ) {
2947 if lamport_timestamp > self.diagnostics_timestamp {
2948 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2949 if diagnostics.is_empty() {
2950 if let Ok(ix) = ix {
2951 self.diagnostics.remove(ix);
2952 }
2953 } else {
2954 match ix {
2955 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2956 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2957 };
2958 }
2959 self.diagnostics_timestamp = lamport_timestamp;
2960 self.non_text_state_update_count += 1;
2961 self.text.lamport_clock.observe(lamport_timestamp);
2962 cx.notify();
2963 cx.emit(BufferEvent::DiagnosticsUpdated);
2964 }
2965 }
2966
2967 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2968 self.was_changed();
2969 cx.emit(BufferEvent::Operation {
2970 operation,
2971 is_local,
2972 });
2973 }
2974
2975 /// Removes the selections for a given peer.
2976 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2977 self.remote_selections.remove(&replica_id);
2978 cx.notify();
2979 }
2980
2981 /// Undoes the most recent transaction.
2982 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2983 let was_dirty = self.is_dirty();
2984 let old_version = self.version.clone();
2985
2986 if let Some((transaction_id, operation)) = self.text.undo() {
2987 self.send_operation(Operation::Buffer(operation), true, cx);
2988 self.did_edit(&old_version, was_dirty, cx);
2989 Some(transaction_id)
2990 } else {
2991 None
2992 }
2993 }
2994
2995 /// Manually undoes a specific transaction in the buffer's undo history.
2996 pub fn undo_transaction(
2997 &mut self,
2998 transaction_id: TransactionId,
2999 cx: &mut Context<Self>,
3000 ) -> bool {
3001 let was_dirty = self.is_dirty();
3002 let old_version = self.version.clone();
3003 if let Some(operation) = self.text.undo_transaction(transaction_id) {
3004 self.send_operation(Operation::Buffer(operation), true, cx);
3005 self.did_edit(&old_version, was_dirty, cx);
3006 true
3007 } else {
3008 false
3009 }
3010 }
3011
3012 /// Manually undoes all changes after a given transaction in the buffer's undo history.
3013 pub fn undo_to_transaction(
3014 &mut self,
3015 transaction_id: TransactionId,
3016 cx: &mut Context<Self>,
3017 ) -> bool {
3018 let was_dirty = self.is_dirty();
3019 let old_version = self.version.clone();
3020
3021 let operations = self.text.undo_to_transaction(transaction_id);
3022 let undone = !operations.is_empty();
3023 for operation in operations {
3024 self.send_operation(Operation::Buffer(operation), true, cx);
3025 }
3026 if undone {
3027 self.did_edit(&old_version, was_dirty, cx)
3028 }
3029 undone
3030 }
3031
3032 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3033 let was_dirty = self.is_dirty();
3034 let operation = self.text.undo_operations(counts);
3035 let old_version = self.version.clone();
3036 self.send_operation(Operation::Buffer(operation), true, cx);
3037 self.did_edit(&old_version, was_dirty, cx);
3038 }
3039
    /// Redoes the most recent transaction.
3041 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3042 let was_dirty = self.is_dirty();
3043 let old_version = self.version.clone();
3044
3045 if let Some((transaction_id, operation)) = self.text.redo() {
3046 self.send_operation(Operation::Buffer(operation), true, cx);
3047 self.did_edit(&old_version, was_dirty, cx);
3048 Some(transaction_id)
3049 } else {
3050 None
3051 }
3052 }
3053
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3055 pub fn redo_to_transaction(
3056 &mut self,
3057 transaction_id: TransactionId,
3058 cx: &mut Context<Self>,
3059 ) -> bool {
3060 let was_dirty = self.is_dirty();
3061 let old_version = self.version.clone();
3062
3063 let operations = self.text.redo_to_transaction(transaction_id);
3064 let redone = !operations.is_empty();
3065 for operation in operations {
3066 self.send_operation(Operation::Buffer(operation), true, cx);
3067 }
3068 if redone {
3069 self.did_edit(&old_version, was_dirty, cx)
3070 }
3071 redone
3072 }
3073
3074 /// Override current completion triggers with the user-provided completion triggers.
3075 pub fn set_completion_triggers(
3076 &mut self,
3077 server_id: LanguageServerId,
3078 triggers: BTreeSet<String>,
3079 cx: &mut Context<Self>,
3080 ) {
3081 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3082 if triggers.is_empty() {
3083 self.completion_triggers_per_language_server
3084 .remove(&server_id);
3085 self.completion_triggers = self
3086 .completion_triggers_per_language_server
3087 .values()
3088 .flat_map(|triggers| triggers.iter().cloned())
3089 .collect();
3090 } else {
3091 self.completion_triggers_per_language_server
3092 .insert(server_id, triggers.clone());
3093 self.completion_triggers.extend(triggers.iter().cloned());
3094 }
3095 self.send_operation(
3096 Operation::UpdateCompletionTriggers {
3097 triggers: triggers.into_iter().collect(),
3098 lamport_timestamp: self.completion_triggers_timestamp,
3099 server_id,
3100 },
3101 true,
3102 cx,
3103 );
3104 cx.notify();
3105 }
3106
3107 /// Returns a list of strings which trigger a completion menu for this language.
3108 /// Usually this is driven by LSP server which returns a list of trigger characters for completions.
3109 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3110 &self.completion_triggers
3111 }
3112
3113 /// Call this directly after performing edits to prevent the preview tab
3114 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3115 /// to return false until there are additional edits.
3116 pub fn refresh_preview(&mut self) {
3117 self.preview_version = self.version.clone();
3118 }
3119
3120 /// Whether we should preserve the preview status of a tab containing this buffer.
3121 pub fn preserve_preview(&self) -> bool {
3122 !self.has_edits_since(&self.preview_version)
3123 }
3124}
3125
3126#[doc(hidden)]
3127#[cfg(any(test, feature = "test-support"))]
3128impl Buffer {
3129 pub fn edit_via_marked_text(
3130 &mut self,
3131 marked_string: &str,
3132 autoindent_mode: Option<AutoindentMode>,
3133 cx: &mut Context<Self>,
3134 ) {
3135 let edits = self.edits_for_marked_text(marked_string);
3136 self.edit(edits, autoindent_mode, cx);
3137 }
3138
3139 pub fn set_group_interval(&mut self, group_interval: Duration) {
3140 self.text.set_group_interval(group_interval);
3141 }
3142
3143 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3144 where
3145 T: rand::Rng,
3146 {
3147 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3148 let mut last_end = None;
3149 for _ in 0..old_range_count {
3150 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3151 break;
3152 }
3153
3154 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3155 let mut range = self.random_byte_range(new_start, rng);
3156 if rng.random_bool(0.2) {
3157 mem::swap(&mut range.start, &mut range.end);
3158 }
3159 last_end = Some(range.end);
3160
3161 let new_text_len = rng.random_range(0..10);
3162 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3163 new_text = new_text.to_uppercase();
3164
3165 edits.push((range, new_text));
3166 }
3167 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3168 self.edit(edits, None, cx);
3169 }
3170
3171 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3172 let was_dirty = self.is_dirty();
3173 let old_version = self.version.clone();
3174
3175 let ops = self.text.randomly_undo_redo(rng);
3176 if !ops.is_empty() {
3177 for op in ops {
3178 self.send_operation(Operation::Buffer(op), true, cx);
3179 self.did_edit(&old_version, was_dirty, cx);
3180 }
3181 }
3182 }
3183}
3184
3185impl EventEmitter<BufferEvent> for Buffer {}
3186
3187impl Deref for Buffer {
3188 type Target = TextBuffer;
3189
3190 fn deref(&self) -> &Self::Target {
3191 &self.text
3192 }
3193}
3194
3195impl BufferSnapshot {
3196 /// Returns [`IndentSize`] for a given line that respects user settings and
3197 /// language preferences.
3198 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3199 indent_size_for_line(self, row)
3200 }
3201
3202 /// Returns [`IndentSize`] for a given position that respects user settings
3203 /// and language preferences.
3204 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3205 let settings = language_settings(
3206 self.language_at(position).map(|l| l.name()),
3207 self.file(),
3208 cx,
3209 );
3210 if settings.hard_tabs {
3211 IndentSize::tab()
3212 } else {
3213 IndentSize::spaces(settings.tab_size.get())
3214 }
3215 }
3216
3217 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3218 /// is passed in as `single_indent_size`.
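    ///
    /// A minimal sketch, assuming `snapshot` is a [`BufferSnapshot`]:
    ///
    /// ```ignore
    /// let indents = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row}: indent to {} columns", indent.len);
    /// }
    /// ```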
3219 pub fn suggested_indents(
3220 &self,
3221 rows: impl Iterator<Item = u32>,
3222 single_indent_size: IndentSize,
3223 ) -> BTreeMap<u32, IndentSize> {
3224 let mut result = BTreeMap::new();
3225
3226 for row_range in contiguous_ranges(rows, 10) {
3227 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3228 Some(suggestions) => suggestions,
3229 _ => break,
3230 };
3231
3232 for (row, suggestion) in row_range.zip(suggestions) {
3233 let indent_size = if let Some(suggestion) = suggestion {
3234 result
3235 .get(&suggestion.basis_row)
3236 .copied()
3237 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3238 .with_delta(suggestion.delta, single_indent_size)
3239 } else {
3240 self.indent_size_for_line(row)
3241 };
3242
3243 result.insert(row, indent_size);
3244 }
3245 }
3246
3247 result
3248 }
3249
3250 fn suggest_autoindents(
3251 &self,
3252 row_range: Range<u32>,
3253 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3254 let config = &self.language.as_ref()?.config;
3255 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3256
3257 #[derive(Debug, Clone)]
3258 struct StartPosition {
3259 start: Point,
3260 suffix: SharedString,
3261 language: Arc<Language>,
3262 }
3263
3264 // Find the suggested indentation ranges based on the syntax tree.
3265 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3266 let end = Point::new(row_range.end, 0);
3267 let range = (start..end).to_offset(&self.text);
3268 let mut matches = self.syntax.matches_with_options(
3269 range.clone(),
3270 &self.text,
3271 TreeSitterOptions {
3272 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3273 max_start_depth: None,
3274 },
3275 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3276 );
3277 let indent_configs = matches
3278 .grammars()
3279 .iter()
3280 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3281 .collect::<Vec<_>>();
3282
3283 let mut indent_ranges = Vec::<Range<Point>>::new();
3284 let mut start_positions = Vec::<StartPosition>::new();
3285 let mut outdent_positions = Vec::<Point>::new();
3286 while let Some(mat) = matches.peek() {
3287 let mut start: Option<Point> = None;
3288 let mut end: Option<Point> = None;
3289
3290 let config = indent_configs[mat.grammar_index];
3291 for capture in mat.captures {
3292 if capture.index == config.indent_capture_ix {
3293 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3294 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3295 } else if Some(capture.index) == config.start_capture_ix {
3296 start = Some(Point::from_ts_point(capture.node.end_position()));
3297 } else if Some(capture.index) == config.end_capture_ix {
3298 end = Some(Point::from_ts_point(capture.node.start_position()));
3299 } else if Some(capture.index) == config.outdent_capture_ix {
3300 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3301 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3302 start_positions.push(StartPosition {
3303 start: Point::from_ts_point(capture.node.start_position()),
3304 suffix: suffix.clone(),
3305 language: mat.language.clone(),
3306 });
3307 }
3308 }
3309
3310 matches.advance();
3311 if let Some((start, end)) = start.zip(end) {
3312 if start.row == end.row {
3313 continue;
3314 }
3315 let range = start..end;
3316 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3317 Err(ix) => indent_ranges.insert(ix, range),
3318 Ok(ix) => {
3319 let prev_range = &mut indent_ranges[ix];
3320 prev_range.end = prev_range.end.max(range.end);
3321 }
3322 }
3323 }
3324 }
3325
3326 let mut error_ranges = Vec::<Range<Point>>::new();
3327 let mut matches = self
3328 .syntax
3329 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3330 while let Some(mat) = matches.peek() {
3331 let node = mat.captures[0].node;
3332 let start = Point::from_ts_point(node.start_position());
3333 let end = Point::from_ts_point(node.end_position());
3334 let range = start..end;
3335 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3336 Ok(ix) | Err(ix) => ix,
3337 };
3338 let mut end_ix = ix;
3339 while let Some(existing_range) = error_ranges.get(end_ix) {
3340 if existing_range.end < end {
3341 end_ix += 1;
3342 } else {
3343 break;
3344 }
3345 }
3346 error_ranges.splice(ix..end_ix, [range]);
3347 matches.advance();
3348 }
3349
3350 outdent_positions.sort();
3351 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and truncate that range at the outdent position.
3354 if let Some(range_to_truncate) = indent_ranges
3355 .iter_mut()
3356 .rfind(|indent_range| indent_range.contains(&outdent_position))
3357 {
3358 range_to_truncate.end = outdent_position;
3359 }
3360 }
3361
3362 start_positions.sort_by_key(|b| b.start);
3363
        // Find the suggested indentation increases and decreases based on regexes.
3365 let mut regex_outdent_map = HashMap::default();
3366 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3367 let mut start_positions_iter = start_positions.iter().peekable();
3368
3369 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3370 self.for_each_line(
3371 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3372 ..Point::new(row_range.end, 0),
3373 |row, line| {
3374 let indent_len = self.indent_size_for_line(row).len;
3375 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3376 let row_language_config = row_language
3377 .as_ref()
3378 .map(|lang| lang.config())
3379 .unwrap_or(config);
3380
3381 if row_language_config
3382 .decrease_indent_pattern
3383 .as_ref()
3384 .is_some_and(|regex| regex.is_match(line))
3385 {
3386 indent_change_rows.push((row, Ordering::Less));
3387 }
3388 if row_language_config
3389 .increase_indent_pattern
3390 .as_ref()
3391 .is_some_and(|regex| regex.is_match(line))
3392 {
3393 indent_change_rows.push((row + 1, Ordering::Greater));
3394 }
3395 while let Some(pos) = start_positions_iter.peek() {
3396 if pos.start.row < row {
3397 let pos = start_positions_iter.next().unwrap().clone();
3398 last_seen_suffix
3399 .entry(pos.suffix.to_string())
3400 .or_default()
3401 .push(pos);
3402 } else {
3403 break;
3404 }
3405 }
3406 for rule in &row_language_config.decrease_indent_patterns {
3407 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3408 let row_start_column = self.indent_size_for_line(row).len;
3409 let basis_row = rule
3410 .valid_after
3411 .iter()
3412 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3413 .flatten()
3414 .filter(|pos| {
3415 row_language
3416 .as_ref()
3417 .or(self.language.as_ref())
3418 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3419 })
3420 .filter(|pos| pos.start.column <= row_start_column)
3421 .max_by_key(|pos| pos.start.row);
3422 if let Some(outdent_to) = basis_row {
3423 regex_outdent_map.insert(row, outdent_to.start.row);
3424 }
3425 break;
3426 }
3427 }
3428 },
3429 );
3430
3431 let mut indent_changes = indent_change_rows.into_iter().peekable();
3432 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3433 prev_non_blank_row.unwrap_or(0)
3434 } else {
3435 row_range.start.saturating_sub(1)
3436 };
3437
3438 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3439 Some(row_range.map(move |row| {
3440 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3441
3442 let mut indent_from_prev_row = false;
3443 let mut outdent_from_prev_row = false;
3444 let mut outdent_to_row = u32::MAX;
3445 let mut from_regex = false;
3446
3447 while let Some((indent_row, delta)) = indent_changes.peek() {
3448 match indent_row.cmp(&row) {
3449 Ordering::Equal => match delta {
3450 Ordering::Less => {
3451 from_regex = true;
3452 outdent_from_prev_row = true
3453 }
3454 Ordering::Greater => {
3455 indent_from_prev_row = true;
3456 from_regex = true
3457 }
3458 _ => {}
3459 },
3460
3461 Ordering::Greater => break,
3462 Ordering::Less => {}
3463 }
3464
3465 indent_changes.next();
3466 }
3467
3468 for range in &indent_ranges {
3469 if range.start.row >= row {
3470 break;
3471 }
3472 if range.start.row == prev_row && range.end > row_start {
3473 indent_from_prev_row = true;
3474 }
3475 if range.end > prev_row_start && range.end <= row_start {
3476 outdent_to_row = outdent_to_row.min(range.start.row);
3477 }
3478 }
3479
3480 if let Some(basis_row) = regex_outdent_map.get(&row) {
3481 indent_from_prev_row = false;
3482 outdent_to_row = *basis_row;
3483 from_regex = true;
3484 }
3485
3486 let within_error = error_ranges
3487 .iter()
3488 .any(|e| e.start.row < row && e.end > row_start);
3489
3490 let suggestion = if outdent_to_row == prev_row
3491 || (outdent_from_prev_row && indent_from_prev_row)
3492 {
3493 Some(IndentSuggestion {
3494 basis_row: prev_row,
3495 delta: Ordering::Equal,
3496 within_error: within_error && !from_regex,
3497 })
3498 } else if indent_from_prev_row {
3499 Some(IndentSuggestion {
3500 basis_row: prev_row,
3501 delta: Ordering::Greater,
3502 within_error: within_error && !from_regex,
3503 })
3504 } else if outdent_to_row < prev_row {
3505 Some(IndentSuggestion {
3506 basis_row: outdent_to_row,
3507 delta: Ordering::Equal,
3508 within_error: within_error && !from_regex,
3509 })
3510 } else if outdent_from_prev_row {
3511 Some(IndentSuggestion {
3512 basis_row: prev_row,
3513 delta: Ordering::Less,
3514 within_error: within_error && !from_regex,
3515 })
3516 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3517 {
3518 Some(IndentSuggestion {
3519 basis_row: prev_row,
3520 delta: Ordering::Equal,
3521 within_error: within_error && !from_regex,
3522 })
3523 } else {
3524 None
3525 };
3526
3527 prev_row = row;
3528 prev_row_start = row_start;
3529 suggestion
3530 }))
3531 }
3532
3533 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3534 while row > 0 {
3535 row -= 1;
3536 if !self.is_line_blank(row) {
3537 return Some(row);
3538 }
3539 }
3540 None
3541 }
3542
3543 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3544 let captures = self.syntax.captures(range, &self.text, |grammar| {
3545 grammar
3546 .highlights_config
3547 .as_ref()
3548 .map(|config| &config.query)
3549 });
3550 let highlight_maps = captures
3551 .grammars()
3552 .iter()
3553 .map(|grammar| grammar.highlight_map())
3554 .collect();
3555 (captures, highlight_maps)
3556 }
3557
    /// Iterates over chunks of text in the given range of the buffer. Chunk boundaries
    /// are arbitrary because the text is stored in a [`Rope`](text::Rope), but each
    /// returned chunk carries a single syntax highlighting style and diagnostic status.
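    ///
    /// A minimal usage sketch (not compiled), assuming `snapshot` is a parsed
    /// [`BufferSnapshot`]; it relies on the `text` and `diagnostic_severity` fields of
    /// the yielded [`Chunk`]s:
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries at most one highlight id and one diagnostic severity.
    ///     if let Some(severity) = chunk.diagnostic_severity {
    ///         eprintln!("diagnostic ({severity:?}) in {:?}", chunk.text);
    ///     }
    ///     text.push_str(chunk.text);
    /// }
    /// // Concatenating all chunks reproduces the requested range.
    /// assert_eq!(text, snapshot.text());
    /// ```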
3562 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3563 let range = range.start.to_offset(self)..range.end.to_offset(self);
3564
3565 let mut syntax = None;
3566 if language_aware {
3567 syntax = Some(self.get_highlights(range.clone()));
3568 }
3569 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3570 let diagnostics = language_aware;
3571 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3572 }
3573
3574 pub fn highlighted_text_for_range<T: ToOffset>(
3575 &self,
3576 range: Range<T>,
3577 override_style: Option<HighlightStyle>,
3578 syntax_theme: &SyntaxTheme,
3579 ) -> HighlightedText {
3580 HighlightedText::from_buffer_range(
3581 range,
3582 &self.text,
3583 &self.syntax,
3584 override_style,
3585 syntax_theme,
3586 )
3587 }
3588
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// A callback is used so that a single line buffer can be reused, instead of allocating
    /// a new `String` for each line.
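    ///
    /// A sketch (not compiled) of how this helper is used within this module:
    ///
    /// ```ignore
    /// let mut blank_rows = Vec::new();
    /// self.for_each_line(Point::new(0, 0)..self.max_point(), |row, line| {
    ///     if line.trim().is_empty() {
    ///         blank_rows.push(row);
    ///     }
    /// });
    /// ```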
3591 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3592 let mut line = String::new();
3593 let mut row = range.start.row;
3594 for chunk in self
3595 .as_rope()
3596 .chunks_in_range(range.to_offset(self))
3597 .chain(["\n"])
3598 {
3599 for (newline_ix, text) in chunk.split('\n').enumerate() {
3600 if newline_ix > 0 {
3601 callback(row, &line);
3602 row += 1;
3603 line.clear();
3604 }
3605 line.push_str(text);
3606 }
3607 }
3608 }
3609
3610 /// Iterates over every [`SyntaxLayer`] in the buffer.
3611 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3612 self.syntax_layers_for_range(0..self.len(), true)
3613 }
3614
3615 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3616 let offset = position.to_offset(self);
3617 self.syntax_layers_for_range(offset..offset, false)
3618 .filter(|l| {
3619 if let Some(ranges) = l.included_sub_ranges {
3620 ranges.iter().any(|range| {
3621 let start = range.start.to_offset(self);
3622 start <= offset && {
3623 let end = range.end.to_offset(self);
3624 offset < end
3625 }
3626 })
3627 } else {
3628 l.node().start_byte() <= offset && l.node().end_byte() > offset
3629 }
3630 })
3631 .last()
3632 }
3633
3634 pub fn syntax_layers_for_range<D: ToOffset>(
3635 &self,
3636 range: Range<D>,
3637 include_hidden: bool,
3638 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3639 self.syntax
3640 .layers_for_range(range, &self.text, include_hidden)
3641 }
3642
3643 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3644 &self,
3645 range: Range<D>,
3646 ) -> Option<SyntaxLayer<'_>> {
3647 let range = range.to_offset(self);
3648 self.syntax
3649 .layers_for_range(range, &self.text, false)
3650 .max_by(|a, b| {
3651 if a.depth != b.depth {
3652 a.depth.cmp(&b.depth)
3653 } else if a.offset.0 != b.offset.0 {
3654 a.offset.0.cmp(&b.offset.0)
3655 } else {
3656 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3657 }
3658 })
3659 }
3660
3661 /// Returns the main [`Language`].
3662 pub fn language(&self) -> Option<&Arc<Language>> {
3663 self.language.as_ref()
3664 }
3665
3666 /// Returns the [`Language`] at the given location.
3667 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3668 self.syntax_layer_at(position)
3669 .map(|info| info.language)
3670 .or(self.language.as_ref())
3671 }
3672
3673 /// Returns the settings for the language at the given location.
3674 pub fn settings_at<'a, D: ToOffset>(
3675 &'a self,
3676 position: D,
3677 cx: &'a App,
3678 ) -> Cow<'a, LanguageSettings> {
3679 language_settings(
3680 self.language_at(position).map(|l| l.name()),
3681 self.file.as_ref(),
3682 cx,
3683 )
3684 }
3685
3686 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3687 CharClassifier::new(self.language_scope_at(point))
3688 }
3689
3690 /// Returns the [`LanguageScope`] at the given location.
3691 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3692 let offset = position.to_offset(self);
3693 let mut scope = None;
3694 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3695
3696 // Use the layer that has the smallest node intersecting the given point.
3697 for layer in self
3698 .syntax
3699 .layers_for_range(offset..offset, &self.text, false)
3700 {
3701 let mut cursor = layer.node().walk();
3702
3703 let mut range = None;
3704 loop {
3705 let child_range = cursor.node().byte_range();
3706 if !child_range.contains(&offset) {
3707 break;
3708 }
3709
3710 range = Some(child_range);
3711 if cursor.goto_first_child_for_byte(offset).is_none() {
3712 break;
3713 }
3714 }
3715
3716 if let Some(range) = range
3717 && smallest_range_and_depth.as_ref().is_none_or(
3718 |(smallest_range, smallest_range_depth)| {
3719 if layer.depth > *smallest_range_depth {
3720 true
3721 } else if layer.depth == *smallest_range_depth {
3722 range.len() < smallest_range.len()
3723 } else {
3724 false
3725 }
3726 },
3727 )
3728 {
3729 smallest_range_and_depth = Some((range, layer.depth));
3730 scope = Some(LanguageScope {
3731 language: layer.language.clone(),
3732 override_id: layer.override_id(offset, &self.text),
3733 });
3734 }
3735 }
3736
3737 scope.or_else(|| {
3738 self.language.clone().map(|language| LanguageScope {
3739 language,
3740 override_id: None,
3741 })
3742 })
3743 }
3744
3745 /// Returns a tuple of the range and character kind of the word
3746 /// surrounding the given position.
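    ///
    /// A minimal sketch (not compiled), assuming the buffer contains `let foo_bar = 1;`
    /// and that the active [`CharClassifier`] treats `_` as a word character:
    ///
    /// ```ignore
    /// // Offset 6 sits inside `foo_bar`, so the whole identifier is returned.
    /// let (range, kind) = snapshot.surrounding_word(6, None);
    /// assert_eq!(&snapshot.text()[range], "foo_bar");
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```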
3747 pub fn surrounding_word<T: ToOffset>(
3748 &self,
3749 start: T,
3750 scope_context: Option<CharScopeContext>,
3751 ) -> (Range<usize>, Option<CharKind>) {
3752 let mut start = start.to_offset(self);
3753 let mut end = start;
3754 let mut next_chars = self.chars_at(start).take(128).peekable();
3755 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3756
3757 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3758 let word_kind = cmp::max(
3759 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3760 next_chars.peek().copied().map(|c| classifier.kind(c)),
3761 );
3762
3763 for ch in prev_chars {
3764 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3765 start -= ch.len_utf8();
3766 } else {
3767 break;
3768 }
3769 }
3770
3771 for ch in next_chars {
3772 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3773 end += ch.len_utf8();
3774 } else {
3775 break;
3776 }
3777 }
3778
3779 (start..end, word_kind)
3780 }
3781
3782 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3783 /// range. When `require_larger` is true, the node found must be larger than the query range.
3784 ///
3785 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3786 /// be moved to the root of the tree.
3787 fn goto_node_enclosing_range(
3788 cursor: &mut tree_sitter::TreeCursor,
3789 query_range: &Range<usize>,
3790 require_larger: bool,
3791 ) -> bool {
3792 let mut ascending = false;
3793 loop {
3794 let mut range = cursor.node().byte_range();
3795 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3798 if range.start > query_range.start {
3799 cursor.goto_previous_sibling();
3800 range = cursor.node().byte_range();
3801 }
3802 } else {
3803 // When the query range is non-empty and the current node ends exactly at the start,
3804 // move to the next sibling to find a node that extends beyond the start.
3805 if range.end == query_range.start {
3806 cursor.goto_next_sibling();
3807 range = cursor.node().byte_range();
3808 }
3809 }
3810
3811 let encloses = range.contains_inclusive(query_range)
3812 && (!require_larger || range.len() > query_range.len());
3813 if !encloses {
3814 ascending = true;
3815 if !cursor.goto_parent() {
3816 return false;
3817 }
3818 continue;
3819 } else if ascending {
3820 return true;
3821 }
3822
3823 // Descend into the current node.
3824 if cursor
3825 .goto_first_child_for_byte(query_range.start)
3826 .is_none()
3827 {
3828 return true;
3829 }
3830 }
3831 }
3832
3833 pub fn syntax_ancestor<'a, T: ToOffset>(
3834 &'a self,
3835 range: Range<T>,
3836 ) -> Option<tree_sitter::Node<'a>> {
3837 let range = range.start.to_offset(self)..range.end.to_offset(self);
3838 let mut result: Option<tree_sitter::Node<'a>> = None;
3839 for layer in self
3840 .syntax
3841 .layers_for_range(range.clone(), &self.text, true)
3842 {
3843 let mut cursor = layer.node().walk();
3844
3845 // Find the node that both contains the range and is larger than it.
3846 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3847 continue;
3848 }
3849
3850 let left_node = cursor.node();
3851 let mut layer_result = left_node;
3852
3853 // For an empty range, try to find another node immediately to the right of the range.
3854 if left_node.end_byte() == range.start {
3855 let mut right_node = None;
3856 while !cursor.goto_next_sibling() {
3857 if !cursor.goto_parent() {
3858 break;
3859 }
3860 }
3861
3862 while cursor.node().start_byte() == range.start {
3863 right_node = Some(cursor.node());
3864 if !cursor.goto_first_child() {
3865 break;
3866 }
3867 }
3868
3869 // If there is a candidate node on both sides of the (empty) range, then
3870 // decide between the two by favoring a named node over an anonymous token.
3871 // If both nodes are the same in that regard, favor the right one.
3872 if let Some(right_node) = right_node
3873 && (right_node.is_named() || !left_node.is_named())
3874 {
3875 layer_result = right_node;
3876 }
3877 }
3878
3879 if let Some(previous_result) = &result
3880 && previous_result.byte_range().len() < layer_result.byte_range().len()
3881 {
3882 continue;
3883 }
3884 result = Some(layer_result);
3885 }
3886
3887 result
3888 }
3889
3890 /// Find the previous sibling syntax node at the given range.
3891 ///
3892 /// This function locates the syntax node that precedes the node containing
3893 /// the given range. It searches hierarchically by:
3894 /// 1. Finding the node that contains the given range
3895 /// 2. Looking for the previous sibling at the same tree level
3896 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3897 ///
3898 /// Returns `None` if there is no previous sibling at any ancestor level.
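    ///
    /// A hedged sketch (not compiled), assuming a fully parsed Rust buffer containing
    /// `fn a() {}\nfn b() {}`:
    ///
    /// ```ignore
    /// let start = snapshot.text().find("fn b").unwrap();
    /// let sibling = snapshot.syntax_prev_sibling(start..start + 2);
    /// // The previous sibling of `fn b`'s node is expected to be the `fn a` item.
    /// assert_eq!(sibling.map(|node| node.kind()), Some("function_item"));
    /// ```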
3899 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3900 &'a self,
3901 range: Range<T>,
3902 ) -> Option<tree_sitter::Node<'a>> {
3903 let range = range.start.to_offset(self)..range.end.to_offset(self);
3904 let mut result: Option<tree_sitter::Node<'a>> = None;
3905
3906 for layer in self
3907 .syntax
3908 .layers_for_range(range.clone(), &self.text, true)
3909 {
3910 let mut cursor = layer.node().walk();
3911
3912 // Find the node that contains the range
3913 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3914 continue;
3915 }
3916
3917 // Look for the previous sibling, moving up ancestor levels if needed
3918 loop {
3919 if cursor.goto_previous_sibling() {
3920 let layer_result = cursor.node();
3921
3922 if let Some(previous_result) = &result {
3923 if previous_result.byte_range().end < layer_result.byte_range().end {
3924 continue;
3925 }
3926 }
3927 result = Some(layer_result);
3928 break;
3929 }
3930
3931 // No sibling found at this level, try moving up to parent
3932 if !cursor.goto_parent() {
3933 break;
3934 }
3935 }
3936 }
3937
3938 result
3939 }
3940
3941 /// Find the next sibling syntax node at the given range.
3942 ///
3943 /// This function locates the syntax node that follows the node containing
3944 /// the given range. It searches hierarchically by:
3945 /// 1. Finding the node that contains the given range
3946 /// 2. Looking for the next sibling at the same tree level
3947 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3948 ///
3949 /// Returns `None` if there is no next sibling at any ancestor level.
3950 pub fn syntax_next_sibling<'a, T: ToOffset>(
3951 &'a self,
3952 range: Range<T>,
3953 ) -> Option<tree_sitter::Node<'a>> {
3954 let range = range.start.to_offset(self)..range.end.to_offset(self);
3955 let mut result: Option<tree_sitter::Node<'a>> = None;
3956
3957 for layer in self
3958 .syntax
3959 .layers_for_range(range.clone(), &self.text, true)
3960 {
3961 let mut cursor = layer.node().walk();
3962
3963 // Find the node that contains the range
3964 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3965 continue;
3966 }
3967
3968 // Look for the next sibling, moving up ancestor levels if needed
3969 loop {
3970 if cursor.goto_next_sibling() {
3971 let layer_result = cursor.node();
3972
3973 if let Some(previous_result) = &result {
3974 if previous_result.byte_range().start > layer_result.byte_range().start {
3975 continue;
3976 }
3977 }
3978 result = Some(layer_result);
3979 break;
3980 }
3981
3982 // No sibling found at this level, try moving up to parent
3983 if !cursor.goto_parent() {
3984 break;
3985 }
3986 }
3987 }
3988
3989 result
3990 }
3991
    /// Returns the root syntax node within the given row.
3993 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3994 let start_offset = position.to_offset(self);
3995
3996 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3997
3998 let layer = self
3999 .syntax
4000 .layers_for_range(start_offset..start_offset, &self.text, true)
4001 .next()?;
4002
4003 let mut cursor = layer.node().walk();
4004
4005 // Descend to the first leaf that touches the start of the range.
4006 while cursor.goto_first_child_for_byte(start_offset).is_some() {
4007 if cursor.node().end_byte() == start_offset {
4008 cursor.goto_next_sibling();
4009 }
4010 }
4011
4012 // Ascend to the root node within the same row.
4013 while cursor.goto_parent() {
4014 if cursor.node().start_position().row != row {
4015 break;
4016 }
4017 }
4018
4019 Some(cursor.node())
4020 }
4021
4022 /// Returns the outline for the buffer.
4023 ///
4024 /// This method allows passing an optional [`SyntaxTheme`] to
4025 /// syntax-highlight the returned symbols.
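    ///
    /// A minimal sketch (not compiled), assuming the returned [`Outline`] exposes its
    /// flattened items through an `items` field:
    ///
    /// ```ignore
    /// let outline = snapshot.outline(None);
    /// for item in &outline.items {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```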
4026 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4027 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4028 }
4029
4030 /// Returns all the symbols that contain the given position.
4031 ///
4032 /// This method allows passing an optional [`SyntaxTheme`] to
4033 /// syntax-highlight the returned symbols.
4034 pub fn symbols_containing<T: ToOffset>(
4035 &self,
4036 position: T,
4037 theme: Option<&SyntaxTheme>,
4038 ) -> Vec<OutlineItem<Anchor>> {
4039 let position = position.to_offset(self);
4040 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4041 let end = self.clip_offset(position + 1, Bias::Right);
4042 let mut items = self.outline_items_containing(start..end, false, theme);
4043 let mut prev_depth = None;
4044 items.retain(|item| {
4045 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4046 prev_depth = Some(item.depth);
4047 result
4048 });
4049 items
4050 }
4051
4052 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4053 let range = range.to_offset(self);
4054 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4055 grammar.outline_config.as_ref().map(|c| &c.query)
4056 });
4057 let configs = matches
4058 .grammars()
4059 .iter()
4060 .map(|g| g.outline_config.as_ref().unwrap())
4061 .collect::<Vec<_>>();
4062
4063 while let Some(mat) = matches.peek() {
4064 let config = &configs[mat.grammar_index];
4065 let containing_item_node = maybe!({
4066 let item_node = mat.captures.iter().find_map(|cap| {
4067 if cap.index == config.item_capture_ix {
4068 Some(cap.node)
4069 } else {
4070 None
4071 }
4072 })?;
4073
4074 let item_byte_range = item_node.byte_range();
4075 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4076 None
4077 } else {
4078 Some(item_node)
4079 }
4080 });
4081
4082 if let Some(item_node) = containing_item_node {
4083 return Some(
4084 Point::from_ts_point(item_node.start_position())
4085 ..Point::from_ts_point(item_node.end_position()),
4086 );
4087 }
4088
4089 matches.advance();
4090 }
4091 None
4092 }
4093
4094 pub fn outline_items_containing<T: ToOffset>(
4095 &self,
4096 range: Range<T>,
4097 include_extra_context: bool,
4098 theme: Option<&SyntaxTheme>,
4099 ) -> Vec<OutlineItem<Anchor>> {
4100 self.outline_items_containing_internal(
4101 range,
4102 include_extra_context,
4103 theme,
4104 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4105 )
4106 }
4107
4108 pub fn outline_items_as_points_containing<T: ToOffset>(
4109 &self,
4110 range: Range<T>,
4111 include_extra_context: bool,
4112 theme: Option<&SyntaxTheme>,
4113 ) -> Vec<OutlineItem<Point>> {
4114 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4115 range
4116 })
4117 }
4118
4119 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4120 &self,
4121 range: Range<T>,
4122 include_extra_context: bool,
4123 theme: Option<&SyntaxTheme>,
4124 ) -> Vec<OutlineItem<usize>> {
4125 self.outline_items_containing_internal(
4126 range,
4127 include_extra_context,
4128 theme,
4129 |buffer, range| range.to_offset(buffer),
4130 )
4131 }
4132
4133 fn outline_items_containing_internal<T: ToOffset, U>(
4134 &self,
4135 range: Range<T>,
4136 include_extra_context: bool,
4137 theme: Option<&SyntaxTheme>,
4138 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4139 ) -> Vec<OutlineItem<U>> {
4140 let range = range.to_offset(self);
4141 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4142 grammar.outline_config.as_ref().map(|c| &c.query)
4143 });
4144
4145 let mut items = Vec::new();
4146 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4147 while let Some(mat) = matches.peek() {
4148 let config = matches.grammars()[mat.grammar_index]
4149 .outline_config
4150 .as_ref()
4151 .unwrap();
4152 if let Some(item) =
4153 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4154 {
4155 items.push(item);
4156 } else if let Some(capture) = mat
4157 .captures
4158 .iter()
4159 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4160 {
4161 let capture_range = capture.node.start_position()..capture.node.end_position();
4162 let mut capture_row_range =
4163 capture_range.start.row as u32..capture_range.end.row as u32;
4164 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4165 {
4166 capture_row_range.end -= 1;
4167 }
4168 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4169 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4170 last_row_range.end = capture_row_range.end;
4171 } else {
4172 annotation_row_ranges.push(capture_row_range);
4173 }
4174 } else {
4175 annotation_row_ranges.push(capture_row_range);
4176 }
4177 }
4178 matches.advance();
4179 }
4180
4181 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4182
        // Assign depths based on containment relationships and convert ranges via `range_callback`.
4184 let mut item_ends_stack = Vec::<Point>::new();
4185 let mut anchor_items = Vec::new();
4186 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4187 for item in items {
4188 while let Some(last_end) = item_ends_stack.last().copied() {
4189 if last_end < item.range.end {
4190 item_ends_stack.pop();
4191 } else {
4192 break;
4193 }
4194 }
4195
4196 let mut annotation_row_range = None;
4197 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4198 let row_preceding_item = item.range.start.row.saturating_sub(1);
4199 if next_annotation_row_range.end < row_preceding_item {
4200 annotation_row_ranges.next();
4201 } else {
4202 if next_annotation_row_range.end == row_preceding_item {
4203 annotation_row_range = Some(next_annotation_row_range.clone());
4204 annotation_row_ranges.next();
4205 }
4206 break;
4207 }
4208 }
4209
4210 anchor_items.push(OutlineItem {
4211 depth: item_ends_stack.len(),
4212 range: range_callback(self, item.range.clone()),
4213 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4214 text: item.text,
4215 highlight_ranges: item.highlight_ranges,
4216 name_ranges: item.name_ranges,
4217 body_range: item.body_range.map(|r| range_callback(self, r)),
4218 annotation_range: annotation_row_range.map(|annotation_range| {
4219 let point_range = Point::new(annotation_range.start, 0)
4220 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4221 range_callback(self, point_range)
4222 }),
4223 });
4224 item_ends_stack.push(item.range.end);
4225 }
4226
4227 anchor_items
4228 }
4229
4230 fn next_outline_item(
4231 &self,
4232 config: &OutlineConfig,
4233 mat: &SyntaxMapMatch,
4234 range: &Range<usize>,
4235 include_extra_context: bool,
4236 theme: Option<&SyntaxTheme>,
4237 ) -> Option<OutlineItem<Point>> {
4238 let item_node = mat.captures.iter().find_map(|cap| {
4239 if cap.index == config.item_capture_ix {
4240 Some(cap.node)
4241 } else {
4242 None
4243 }
4244 })?;
4245
4246 let item_byte_range = item_node.byte_range();
4247 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4248 return None;
4249 }
4250 let item_point_range = Point::from_ts_point(item_node.start_position())
4251 ..Point::from_ts_point(item_node.end_position());
4252
4253 let mut open_point = None;
4254 let mut close_point = None;
4255
4256 let mut buffer_ranges = Vec::new();
4257 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4258 let mut range = node.start_byte()..node.end_byte();
4259 let start = node.start_position();
4260 if node.end_position().row > start.row {
4261 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4262 }
4263
4264 if !range.is_empty() {
4265 buffer_ranges.push((range, node_is_name));
4266 }
4267 };
4268
4269 for capture in mat.captures {
4270 if capture.index == config.name_capture_ix {
4271 add_to_buffer_ranges(capture.node, true);
4272 } else if Some(capture.index) == config.context_capture_ix
4273 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4274 {
4275 add_to_buffer_ranges(capture.node, false);
4276 } else {
4277 if Some(capture.index) == config.open_capture_ix {
4278 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4279 } else if Some(capture.index) == config.close_capture_ix {
4280 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4281 }
4282 }
4283 }
4284
4285 if buffer_ranges.is_empty() {
4286 return None;
4287 }
4288 let source_range_for_text =
4289 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4290
4291 let mut text = String::new();
4292 let mut highlight_ranges = Vec::new();
4293 let mut name_ranges = Vec::new();
4294 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4295 let mut last_buffer_range_end = 0;
4296 for (buffer_range, is_name) in buffer_ranges {
4297 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4298 if space_added {
4299 text.push(' ');
4300 }
4301 let before_append_len = text.len();
4302 let mut offset = buffer_range.start;
4303 chunks.seek(buffer_range.clone());
4304 for mut chunk in chunks.by_ref() {
4305 if chunk.text.len() > buffer_range.end - offset {
4306 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4307 offset = buffer_range.end;
4308 } else {
4309 offset += chunk.text.len();
4310 }
4311 let style = chunk
4312 .syntax_highlight_id
4313 .zip(theme)
4314 .and_then(|(highlight, theme)| highlight.style(theme));
4315 if let Some(style) = style {
4316 let start = text.len();
4317 let end = start + chunk.text.len();
4318 highlight_ranges.push((start..end, style));
4319 }
4320 text.push_str(chunk.text);
4321 if offset >= buffer_range.end {
4322 break;
4323 }
4324 }
4325 if is_name {
4326 let after_append_len = text.len();
4327 let start = if space_added && !name_ranges.is_empty() {
4328 before_append_len - 1
4329 } else {
4330 before_append_len
4331 };
4332 name_ranges.push(start..after_append_len);
4333 }
4334 last_buffer_range_end = buffer_range.end;
4335 }
4336
4337 Some(OutlineItem {
4338 depth: 0, // We'll calculate the depth later
4339 range: item_point_range,
4340 source_range_for_text: source_range_for_text.to_point(self),
4341 text,
4342 highlight_ranges,
4343 name_ranges,
4344 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4345 annotation_range: None,
4346 })
4347 }
4348
4349 pub fn function_body_fold_ranges<T: ToOffset>(
4350 &self,
4351 within: Range<T>,
4352 ) -> impl Iterator<Item = Range<usize>> + '_ {
4353 self.text_object_ranges(within, TreeSitterOptions::default())
4354 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4355 }
4356
4357 /// For each grammar in the language, runs the provided
4358 /// [`tree_sitter::Query`] against the given range.
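    ///
    /// A sketch (not compiled) mirroring how this method is used elsewhere in this file
    /// to walk outline captures:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     for capture in mat.captures {
    ///         let _node_range = capture.node.byte_range();
    ///     }
    ///     matches.advance();
    /// }
    /// ```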
4359 pub fn matches(
4360 &self,
4361 range: Range<usize>,
4362 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4363 ) -> SyntaxMapMatches<'_> {
4364 self.syntax.matches(range, self, query)
4365 }
4366
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
    /// Hence it may return bracket pairs that lie outside the given range.
    ///
    /// Chunks already present in `known_chunks` are skipped.
    /// The resulting bracket match collections are not ordered.
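    ///
    /// A minimal sketch (not compiled); passing `None` for `known_chunks` fetches (and
    /// caches) bracket matches for every chunk that touches the range:
    ///
    /// ```ignore
    /// let by_chunk = snapshot.fetch_bracket_ranges(0..snapshot.len(), None);
    /// for (row_range, brackets) in &by_chunk {
    ///     println!("rows {row_range:?}: {} bracket pairs", brackets.len());
    /// }
    /// ```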
4372 pub fn fetch_bracket_ranges(
4373 &self,
4374 range: Range<usize>,
4375 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4376 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4377 let mut all_bracket_matches = HashMap::default();
4378
4379 for chunk in self
4380 .tree_sitter_data
4381 .chunks
4382 .applicable_chunks(&[range.to_point(self)])
4383 {
4384 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4385 continue;
4386 }
4387 let chunk_range = chunk.anchor_range();
4388 let chunk_range = chunk_range.to_offset(&self);
4389
4390 if let Some(cached_brackets) =
4391 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4392 {
4393 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4394 continue;
4395 }
4396
4397 let mut all_brackets = Vec::new();
4398 let mut opens = Vec::new();
4399 let mut color_pairs = Vec::new();
4400
4401 let mut matches = self.syntax.matches_with_options(
4402 chunk_range.clone(),
4403 &self.text,
4404 TreeSitterOptions {
4405 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4406 max_start_depth: None,
4407 },
4408 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4409 );
4410 let configs = matches
4411 .grammars()
4412 .iter()
4413 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4414 .collect::<Vec<_>>();
4415
4416 while let Some(mat) = matches.peek() {
4417 let mut open = None;
4418 let mut close = None;
4419 let syntax_layer_depth = mat.depth;
4420 let config = configs[mat.grammar_index];
4421 let pattern = &config.patterns[mat.pattern_index];
4422 for capture in mat.captures {
4423 if capture.index == config.open_capture_ix {
4424 open = Some(capture.node.byte_range());
4425 } else if capture.index == config.close_capture_ix {
4426 close = Some(capture.node.byte_range());
4427 }
4428 }
4429
4430 matches.advance();
4431
4432 let Some((open_range, close_range)) = open.zip(close) else {
4433 continue;
4434 };
4435
4436 let bracket_range = open_range.start..=close_range.end;
4437 if !bracket_range.overlaps(&chunk_range) {
4438 continue;
4439 }
4440
4441 let index = all_brackets.len();
4442 all_brackets.push(BracketMatch {
4443 open_range: open_range.clone(),
4444 close_range: close_range.clone(),
4445 newline_only: pattern.newline_only,
4446 syntax_layer_depth,
4447 color_index: None,
4448 });
4449
                // Certain languages have "brackets" that are not really brackets, e.g. tags, and
                // such a bracket will match the entire tag with all of the text inside it.
                // For now, avoid highlighting any pair where both brackets are longer than a
                // single character. We still need to colorize `<Element/>` bracket pairs, so this
                // check cannot be made stricter.
4454 let should_color =
4455 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4456 if should_color {
4457 opens.push(open_range.clone());
4458 color_pairs.push((open_range, close_range, index));
4459 }
4460 }
4461
4462 opens.sort_by_key(|r| (r.start, r.end));
4463 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4464 color_pairs.sort_by_key(|(_, close, _)| close.end);
4465
4466 let mut open_stack = Vec::new();
4467 let mut open_index = 0;
4468 for (open, close, index) in color_pairs {
4469 while open_index < opens.len() && opens[open_index].start < close.start {
4470 open_stack.push(opens[open_index].clone());
4471 open_index += 1;
4472 }
4473
4474 if open_stack.last() == Some(&open) {
4475 let depth_index = open_stack.len() - 1;
4476 all_brackets[index].color_index = Some(depth_index);
4477 open_stack.pop();
4478 }
4479 }
4480
4481 all_brackets.sort_by_key(|bracket_match| {
4482 (bracket_match.open_range.start, bracket_match.open_range.end)
4483 });
4484
4485 if let empty_slot @ None =
4486 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4487 {
4488 *empty_slot = Some(all_brackets.clone());
4489 }
4490 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4491 }
4492
4493 all_bracket_matches
4494 }
4495
4496 pub fn all_bracket_ranges(
4497 &self,
4498 range: Range<usize>,
4499 ) -> impl Iterator<Item = BracketMatch<usize>> {
4500 self.fetch_bracket_ranges(range.clone(), None)
4501 .into_values()
4502 .flatten()
4503 .filter(move |bracket_match| {
4504 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4505 bracket_range.overlaps(&range)
4506 })
4507 }
4508
    /// Returns bracket range pairs overlapping or adjacent to `range`.
4510 pub fn bracket_ranges<T: ToOffset>(
4511 &self,
4512 range: Range<T>,
4513 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4514 // Find bracket pairs that *inclusively* contain the given range.
4515 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4516 self.all_bracket_ranges(range)
4517 .filter(|pair| !pair.newline_only)
4518 }
4519
4520 pub fn debug_variables_query<T: ToOffset>(
4521 &self,
4522 range: Range<T>,
4523 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4524 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4525
4526 let mut matches = self.syntax.matches_with_options(
4527 range.clone(),
4528 &self.text,
4529 TreeSitterOptions::default(),
4530 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4531 );
4532
4533 let configs = matches
4534 .grammars()
4535 .iter()
4536 .map(|grammar| grammar.debug_variables_config.as_ref())
4537 .collect::<Vec<_>>();
4538
4539 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4540
4541 iter::from_fn(move || {
4542 loop {
4543 while let Some(capture) = captures.pop() {
4544 if capture.0.overlaps(&range) {
4545 return Some(capture);
4546 }
4547 }
4548
4549 let mat = matches.peek()?;
4550
4551 let Some(config) = configs[mat.grammar_index].as_ref() else {
4552 matches.advance();
4553 continue;
4554 };
4555
4556 for capture in mat.captures {
4557 let Some(ix) = config
4558 .objects_by_capture_ix
4559 .binary_search_by_key(&capture.index, |e| e.0)
4560 .ok()
4561 else {
4562 continue;
4563 };
4564 let text_object = config.objects_by_capture_ix[ix].1;
4565 let byte_range = capture.node.byte_range();
4566
4567 let mut found = false;
4568 for (range, existing) in captures.iter_mut() {
4569 if existing == &text_object {
4570 range.start = range.start.min(byte_range.start);
4571 range.end = range.end.max(byte_range.end);
4572 found = true;
4573 break;
4574 }
4575 }
4576
4577 if !found {
4578 captures.push((byte_range, text_object));
4579 }
4580 }
4581
4582 matches.advance();
4583 }
4584 })
4585 }
4586
4587 pub fn text_object_ranges<T: ToOffset>(
4588 &self,
4589 range: Range<T>,
4590 options: TreeSitterOptions,
4591 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4592 let range =
4593 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4594
4595 let mut matches =
4596 self.syntax
4597 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4598 grammar.text_object_config.as_ref().map(|c| &c.query)
4599 });
4600
4601 let configs = matches
4602 .grammars()
4603 .iter()
4604 .map(|grammar| grammar.text_object_config.as_ref())
4605 .collect::<Vec<_>>();
4606
4607 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4608
4609 iter::from_fn(move || {
4610 loop {
4611 while let Some(capture) = captures.pop() {
4612 if capture.0.overlaps(&range) {
4613 return Some(capture);
4614 }
4615 }
4616
4617 let mat = matches.peek()?;
4618
4619 let Some(config) = configs[mat.grammar_index].as_ref() else {
4620 matches.advance();
4621 continue;
4622 };
4623
4624 for capture in mat.captures {
4625 let Some(ix) = config
4626 .text_objects_by_capture_ix
4627 .binary_search_by_key(&capture.index, |e| e.0)
4628 .ok()
4629 else {
4630 continue;
4631 };
4632 let text_object = config.text_objects_by_capture_ix[ix].1;
4633 let byte_range = capture.node.byte_range();
4634
4635 let mut found = false;
4636 for (range, existing) in captures.iter_mut() {
4637 if existing == &text_object {
4638 range.start = range.start.min(byte_range.start);
4639 range.end = range.end.max(byte_range.end);
4640 found = true;
4641 break;
4642 }
4643 }
4644
4645 if !found {
4646 captures.push((byte_range, text_object));
4647 }
4648 }
4649
4650 matches.advance();
4651 }
4652 })
4653 }
4654
    /// Returns enclosing bracket ranges containing the given range.
4656 pub fn enclosing_bracket_ranges<T: ToOffset>(
4657 &self,
4658 range: Range<T>,
4659 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4660 let range = range.start.to_offset(self)..range.end.to_offset(self);
4661
4662 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4663 let max_depth = result
4664 .iter()
4665 .map(|mat| mat.syntax_layer_depth)
4666 .max()
4667 .unwrap_or(0);
4668 result.into_iter().filter(move |pair| {
4669 pair.open_range.start <= range.start
4670 && pair.close_range.end >= range.end
4671 && pair.syntax_layer_depth == max_depth
4672 })
4673 }
4674
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no bracket pair contains it.
    ///
    /// An optional `range_filter` can be passed to restrict which bracket ranges are considered.
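    ///
    /// A minimal sketch (not compiled); the filter shown is a hypothetical policy that
    /// ignores pairs whose open and close brackets touch each other:
    ///
    /// ```ignore
    /// let filter: &dyn Fn(Range<usize>, Range<usize>) -> bool =
    ///     &|open, close| close.start > open.end;
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(10..12, Some(filter))
    /// {
    ///     println!("innermost pair: {open:?} .. {close:?}");
    /// }
    /// ```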
4678 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4679 &self,
4680 range: Range<T>,
4681 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4682 ) -> Option<(Range<usize>, Range<usize>)> {
4683 let range = range.start.to_offset(self)..range.end.to_offset(self);
4684
4685 // Get the ranges of the innermost pair of brackets.
4686 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4687
4688 for pair in self.enclosing_bracket_ranges(range) {
4689 if let Some(range_filter) = range_filter
4690 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4691 {
4692 continue;
4693 }
4694
4695 let len = pair.close_range.end - pair.open_range.start;
4696
4697 if let Some((existing_open, existing_close)) = &result {
4698 let existing_len = existing_close.end - existing_open.start;
4699 if len > existing_len {
4700 continue;
4701 }
4702 }
4703
4704 result = Some((pair.open_range, pair.close_range));
4705 }
4706
4707 result
4708 }
4709
    /// Returns offset ranges for any matches of the redaction query.
    /// The buffer can be associated with multiple languages, and the redaction query associated with each
    /// will be run on the relevant section of the buffer.
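    ///
    /// A minimal sketch (not compiled) that lists every redacted span in the buffer:
    ///
    /// ```ignore
    /// for range in snapshot.redacted_ranges(0..snapshot.len()) {
    ///     println!("redact {} bytes starting at offset {}", range.len(), range.start);
    /// }
    /// ```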
4713 pub fn redacted_ranges<T: ToOffset>(
4714 &self,
4715 range: Range<T>,
4716 ) -> impl Iterator<Item = Range<usize>> + '_ {
4717 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4718 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4719 grammar
4720 .redactions_config
4721 .as_ref()
4722 .map(|config| &config.query)
4723 });
4724
4725 let configs = syntax_matches
4726 .grammars()
4727 .iter()
4728 .map(|grammar| grammar.redactions_config.as_ref())
4729 .collect::<Vec<_>>();
4730
4731 iter::from_fn(move || {
4732 let redacted_range = syntax_matches
4733 .peek()
4734 .and_then(|mat| {
4735 configs[mat.grammar_index].and_then(|config| {
4736 mat.captures
4737 .iter()
4738 .find(|capture| capture.index == config.redaction_capture_ix)
4739 })
4740 })
4741 .map(|mat| mat.node.byte_range());
4742 syntax_matches.advance();
4743 redacted_range
4744 })
4745 }
4746
4747 pub fn injections_intersecting_range<T: ToOffset>(
4748 &self,
4749 range: Range<T>,
4750 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4751 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4752
4753 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4754 grammar
4755 .injection_config
4756 .as_ref()
4757 .map(|config| &config.query)
4758 });
4759
4760 let configs = syntax_matches
4761 .grammars()
4762 .iter()
4763 .map(|grammar| grammar.injection_config.as_ref())
4764 .collect::<Vec<_>>();
4765
4766 iter::from_fn(move || {
4767 let ranges = syntax_matches.peek().and_then(|mat| {
4768 let config = &configs[mat.grammar_index]?;
4769 let content_capture_range = mat.captures.iter().find_map(|capture| {
4770 if capture.index == config.content_capture_ix {
4771 Some(capture.node.byte_range())
4772 } else {
4773 None
4774 }
4775 })?;
4776 let language = self.language_at(content_capture_range.start)?;
4777 Some((content_capture_range, language))
4778 });
4779 syntax_matches.advance();
4780 ranges
4781 })
4782 }
4783
4784 pub fn runnable_ranges(
4785 &self,
4786 offset_range: Range<usize>,
4787 ) -> impl Iterator<Item = RunnableRange> + '_ {
4788 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4789 grammar.runnable_config.as_ref().map(|config| &config.query)
4790 });
4791
4792 let test_configs = syntax_matches
4793 .grammars()
4794 .iter()
4795 .map(|grammar| grammar.runnable_config.as_ref())
4796 .collect::<Vec<_>>();
4797
4798 iter::from_fn(move || {
4799 loop {
4800 let mat = syntax_matches.peek()?;
4801
4802 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4803 let mut run_range = None;
4804 let full_range = mat.captures.iter().fold(
4805 Range {
4806 start: usize::MAX,
4807 end: 0,
4808 },
4809 |mut acc, next| {
4810 let byte_range = next.node.byte_range();
4811 if acc.start > byte_range.start {
4812 acc.start = byte_range.start;
4813 }
4814 if acc.end < byte_range.end {
4815 acc.end = byte_range.end;
4816 }
4817 acc
4818 },
4819 );
4820 if full_range.start > full_range.end {
4821 // We did not find a full spanning range of this match.
4822 return None;
4823 }
4824 let extra_captures: SmallVec<[_; 1]> =
4825 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4826 test_configs
4827 .extra_captures
4828 .get(capture.index as usize)
4829 .cloned()
4830 .and_then(|tag_name| match tag_name {
4831 RunnableCapture::Named(name) => {
4832 Some((capture.node.byte_range(), name))
4833 }
4834 RunnableCapture::Run => {
4835 let _ = run_range.insert(capture.node.byte_range());
4836 None
4837 }
4838 })
4839 }));
4840 let run_range = run_range?;
4841 let tags = test_configs
4842 .query
4843 .property_settings(mat.pattern_index)
4844 .iter()
4845 .filter_map(|property| {
4846 if *property.key == *"tag" {
4847 property
4848 .value
4849 .as_ref()
4850 .map(|value| RunnableTag(value.to_string().into()))
4851 } else {
4852 None
4853 }
4854 })
4855 .collect();
4856 let extra_captures = extra_captures
4857 .into_iter()
4858 .map(|(range, name)| {
4859 (
4860 name.to_string(),
4861 self.text_for_range(range).collect::<String>(),
4862 )
4863 })
4864 .collect();
4865 // All tags should have the same range.
4866 Some(RunnableRange {
4867 run_range,
4868 full_range,
4869 runnable: Runnable {
4870 tags,
4871 language: mat.language,
4872 buffer: self.remote_id(),
4873 },
4874 extra_captures,
4875 buffer_id: self.remote_id(),
4876 })
4877 });
4878
4879 syntax_matches.advance();
4880 if test_range.is_some() {
4881 // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
4882 // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
4883 return test_range;
4884 }
4885 }
4886 })
4887 }
4888
4889 /// Returns selections for remote peers intersecting the given range.
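    ///
    /// A hedged sketch (not compiled), assuming `Anchor::MIN..Anchor::MAX` covers the
    /// whole buffer:
    ///
    /// ```ignore
    /// for (replica_id, _line_mode, _cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     println!("peer {replica_id:?} has {} selections", selections.count());
    /// }
    /// ```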
4890 #[allow(clippy::type_complexity)]
4891 pub fn selections_in_range(
4892 &self,
4893 range: Range<Anchor>,
4894 include_local: bool,
4895 ) -> impl Iterator<
4896 Item = (
4897 ReplicaId,
4898 bool,
4899 CursorShape,
4900 impl Iterator<Item = &Selection<Anchor>> + '_,
4901 ),
4902 > + '_ {
4903 self.remote_selections
4904 .iter()
4905 .filter(move |(replica_id, set)| {
4906 (include_local || **replica_id != self.text.replica_id())
4907 && !set.selections.is_empty()
4908 })
4909 .map(move |(replica_id, set)| {
4910 let start_ix = match set.selections.binary_search_by(|probe| {
4911 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4912 }) {
4913 Ok(ix) | Err(ix) => ix,
4914 };
4915 let end_ix = match set.selections.binary_search_by(|probe| {
4916 probe.start.cmp(&range.end, self).then(Ordering::Less)
4917 }) {
4918 Ok(ix) | Err(ix) => ix,
4919 };
4920
4921 (
4922 *replica_id,
4923 set.line_mode,
4924 set.cursor_shape,
4925 set.selections[start_ix..end_ix].iter(),
4926 )
4927 })
4928 }
4929
    /// Returns whether the buffer contains any diagnostics.
4931 pub fn has_diagnostics(&self) -> bool {
4932 !self.diagnostics.is_empty()
4933 }
4934
4935 /// Returns all the diagnostics intersecting the given range.
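    ///
    /// A minimal sketch (not compiled), collecting error entries with their ranges
    /// resolved to [`Point`]s:
    ///
    /// ```ignore
    /// let errors: Vec<_> = snapshot
    ///     .diagnostics_in_range::<_, Point>(0..snapshot.len(), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```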
4936 pub fn diagnostics_in_range<'a, T, O>(
4937 &'a self,
4938 search_range: Range<T>,
4939 reversed: bool,
4940 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4941 where
4942 T: 'a + Clone + ToOffset,
4943 O: 'a + FromAnchor,
4944 {
4945 let mut iterators: Vec<_> = self
4946 .diagnostics
4947 .iter()
4948 .map(|(_, collection)| {
4949 collection
4950 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4951 .peekable()
4952 })
4953 .collect();
4954
4955 std::iter::from_fn(move || {
4956 let (next_ix, _) = iterators
4957 .iter_mut()
4958 .enumerate()
4959 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4960 .min_by(|(_, a), (_, b)| {
4961 let cmp = a
4962 .range
4963 .start
4964 .cmp(&b.range.start, self)
4965 // when range is equal, sort by diagnostic severity
4966 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4967 // and stabilize order with group_id
4968 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4969 if reversed { cmp.reverse() } else { cmp }
4970 })?;
4971 iterators[next_ix]
4972 .next()
4973 .map(
4974 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4975 diagnostic,
4976 range: FromAnchor::from_anchor(&range.start, self)
4977 ..FromAnchor::from_anchor(&range.end, self),
4978 },
4979 )
4980 })
4981 }
4982
4983 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4984 /// should be used instead.
4985 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4986 &self.diagnostics
4987 }
4988
4989 /// Returns all the diagnostic groups associated with the given
4990 /// language server ID. If no language server ID is provided,
4991 /// all diagnostics groups are returned.
4992 pub fn diagnostic_groups(
4993 &self,
4994 language_server_id: Option<LanguageServerId>,
4995 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4996 let mut groups = Vec::new();
4997
4998 if let Some(language_server_id) = language_server_id {
4999 if let Ok(ix) = self
5000 .diagnostics
5001 .binary_search_by_key(&language_server_id, |e| e.0)
5002 {
5003 self.diagnostics[ix]
5004 .1
5005 .groups(language_server_id, &mut groups, self);
5006 }
5007 } else {
5008 for (language_server_id, diagnostics) in self.diagnostics.iter() {
5009 diagnostics.groups(*language_server_id, &mut groups, self);
5010 }
5011 }
5012
5013 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
5014 let a_start = &group_a.entries[group_a.primary_ix].range.start;
5015 let b_start = &group_b.entries[group_b.primary_ix].range.start;
5016 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5017 });
5018
5019 groups
5020 }
5021
5022 /// Returns an iterator over the diagnostics for the given group.
5023 pub fn diagnostic_group<O>(
5024 &self,
5025 group_id: usize,
5026 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5027 where
5028 O: FromAnchor + 'static,
5029 {
5030 self.diagnostics
5031 .iter()
5032 .flat_map(move |(_, set)| set.group(group_id, self))
5033 }
5034
5035 /// An integer version number that accounts for all updates besides
5036 /// the buffer's text itself (which is versioned via a version vector).
5037 pub fn non_text_state_update_count(&self) -> usize {
5038 self.non_text_state_update_count
5039 }
5040
5041 /// An integer version that changes when the buffer's syntax changes.
5042 pub fn syntax_update_count(&self) -> usize {
5043 self.syntax.update_count()
5044 }
5045
5046 /// Returns a snapshot of underlying file.
5047 pub fn file(&self) -> Option<&Arc<dyn File>> {
5048 self.file.as_ref()
5049 }
5050
5051 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5052 if let Some(file) = self.file() {
5053 if file.path().file_name().is_none() || include_root {
5054 Some(file.full_path(cx).to_string_lossy().into_owned())
5055 } else {
5056 Some(file.path().display(file.path_style(cx)).to_string())
5057 }
5058 } else {
5059 None
5060 }
5061 }
5062
5063 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5064 let query_str = query.fuzzy_contents;
5065 if query_str.is_some_and(|query| query.is_empty()) {
5066 return BTreeMap::default();
5067 }
5068
5069 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5070 language,
5071 override_id: None,
5072 }));
5073
5074 let mut query_ix = 0;
5075 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5076 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5077
5078 let mut words = BTreeMap::default();
5079 let mut current_word_start_ix = None;
5080 let mut chunk_ix = query.range.start;
5081 for chunk in self.chunks(query.range, false) {
5082 for (i, c) in chunk.text.char_indices() {
5083 let ix = chunk_ix + i;
5084 if classifier.is_word(c) {
5085 if current_word_start_ix.is_none() {
5086 current_word_start_ix = Some(ix);
5087 }
5088
5089 if let Some(query_chars) = &query_chars
5090 && query_ix < query_len
5091 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5092 {
5093 query_ix += 1;
5094 }
5095 continue;
5096 } else if let Some(word_start) = current_word_start_ix.take()
5097 && query_ix == query_len
5098 {
5099 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5100 let mut word_text = self.text_for_range(word_start..ix).peekable();
5101 let first_char = word_text
5102 .peek()
5103 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words and "words" that start with a digit.
5105 if !query.skip_digits
5106 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5107 {
5108 words.insert(word_text.collect(), word_range);
5109 }
5110 }
5111 query_ix = 0;
5112 }
5113 chunk_ix += chunk.text.len();
5114 }
5115
5116 words
5117 }
5118}
5119
5120pub struct WordsQuery<'a> {
    /// Only return words that contain every character of this fuzzy string, matched in order and case-insensitively.
5122 pub fuzzy_contents: Option<&'a str>,
5123 /// Skips words that start with a digit.
5124 pub skip_digits: bool,
    /// The buffer offset range within which to look for words.
5126 pub range: Range<usize>,
5127}
5128
5129fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5130 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5131}
5132
5133fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5134 let mut result = IndentSize::spaces(0);
5135 for c in text {
5136 let kind = match c {
5137 ' ' => IndentKind::Space,
5138 '\t' => IndentKind::Tab,
5139 _ => break,
5140 };
5141 if result.len == 0 {
5142 result.kind = kind;
5143 }
5144 result.len += 1;
5145 }
5146 result
5147}
5148
5149impl Clone for BufferSnapshot {
5150 fn clone(&self) -> Self {
5151 Self {
5152 text: self.text.clone(),
5153 syntax: self.syntax.clone(),
5154 file: self.file.clone(),
5155 remote_selections: self.remote_selections.clone(),
5156 diagnostics: self.diagnostics.clone(),
5157 language: self.language.clone(),
5158 tree_sitter_data: self.tree_sitter_data.clone(),
5159 non_text_state_update_count: self.non_text_state_update_count,
5160 }
5161 }
5162}
5163
5164impl Deref for BufferSnapshot {
5165 type Target = text::BufferSnapshot;
5166
5167 fn deref(&self) -> &Self::Target {
5168 &self.text
5169 }
5170}
5171
5172unsafe impl Send for BufferChunks<'_> {}
5173
5174impl<'a> BufferChunks<'a> {
5175 pub(crate) fn new(
5176 text: &'a Rope,
5177 range: Range<usize>,
5178 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5179 diagnostics: bool,
5180 buffer_snapshot: Option<&'a BufferSnapshot>,
5181 ) -> Self {
5182 let mut highlights = None;
5183 if let Some((captures, highlight_maps)) = syntax {
5184 highlights = Some(BufferChunkHighlights {
5185 captures,
5186 next_capture: None,
5187 stack: Default::default(),
5188 highlight_maps,
5189 })
5190 }
5191
5192 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5193 let chunks = text.chunks_in_range(range.clone());
5194
5195 let mut this = BufferChunks {
5196 range,
5197 buffer_snapshot,
5198 chunks,
5199 diagnostic_endpoints,
5200 error_depth: 0,
5201 warning_depth: 0,
5202 information_depth: 0,
5203 hint_depth: 0,
5204 unnecessary_depth: 0,
5205 underline: true,
5206 highlights,
5207 };
5208 this.initialize_diagnostic_endpoints();
5209 this
5210 }
5211
    /// Seeks to the given byte range in the buffer.
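    ///
    /// A sketch (not compiled) mirroring how outline extraction reuses a single
    /// iterator across several sub-ranges:
    ///
    /// ```ignore
    /// let mut chunks = snapshot.chunks(0..snapshot.len(), true);
    /// chunks.seek(10..20);
    /// for chunk in chunks.by_ref() {
    ///     print!("{}", chunk.text);
    /// }
    /// chunks.seek(40..50); // Reposition and continue iterating from the new range.
    /// ```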
5213 pub fn seek(&mut self, range: Range<usize>) {
5214 let old_range = std::mem::replace(&mut self.range, range.clone());
5215 self.chunks.set_range(self.range.clone());
5216 if let Some(highlights) = self.highlights.as_mut() {
5217 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5218 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5219 highlights
5220 .stack
5221 .retain(|(end_offset, _)| *end_offset > range.start);
5222 if let Some(capture) = &highlights.next_capture
5223 && range.start >= capture.node.start_byte()
5224 {
5225 let next_capture_end = capture.node.end_byte();
5226 if range.start < next_capture_end {
5227 highlights.stack.push((
5228 next_capture_end,
5229 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5230 ));
5231 }
5232 highlights.next_capture.take();
5233 }
5234 } else if let Some(snapshot) = self.buffer_snapshot {
5235 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5236 *highlights = BufferChunkHighlights {
5237 captures,
5238 next_capture: None,
5239 stack: Default::default(),
5240 highlight_maps,
5241 };
5242 } else {
5243 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5244 // Seeking such BufferChunks is not supported.
5245 debug_assert!(
5246 false,
5247 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5248 );
5249 }
5250
5251 highlights.captures.set_byte_range(self.range.clone());
5252 self.initialize_diagnostic_endpoints();
5253 }
5254 }
5255
5256 fn initialize_diagnostic_endpoints(&mut self) {
5257 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5258 && let Some(buffer) = self.buffer_snapshot
5259 {
5260 let mut diagnostic_endpoints = Vec::new();
5261 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5262 diagnostic_endpoints.push(DiagnosticEndpoint {
5263 offset: entry.range.start,
5264 is_start: true,
5265 severity: entry.diagnostic.severity,
5266 is_unnecessary: entry.diagnostic.is_unnecessary,
5267 underline: entry.diagnostic.underline,
5268 });
5269 diagnostic_endpoints.push(DiagnosticEndpoint {
5270 offset: entry.range.end,
5271 is_start: false,
5272 severity: entry.diagnostic.severity,
5273 is_unnecessary: entry.diagnostic.is_unnecessary,
5274 underline: entry.diagnostic.underline,
5275 });
5276 }
5277 diagnostic_endpoints
5278 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5279 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5280 self.hint_depth = 0;
5281 self.error_depth = 0;
5282 self.warning_depth = 0;
5283 self.information_depth = 0;
5284 }
5285 }
5286
5287 /// The current byte offset in the buffer.
5288 pub fn offset(&self) -> usize {
5289 self.range.start
5290 }
5291
    pub fn range(&self) -> Range<usize> {
        self.range.clone()
    }

    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }

        if endpoint.is_unnecessary {
            if endpoint.is_start {
                self.unnecessary_depth += 1;
            } else {
                self.unnecessary_depth -= 1;
            }
        }
    }

    fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }

    fn current_code_is_unnecessary(&self) -> bool {
        self.unnecessary_depth > 0
    }
}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
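            // Pop captures that end at or before the current position; they no longer
            // apply to the upcoming chunk.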
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

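        // Take the endpoint iterator out of `self` so that `update_diagnostic_depths`
        // (which borrows `self` mutably) can be called while advancing it; it is put
        // back afterwards.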
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
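            // Clamp the emitted chunk so it ends before the next capture or diagnostic
            // boundary, keeping highlight and severity uniform within a single `Chunk`.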
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

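            // Shift the per-byte tab/char bitmaps so bit 0 corresponds to `bit_start`,
            // masking off the high bits. `unbounded_shl` evaluates to 0 when `bit_end`
            // is 128, so `wrapping_sub(1)` then produces an all-ones mask.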
            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes this [`IndentSize`] and returns one that has been shrunk
    /// (`Ordering::Less`) or enlarged (`Ordering::Greater`) by the given size.
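    ///
    /// For example, `IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(2))`
    /// yields six spaces, while `Ordering::Less` with the same arguments yields two spaces;
    /// a delta of a different [`IndentKind`] leaves a non-empty size unchanged.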
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

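    /// The width of this indent in columns, with each tab counted as `tab_size` columns.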
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

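/// Coalesces an iterator of row numbers into contiguous half-open ranges, starting a
/// new range whenever a value is not exactly one past the previous range's end or once
/// a range reaches `max_len` rows.
///
/// For example, the values `[1, 2, 3, 5, 6, 9]` with `max_len == 2` yield the ranges
/// `1..3`, `3..4`, `5..7`, and `9..10`.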
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

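/// Classifies characters as word, whitespace, or punctuation, optionally consulting a
/// [`LanguageScope`] for additional word characters (the scope's completion-query or
/// linked-edit character sets, depending on the configured [`CharScopeContext`]).
///
/// For example, `CharClassifier::new(None)` classifies `'a'` as [`CharKind::Word`],
/// `' '` as [`CharKind::Whitespace`], and `'-'` as [`CharKind::Punctuation`] unless
/// `ignore_punctuation(true)` has been applied.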
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

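    /// Classifies `c`, overriding the classifier's `ignore_punctuation` setting with the
    /// given flag. Alphanumerics and `'_'` are always word characters; the language scope
    /// may designate additional word characters, which are checked before whitespace.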
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
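///
/// For example, for the text `"a  \nb\t\t\nc"` the returned ranges are `1..3` and `5..7`.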
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

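            // If the first piece of this chunk is entirely whitespace, it may continue a
            // whitespace run that began in the previous chunk, so extend the range back
            // to that run's start.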
            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}