1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::channel::oneshot;
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TaskLabel, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::{Arc, LazyLock},
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
79/// A label for the background task spawned by the buffer to compute
80/// a diff against the contents of its file.
81pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
82
83/// Indicate whether a [`Buffer`] has permissions to edit.
84#[derive(PartialEq, Clone, Copy, Debug)]
85pub enum Capability {
86 /// The buffer is a mutable replica.
87 ReadWrite,
88 /// The buffer is a read-only replica.
89 ReadOnly,
90}
91
92pub type BufferRow = u32;
93
94/// An in-memory representation of a source code file, including its text,
95/// syntax trees, git status, and diagnostics.
96pub struct Buffer {
97 text: TextBuffer,
98 branch_state: Option<BufferBranchState>,
99 /// Filesystem state, `None` when there is no path.
100 file: Option<Arc<dyn File>>,
101 /// The mtime of the file when this buffer was last loaded from
102 /// or saved to disk.
103 saved_mtime: Option<MTime>,
104 /// The version vector when this buffer was last loaded from
105 /// or saved to disk.
106 saved_version: clock::Global,
107 preview_version: clock::Global,
108 transaction_depth: usize,
109 was_dirty_before_starting_transaction: Option<bool>,
110 reload_task: Option<Task<Result<()>>>,
111 language: Option<Arc<Language>>,
112 autoindent_requests: Vec<Arc<AutoindentRequest>>,
113 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
114 pending_autoindent: Option<Task<()>>,
115 sync_parse_timeout: Duration,
116 syntax_map: Mutex<SyntaxMap>,
117 reparse: Option<Task<()>>,
118 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
119 non_text_state_update_count: usize,
120 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
121 remote_selections: TreeMap<ReplicaId, SelectionSet>,
122 diagnostics_timestamp: clock::Lamport,
123 completion_triggers: BTreeSet<String>,
124 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
125 completion_triggers_timestamp: clock::Lamport,
126 deferred_ops: OperationQueue<Operation>,
127 capability: Capability,
128 has_conflict: bool,
129 /// Memoize calls to has_changes_since(saved_version).
130 /// The contents of a cell are (self.version, has_changes) at the time of a last call.
131 has_unsaved_edits: Cell<(clock::Global, bool)>,
132 change_bits: Vec<rc::Weak<Cell<bool>>>,
133 _subscriptions: Vec<gpui::Subscription>,
134 tree_sitter_data: Arc<TreeSitterData>,
135 encoding: &'static Encoding,
136 has_bom: bool,
137}
138
139#[derive(Debug)]
140pub struct TreeSitterData {
141 chunks: RowChunks,
142 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
143}
144
145const MAX_ROWS_IN_A_CHUNK: u32 = 50;
146
147impl TreeSitterData {
148 fn clear(&mut self, snapshot: text::BufferSnapshot) {
149 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
150 self.brackets_by_chunks.get_mut().clear();
151 self.brackets_by_chunks
152 .get_mut()
153 .resize(self.chunks.len(), None);
154 }
155
156 fn new(snapshot: text::BufferSnapshot) -> Self {
157 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
158 Self {
159 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
160 chunks,
161 }
162 }
163
164 fn version(&self) -> &clock::Global {
165 self.chunks.version()
166 }
167}
168
169#[derive(Copy, Clone, Debug, PartialEq, Eq)]
170pub enum ParseStatus {
171 Idle,
172 Parsing,
173}
174
175struct BufferBranchState {
176 base_buffer: Entity<Buffer>,
177 merged_operations: Vec<Lamport>,
178}
179
180/// An immutable, cheaply cloneable representation of a fixed
181/// state of a buffer.
182pub struct BufferSnapshot {
183 pub text: text::BufferSnapshot,
184 pub syntax: SyntaxSnapshot,
185 file: Option<Arc<dyn File>>,
186 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
187 remote_selections: TreeMap<ReplicaId, SelectionSet>,
188 language: Option<Arc<Language>>,
189 non_text_state_update_count: usize,
190 tree_sitter_data: Arc<TreeSitterData>,
191 pub capability: Capability,
192}
193
194/// The kind and amount of indentation in a particular line. For now,
195/// assumes that indentation is all the same character.
196#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
197pub struct IndentSize {
198 /// The number of bytes that comprise the indentation.
199 pub len: u32,
200 /// The kind of whitespace used for indentation.
201 pub kind: IndentKind,
202}
203
204/// A whitespace character that's used for indentation.
205#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
206pub enum IndentKind {
207 /// An ASCII space character.
208 #[default]
209 Space,
210 /// An ASCII tab character.
211 Tab,
212}
213
214/// The shape of a selection cursor.
215#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
216pub enum CursorShape {
217 /// A vertical bar
218 #[default]
219 Bar,
220 /// A block that surrounds the following character
221 Block,
222 /// An underline that runs along the following character
223 Underline,
224 /// A box drawn around the following character
225 Hollow,
226}
227
228impl From<settings::CursorShape> for CursorShape {
229 fn from(shape: settings::CursorShape) -> Self {
230 match shape {
231 settings::CursorShape::Bar => CursorShape::Bar,
232 settings::CursorShape::Block => CursorShape::Block,
233 settings::CursorShape::Underline => CursorShape::Underline,
234 settings::CursorShape::Hollow => CursorShape::Hollow,
235 }
236 }
237}
238
239#[derive(Clone, Debug)]
240struct SelectionSet {
241 line_mode: bool,
242 cursor_shape: CursorShape,
243 selections: Arc<[Selection<Anchor>]>,
244 lamport_timestamp: clock::Lamport,
245}
246
247/// A diagnostic associated with a certain range of a buffer.
248#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
249pub struct Diagnostic {
250 /// The name of the service that produced this diagnostic.
251 pub source: Option<String>,
252 /// The ID provided by the dynamic registration that produced this diagnostic.
253 pub registration_id: Option<SharedString>,
254 /// A machine-readable code that identifies this diagnostic.
255 pub code: Option<NumberOrString>,
256 pub code_description: Option<lsp::Uri>,
257 /// Whether this diagnostic is a hint, warning, or error.
258 pub severity: DiagnosticSeverity,
259 /// The human-readable message associated with this diagnostic.
260 pub message: String,
261 /// The human-readable message (in markdown format)
262 pub markdown: Option<String>,
263 /// An id that identifies the group to which this diagnostic belongs.
264 ///
265 /// When a language server produces a diagnostic with
266 /// one or more associated diagnostics, those diagnostics are all
267 /// assigned a single group ID.
268 pub group_id: usize,
269 /// Whether this diagnostic is the primary diagnostic for its group.
270 ///
271 /// In a given group, the primary diagnostic is the top-level diagnostic
272 /// returned by the language server. The non-primary diagnostics are the
273 /// associated diagnostics.
274 pub is_primary: bool,
275 /// Whether this diagnostic is considered to originate from an analysis of
276 /// files on disk, as opposed to any unsaved buffer contents. This is a
277 /// property of a given diagnostic source, and is configured for a given
278 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
279 /// for the language server.
280 pub is_disk_based: bool,
281 /// Whether this diagnostic marks unnecessary code.
282 pub is_unnecessary: bool,
283 /// Quick separation of diagnostics groups based by their source.
284 pub source_kind: DiagnosticSourceKind,
285 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
286 pub data: Option<Value>,
287 /// Whether to underline the corresponding text range in the editor.
288 pub underline: bool,
289}
290
291#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
292pub enum DiagnosticSourceKind {
293 Pulled,
294 Pushed,
295 Other,
296}
297
298/// An operation used to synchronize this buffer with its other replicas.
299#[derive(Clone, Debug, PartialEq)]
300pub enum Operation {
301 /// A text operation.
302 Buffer(text::Operation),
303
304 /// An update to the buffer's diagnostics.
305 UpdateDiagnostics {
306 /// The id of the language server that produced the new diagnostics.
307 server_id: LanguageServerId,
308 /// The diagnostics.
309 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
310 /// The buffer's lamport timestamp.
311 lamport_timestamp: clock::Lamport,
312 },
313
314 /// An update to the most recent selections in this buffer.
315 UpdateSelections {
316 /// The selections.
317 selections: Arc<[Selection<Anchor>]>,
318 /// The buffer's lamport timestamp.
319 lamport_timestamp: clock::Lamport,
320 /// Whether the selections are in 'line mode'.
321 line_mode: bool,
322 /// The [`CursorShape`] associated with these selections.
323 cursor_shape: CursorShape,
324 },
325
326 /// An update to the characters that should trigger autocompletion
327 /// for this buffer.
328 UpdateCompletionTriggers {
329 /// The characters that trigger autocompletion.
330 triggers: Vec<String>,
331 /// The buffer's lamport timestamp.
332 lamport_timestamp: clock::Lamport,
333 /// The language server ID.
334 server_id: LanguageServerId,
335 },
336
337 /// An update to the line ending type of this buffer.
338 UpdateLineEnding {
339 /// The line ending type.
340 line_ending: LineEnding,
341 /// The buffer's lamport timestamp.
342 lamport_timestamp: clock::Lamport,
343 },
344}
345
346/// An event that occurs in a buffer.
347#[derive(Clone, Debug, PartialEq)]
348pub enum BufferEvent {
349 /// The buffer was changed in a way that must be
350 /// propagated to its other replicas.
351 Operation {
352 operation: Operation,
353 is_local: bool,
354 },
355 /// The buffer was edited.
356 Edited,
357 /// The buffer's `dirty` bit changed.
358 DirtyChanged,
359 /// The buffer was saved.
360 Saved,
361 /// The buffer's file was changed on disk.
362 FileHandleChanged,
363 /// The buffer was reloaded.
364 Reloaded,
365 /// The buffer is in need of a reload
366 ReloadNeeded,
367 /// The buffer's language was changed.
368 /// The boolean indicates whether this buffer did not have a language before, but does now.
369 LanguageChanged(bool),
370 /// The buffer's syntax trees were updated.
371 Reparsed,
372 /// The buffer's diagnostics were updated.
373 DiagnosticsUpdated,
374 /// The buffer gained or lost editing capabilities.
375 CapabilityChanged,
376}
377
378/// The file associated with a buffer.
379pub trait File: Send + Sync + Any {
380 /// Returns the [`LocalFile`] associated with this file, if the
381 /// file is local.
382 fn as_local(&self) -> Option<&dyn LocalFile>;
383
384 /// Returns whether this file is local.
385 fn is_local(&self) -> bool {
386 self.as_local().is_some()
387 }
388
389 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
390 /// only available in some states, such as modification time.
391 fn disk_state(&self) -> DiskState;
392
393 /// Returns the path of this file relative to the worktree's root directory.
394 fn path(&self) -> &Arc<RelPath>;
395
396 /// Returns the path of this file relative to the worktree's parent directory (this means it
397 /// includes the name of the worktree's root folder).
398 fn full_path(&self, cx: &App) -> PathBuf;
399
400 /// Returns the path style of this file.
401 fn path_style(&self, cx: &App) -> PathStyle;
402
403 /// Returns the last component of this handle's absolute path. If this handle refers to the root
404 /// of its worktree, then this method will return the name of the worktree itself.
405 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
406
407 /// Returns the id of the worktree to which this file belongs.
408 ///
409 /// This is needed for looking up project-specific settings.
410 fn worktree_id(&self, cx: &App) -> WorktreeId;
411
412 /// Converts this file into a protobuf message.
413 fn to_proto(&self, cx: &App) -> rpc::proto::File;
414
415 /// Return whether Zed considers this to be a private file.
416 fn is_private(&self) -> bool;
417}
418
419/// The file's storage status - whether it's stored (`Present`), and if so when it was last
420/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
421/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
422/// indicator for new files.
423#[derive(Copy, Clone, Debug, PartialEq)]
424pub enum DiskState {
425 /// File created in Zed that has not been saved.
426 New,
427 /// File present on the filesystem.
428 Present { mtime: MTime },
429 /// Deleted file that was previously present.
430 Deleted,
431}
432
433impl DiskState {
434 /// Returns the file's last known modification time on disk.
435 pub fn mtime(self) -> Option<MTime> {
436 match self {
437 DiskState::New => None,
438 DiskState::Present { mtime } => Some(mtime),
439 DiskState::Deleted => None,
440 }
441 }
442
443 pub fn exists(&self) -> bool {
444 match self {
445 DiskState::New => false,
446 DiskState::Present { .. } => true,
447 DiskState::Deleted => false,
448 }
449 }
450}
451
452/// The file associated with a buffer, in the case where the file is on the local disk.
453pub trait LocalFile: File {
454 /// Returns the absolute path of this file
455 fn abs_path(&self, cx: &App) -> PathBuf;
456
457 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
458 fn load(&self, cx: &App) -> Task<Result<String>>;
459
460 /// Loads the file's contents from disk.
461 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
462}
463
464/// The auto-indent behavior associated with an editing operation.
465/// For some editing operations, each affected line of text has its
466/// indentation recomputed. For other operations, the entire block
467/// of edited text is adjusted uniformly.
468#[derive(Clone, Debug)]
469pub enum AutoindentMode {
470 /// Indent each line of inserted text.
471 EachLine,
472 /// Apply the same indentation adjustment to all of the lines
473 /// in a given insertion.
474 Block {
475 /// The original indentation column of the first line of each
476 /// insertion, if it has been copied.
477 ///
478 /// Knowing this makes it possible to preserve the relative indentation
479 /// of every line in the insertion from when it was copied.
480 ///
481 /// If the original indent column is `a`, and the first line of insertion
482 /// is then auto-indented to column `b`, then every other line of
483 /// the insertion will be auto-indented to column `b - a`
484 original_indent_columns: Vec<Option<u32>>,
485 },
486}
487
488#[derive(Clone)]
489struct AutoindentRequest {
490 before_edit: BufferSnapshot,
491 entries: Vec<AutoindentRequestEntry>,
492 is_block_mode: bool,
493 ignore_empty_lines: bool,
494}
495
496#[derive(Debug, Clone)]
497struct AutoindentRequestEntry {
498 /// A range of the buffer whose indentation should be adjusted.
499 range: Range<Anchor>,
500 /// Whether or not these lines should be considered brand new, for the
501 /// purpose of auto-indent. When text is not new, its indentation will
502 /// only be adjusted if the suggested indentation level has *changed*
503 /// since the edit was made.
504 first_line_is_new: bool,
505 indent_size: IndentSize,
506 original_indent_column: Option<u32>,
507}
508
509#[derive(Debug)]
510struct IndentSuggestion {
511 basis_row: u32,
512 delta: Ordering,
513 within_error: bool,
514}
515
516struct BufferChunkHighlights<'a> {
517 captures: SyntaxMapCaptures<'a>,
518 next_capture: Option<SyntaxMapCapture<'a>>,
519 stack: Vec<(usize, HighlightId)>,
520 highlight_maps: Vec<HighlightMap>,
521}
522
523/// An iterator that yields chunks of a buffer's text, along with their
524/// syntax highlights and diagnostic status.
525pub struct BufferChunks<'a> {
526 buffer_snapshot: Option<&'a BufferSnapshot>,
527 range: Range<usize>,
528 chunks: text::Chunks<'a>,
529 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
530 error_depth: usize,
531 warning_depth: usize,
532 information_depth: usize,
533 hint_depth: usize,
534 unnecessary_depth: usize,
535 underline: bool,
536 highlights: Option<BufferChunkHighlights<'a>>,
537}
538
539/// A chunk of a buffer's text, along with its syntax highlight and
540/// diagnostic status.
541#[derive(Clone, Debug, Default)]
542pub struct Chunk<'a> {
543 /// The text of the chunk.
544 pub text: &'a str,
545 /// The syntax highlighting style of the chunk.
546 pub syntax_highlight_id: Option<HighlightId>,
547 /// The highlight style that has been applied to this chunk in
548 /// the editor.
549 pub highlight_style: Option<HighlightStyle>,
550 /// The severity of diagnostic associated with this chunk, if any.
551 pub diagnostic_severity: Option<DiagnosticSeverity>,
552 /// A bitset of which characters are tabs in this string.
553 pub tabs: u128,
554 /// Bitmap of character indices in this chunk
555 pub chars: u128,
556 /// Whether this chunk of text is marked as unnecessary.
557 pub is_unnecessary: bool,
558 /// Whether this chunk of text was originally a tab character.
559 pub is_tab: bool,
560 /// Whether this chunk of text was originally an inlay.
561 pub is_inlay: bool,
562 /// Whether to underline the corresponding text range in the editor.
563 pub underline: bool,
564}
565
566/// A set of edits to a given version of a buffer, computed asynchronously.
567#[derive(Debug)]
568pub struct Diff {
569 pub base_version: clock::Global,
570 pub line_ending: LineEnding,
571 pub edits: Vec<(Range<usize>, Arc<str>)>,
572}
573
574#[derive(Debug, Clone, Copy)]
575pub(crate) struct DiagnosticEndpoint {
576 offset: usize,
577 is_start: bool,
578 underline: bool,
579 severity: DiagnosticSeverity,
580 is_unnecessary: bool,
581}
582
583/// A class of characters, used for characterizing a run of text.
584#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
585pub enum CharKind {
586 /// Whitespace.
587 Whitespace,
588 /// Punctuation.
589 Punctuation,
590 /// Word.
591 Word,
592}
593
594/// Context for character classification within a specific scope.
595#[derive(Copy, Clone, Eq, PartialEq, Debug)]
596pub enum CharScopeContext {
597 /// Character classification for completion queries.
598 ///
599 /// This context treats certain characters as word constituents that would
600 /// normally be considered punctuation, such as '-' in Tailwind classes
601 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
602 Completion,
603 /// Character classification for linked edits.
604 ///
605 /// This context handles characters that should be treated as part of
606 /// identifiers during linked editing operations, such as '.' in JSX
607 /// component names like `<Animated.View>`.
608 LinkedEdit,
609}
610
611/// A runnable is a set of data about a region that could be resolved into a task
612pub struct Runnable {
613 pub tags: SmallVec<[RunnableTag; 1]>,
614 pub language: Arc<Language>,
615 pub buffer: BufferId,
616}
617
618#[derive(Default, Clone, Debug)]
619pub struct HighlightedText {
620 pub text: SharedString,
621 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
622}
623
624#[derive(Default, Debug)]
625struct HighlightedTextBuilder {
626 pub text: String,
627 highlights: Vec<(Range<usize>, HighlightStyle)>,
628}
629
630impl HighlightedText {
631 pub fn from_buffer_range<T: ToOffset>(
632 range: Range<T>,
633 snapshot: &text::BufferSnapshot,
634 syntax_snapshot: &SyntaxSnapshot,
635 override_style: Option<HighlightStyle>,
636 syntax_theme: &SyntaxTheme,
637 ) -> Self {
638 let mut highlighted_text = HighlightedTextBuilder::default();
639 highlighted_text.add_text_from_buffer_range(
640 range,
641 snapshot,
642 syntax_snapshot,
643 override_style,
644 syntax_theme,
645 );
646 highlighted_text.build()
647 }
648
649 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
650 gpui::StyledText::new(self.text.clone())
651 .with_default_highlights(default_style, self.highlights.iter().cloned())
652 }
653
654 /// Returns the first line without leading whitespace unless highlighted
655 /// and a boolean indicating if there are more lines after
656 pub fn first_line_preview(self) -> (Self, bool) {
657 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
658 let first_line = &self.text[..newline_ix];
659
660 // Trim leading whitespace, unless an edit starts prior to it.
661 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
662 if let Some((first_highlight_range, _)) = self.highlights.first() {
663 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
664 }
665
666 let preview_text = &first_line[preview_start_ix..];
667 let preview_highlights = self
668 .highlights
669 .into_iter()
670 .skip_while(|(range, _)| range.end <= preview_start_ix)
671 .take_while(|(range, _)| range.start < newline_ix)
672 .filter_map(|(mut range, highlight)| {
673 range.start = range.start.saturating_sub(preview_start_ix);
674 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
675 if range.is_empty() {
676 None
677 } else {
678 Some((range, highlight))
679 }
680 });
681
682 let preview = Self {
683 text: SharedString::new(preview_text),
684 highlights: preview_highlights.collect(),
685 };
686
687 (preview, self.text.len() > newline_ix)
688 }
689}
690
691impl HighlightedTextBuilder {
692 pub fn build(self) -> HighlightedText {
693 HighlightedText {
694 text: self.text.into(),
695 highlights: self.highlights,
696 }
697 }
698
699 pub fn add_text_from_buffer_range<T: ToOffset>(
700 &mut self,
701 range: Range<T>,
702 snapshot: &text::BufferSnapshot,
703 syntax_snapshot: &SyntaxSnapshot,
704 override_style: Option<HighlightStyle>,
705 syntax_theme: &SyntaxTheme,
706 ) {
707 let range = range.to_offset(snapshot);
708 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
709 let start = self.text.len();
710 self.text.push_str(chunk.text);
711 let end = self.text.len();
712
713 if let Some(highlight_style) = chunk
714 .syntax_highlight_id
715 .and_then(|id| id.style(syntax_theme))
716 {
717 let highlight_style = override_style.map_or(highlight_style, |override_style| {
718 highlight_style.highlight(override_style)
719 });
720 self.highlights.push((start..end, highlight_style));
721 } else if let Some(override_style) = override_style {
722 self.highlights.push((start..end, override_style));
723 }
724 }
725 }
726
727 fn highlighted_chunks<'a>(
728 range: Range<usize>,
729 snapshot: &'a text::BufferSnapshot,
730 syntax_snapshot: &'a SyntaxSnapshot,
731 ) -> BufferChunks<'a> {
732 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
733 grammar
734 .highlights_config
735 .as_ref()
736 .map(|config| &config.query)
737 });
738
739 let highlight_maps = captures
740 .grammars()
741 .iter()
742 .map(|grammar| grammar.highlight_map())
743 .collect();
744
745 BufferChunks::new(
746 snapshot.as_rope(),
747 range,
748 Some((captures, highlight_maps)),
749 false,
750 None,
751 )
752 }
753}
754
755#[derive(Clone)]
756pub struct EditPreview {
757 old_snapshot: text::BufferSnapshot,
758 applied_edits_snapshot: text::BufferSnapshot,
759 syntax_snapshot: SyntaxSnapshot,
760}
761
762impl EditPreview {
763 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
764 let (first, _) = edits.first()?;
765 let (last, _) = edits.last()?;
766
767 let start = first.start.to_point(&self.old_snapshot);
768 let old_end = last.end.to_point(&self.old_snapshot);
769 let new_end = last
770 .end
771 .bias_right(&self.old_snapshot)
772 .to_point(&self.applied_edits_snapshot);
773
774 let start = Point::new(start.row.saturating_sub(3), 0);
775 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
776 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
777
778 Some(unified_diff(
779 &self
780 .old_snapshot
781 .text_for_range(start..old_end)
782 .collect::<String>(),
783 &self
784 .applied_edits_snapshot
785 .text_for_range(start..new_end)
786 .collect::<String>(),
787 ))
788 }
789
790 pub fn highlight_edits(
791 &self,
792 current_snapshot: &BufferSnapshot,
793 edits: &[(Range<Anchor>, impl AsRef<str>)],
794 include_deletions: bool,
795 cx: &App,
796 ) -> HighlightedText {
797 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
798 return HighlightedText::default();
799 };
800
801 let mut highlighted_text = HighlightedTextBuilder::default();
802
803 let visible_range_in_preview_snapshot =
804 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
805 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
806
807 let insertion_highlight_style = HighlightStyle {
808 background_color: Some(cx.theme().status().created_background),
809 ..Default::default()
810 };
811 let deletion_highlight_style = HighlightStyle {
812 background_color: Some(cx.theme().status().deleted_background),
813 ..Default::default()
814 };
815 let syntax_theme = cx.theme().syntax();
816
817 for (range, edit_text) in edits {
818 let edit_new_end_in_preview_snapshot = range
819 .end
820 .bias_right(&self.old_snapshot)
821 .to_offset(&self.applied_edits_snapshot);
822 let edit_start_in_preview_snapshot =
823 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
824
825 let unchanged_range_in_preview_snapshot =
826 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
827 if !unchanged_range_in_preview_snapshot.is_empty() {
828 highlighted_text.add_text_from_buffer_range(
829 unchanged_range_in_preview_snapshot,
830 &self.applied_edits_snapshot,
831 &self.syntax_snapshot,
832 None,
833 syntax_theme,
834 );
835 }
836
837 let range_in_current_snapshot = range.to_offset(current_snapshot);
838 if include_deletions && !range_in_current_snapshot.is_empty() {
839 highlighted_text.add_text_from_buffer_range(
840 range_in_current_snapshot,
841 ¤t_snapshot.text,
842 ¤t_snapshot.syntax,
843 Some(deletion_highlight_style),
844 syntax_theme,
845 );
846 }
847
848 if !edit_text.as_ref().is_empty() {
849 highlighted_text.add_text_from_buffer_range(
850 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
851 &self.applied_edits_snapshot,
852 &self.syntax_snapshot,
853 Some(insertion_highlight_style),
854 syntax_theme,
855 );
856 }
857
858 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
859 }
860
861 highlighted_text.add_text_from_buffer_range(
862 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
863 &self.applied_edits_snapshot,
864 &self.syntax_snapshot,
865 None,
866 syntax_theme,
867 );
868
869 highlighted_text.build()
870 }
871
872 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
873 cx.new(|cx| {
874 let mut buffer = Buffer::local_normalized(
875 self.applied_edits_snapshot.as_rope().clone(),
876 self.applied_edits_snapshot.line_ending(),
877 cx,
878 );
879 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
880 buffer
881 })
882 }
883
884 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
885 let (first, _) = edits.first()?;
886 let (last, _) = edits.last()?;
887
888 let start = first
889 .start
890 .bias_left(&self.old_snapshot)
891 .to_point(&self.applied_edits_snapshot);
892 let end = last
893 .end
894 .bias_right(&self.old_snapshot)
895 .to_point(&self.applied_edits_snapshot);
896
897 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
898 let range = Point::new(start.row, 0)
899 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
900
901 Some(range)
902 }
903}
904
905#[derive(Clone, Debug, PartialEq, Eq)]
906pub struct BracketMatch<T> {
907 pub open_range: Range<T>,
908 pub close_range: Range<T>,
909 pub newline_only: bool,
910 pub syntax_layer_depth: usize,
911 pub color_index: Option<usize>,
912}
913
914impl<T> BracketMatch<T> {
915 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
916 (self.open_range, self.close_range)
917 }
918}
919
920impl Buffer {
921 /// Create a new buffer with the given base text.
922 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
923 Self::build(
924 TextBuffer::new(
925 ReplicaId::LOCAL,
926 cx.entity_id().as_non_zero_u64().into(),
927 base_text.into(),
928 ),
929 None,
930 Capability::ReadWrite,
931 )
932 }
933
934 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
935 pub fn local_normalized(
936 base_text_normalized: Rope,
937 line_ending: LineEnding,
938 cx: &Context<Self>,
939 ) -> Self {
940 Self::build(
941 TextBuffer::new_normalized(
942 ReplicaId::LOCAL,
943 cx.entity_id().as_non_zero_u64().into(),
944 line_ending,
945 base_text_normalized,
946 ),
947 None,
948 Capability::ReadWrite,
949 )
950 }
951
952 /// Create a new buffer that is a replica of a remote buffer.
953 pub fn remote(
954 remote_id: BufferId,
955 replica_id: ReplicaId,
956 capability: Capability,
957 base_text: impl Into<String>,
958 ) -> Self {
959 Self::build(
960 TextBuffer::new(replica_id, remote_id, base_text.into()),
961 None,
962 capability,
963 )
964 }
965
966 /// Create a new buffer that is a replica of a remote buffer, populating its
967 /// state from the given protobuf message.
968 pub fn from_proto(
969 replica_id: ReplicaId,
970 capability: Capability,
971 message: proto::BufferState,
972 file: Option<Arc<dyn File>>,
973 ) -> Result<Self> {
974 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
975 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
976 let mut this = Self::build(buffer, file, capability);
977 this.text.set_line_ending(proto::deserialize_line_ending(
978 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
979 ));
980 this.saved_version = proto::deserialize_version(&message.saved_version);
981 this.saved_mtime = message.saved_mtime.map(|time| time.into());
982 Ok(this)
983 }
984
985 /// Serialize the buffer's state to a protobuf message.
986 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
987 proto::BufferState {
988 id: self.remote_id().into(),
989 file: self.file.as_ref().map(|f| f.to_proto(cx)),
990 base_text: self.base_text().to_string(),
991 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
992 saved_version: proto::serialize_version(&self.saved_version),
993 saved_mtime: self.saved_mtime.map(|time| time.into()),
994 }
995 }
996
997 /// Serialize as protobufs all of the changes to the buffer since the given version.
998 pub fn serialize_ops(
999 &self,
1000 since: Option<clock::Global>,
1001 cx: &App,
1002 ) -> Task<Vec<proto::Operation>> {
1003 let mut operations = Vec::new();
1004 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1005
1006 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1007 proto::serialize_operation(&Operation::UpdateSelections {
1008 selections: set.selections.clone(),
1009 lamport_timestamp: set.lamport_timestamp,
1010 line_mode: set.line_mode,
1011 cursor_shape: set.cursor_shape,
1012 })
1013 }));
1014
1015 for (server_id, diagnostics) in &self.diagnostics {
1016 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1017 lamport_timestamp: self.diagnostics_timestamp,
1018 server_id: *server_id,
1019 diagnostics: diagnostics.iter().cloned().collect(),
1020 }));
1021 }
1022
1023 for (server_id, completions) in &self.completion_triggers_per_language_server {
1024 operations.push(proto::serialize_operation(
1025 &Operation::UpdateCompletionTriggers {
1026 triggers: completions.iter().cloned().collect(),
1027 lamport_timestamp: self.completion_triggers_timestamp,
1028 server_id: *server_id,
1029 },
1030 ));
1031 }
1032
1033 let text_operations = self.text.operations().clone();
1034 cx.background_spawn(async move {
1035 let since = since.unwrap_or_default();
1036 operations.extend(
1037 text_operations
1038 .iter()
1039 .filter(|(_, op)| !since.observed(op.timestamp()))
1040 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1041 );
1042 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1043 operations
1044 })
1045 }
1046
1047 /// Assign a language to the buffer, returning the buffer.
1048 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1049 self.set_language_async(Some(language), cx);
1050 self
1051 }
1052
1053 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
1054 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1055 self.set_language(Some(language), cx);
1056 self
1057 }
1058
1059 /// Returns the [`Capability`] of this buffer.
1060 pub fn capability(&self) -> Capability {
1061 self.capability
1062 }
1063
1064 /// Whether this buffer can only be read.
1065 pub fn read_only(&self) -> bool {
1066 self.capability == Capability::ReadOnly
1067 }
1068
1069 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
1070 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1071 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1072 let snapshot = buffer.snapshot();
1073 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1074 let tree_sitter_data = TreeSitterData::new(snapshot);
1075 Self {
1076 saved_mtime,
1077 tree_sitter_data: Arc::new(tree_sitter_data),
1078 saved_version: buffer.version(),
1079 preview_version: buffer.version(),
1080 reload_task: None,
1081 transaction_depth: 0,
1082 was_dirty_before_starting_transaction: None,
1083 has_unsaved_edits: Cell::new((buffer.version(), false)),
1084 text: buffer,
1085 branch_state: None,
1086 file,
1087 capability,
1088 syntax_map,
1089 reparse: None,
1090 non_text_state_update_count: 0,
1091 sync_parse_timeout: Duration::from_millis(1),
1092 parse_status: watch::channel(ParseStatus::Idle),
1093 autoindent_requests: Default::default(),
1094 wait_for_autoindent_txs: Default::default(),
1095 pending_autoindent: Default::default(),
1096 language: None,
1097 remote_selections: Default::default(),
1098 diagnostics: Default::default(),
1099 diagnostics_timestamp: Lamport::MIN,
1100 completion_triggers: Default::default(),
1101 completion_triggers_per_language_server: Default::default(),
1102 completion_triggers_timestamp: Lamport::MIN,
1103 deferred_ops: OperationQueue::new(),
1104 has_conflict: false,
1105 change_bits: Default::default(),
1106 _subscriptions: Vec::new(),
1107 encoding: encoding_rs::UTF_8,
1108 has_bom: false,
1109 }
1110 }
1111
1112 pub fn build_snapshot(
1113 text: Rope,
1114 language: Option<Arc<Language>>,
1115 language_registry: Option<Arc<LanguageRegistry>>,
1116 cx: &mut App,
1117 ) -> impl Future<Output = BufferSnapshot> + use<> {
1118 let entity_id = cx.reserve_entity::<Self>().entity_id();
1119 let buffer_id = entity_id.as_non_zero_u64().into();
1120 async move {
1121 let text =
1122 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1123 .snapshot();
1124 let mut syntax = SyntaxMap::new(&text).snapshot();
1125 if let Some(language) = language.clone() {
1126 let language_registry = language_registry.clone();
1127 syntax.reparse(&text, language_registry, language);
1128 }
1129 let tree_sitter_data = TreeSitterData::new(text.clone());
1130 BufferSnapshot {
1131 text,
1132 syntax,
1133 file: None,
1134 diagnostics: Default::default(),
1135 remote_selections: Default::default(),
1136 tree_sitter_data: Arc::new(tree_sitter_data),
1137 language,
1138 non_text_state_update_count: 0,
1139 capability: Capability::ReadOnly,
1140 }
1141 }
1142 }
1143
1144 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1145 let entity_id = cx.reserve_entity::<Self>().entity_id();
1146 let buffer_id = entity_id.as_non_zero_u64().into();
1147 let text = TextBuffer::new_normalized(
1148 ReplicaId::LOCAL,
1149 buffer_id,
1150 Default::default(),
1151 Rope::new(),
1152 )
1153 .snapshot();
1154 let syntax = SyntaxMap::new(&text).snapshot();
1155 let tree_sitter_data = TreeSitterData::new(text.clone());
1156 BufferSnapshot {
1157 text,
1158 syntax,
1159 tree_sitter_data: Arc::new(tree_sitter_data),
1160 file: None,
1161 diagnostics: Default::default(),
1162 remote_selections: Default::default(),
1163 language: None,
1164 non_text_state_update_count: 0,
1165 capability: Capability::ReadOnly,
1166 }
1167 }
1168
1169 #[cfg(any(test, feature = "test-support"))]
1170 pub fn build_snapshot_sync(
1171 text: Rope,
1172 language: Option<Arc<Language>>,
1173 language_registry: Option<Arc<LanguageRegistry>>,
1174 cx: &mut App,
1175 ) -> BufferSnapshot {
1176 let entity_id = cx.reserve_entity::<Self>().entity_id();
1177 let buffer_id = entity_id.as_non_zero_u64().into();
1178 let text =
1179 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1180 .snapshot();
1181 let mut syntax = SyntaxMap::new(&text).snapshot();
1182 if let Some(language) = language.clone() {
1183 syntax.reparse(&text, language_registry, language);
1184 }
1185 let tree_sitter_data = TreeSitterData::new(text.clone());
1186 BufferSnapshot {
1187 text,
1188 syntax,
1189 tree_sitter_data: Arc::new(tree_sitter_data),
1190 file: None,
1191 diagnostics: Default::default(),
1192 remote_selections: Default::default(),
1193 language,
1194 non_text_state_update_count: 0,
1195 capability: Capability::ReadOnly,
1196 }
1197 }
1198
1199 /// Retrieve a snapshot of the buffer's current state. This is computationally
1200 /// cheap, and allows reading from the buffer on a background thread.
1201 pub fn snapshot(&self) -> BufferSnapshot {
1202 let text = self.text.snapshot();
1203 let mut syntax_map = self.syntax_map.lock();
1204 syntax_map.interpolate(&text);
1205 let syntax = syntax_map.snapshot();
1206
1207 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1208 Arc::new(TreeSitterData::new(text.clone()))
1209 } else {
1210 self.tree_sitter_data.clone()
1211 };
1212
1213 BufferSnapshot {
1214 text,
1215 syntax,
1216 tree_sitter_data,
1217 file: self.file.clone(),
1218 remote_selections: self.remote_selections.clone(),
1219 diagnostics: self.diagnostics.clone(),
1220 language: self.language.clone(),
1221 non_text_state_update_count: self.non_text_state_update_count,
1222 capability: self.capability,
1223 }
1224 }
1225
1226 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1227 let this = cx.entity();
1228 cx.new(|cx| {
1229 let mut branch = Self {
1230 branch_state: Some(BufferBranchState {
1231 base_buffer: this.clone(),
1232 merged_operations: Default::default(),
1233 }),
1234 language: self.language.clone(),
1235 has_conflict: self.has_conflict,
1236 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1237 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1238 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1239 };
1240 if let Some(language_registry) = self.language_registry() {
1241 branch.set_language_registry(language_registry);
1242 }
1243
1244 // Reparse the branch buffer so that we get syntax highlighting immediately.
1245 branch.reparse(cx, true);
1246
1247 branch
1248 })
1249 }
1250
1251 pub fn preview_edits(
1252 &self,
1253 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1254 cx: &App,
1255 ) -> Task<EditPreview> {
1256 let registry = self.language_registry();
1257 let language = self.language().cloned();
1258 let old_snapshot = self.text.snapshot();
1259 let mut branch_buffer = self.text.branch();
1260 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1261 cx.background_spawn(async move {
1262 if !edits.is_empty() {
1263 if let Some(language) = language.clone() {
1264 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1265 }
1266
1267 branch_buffer.edit(edits.iter().cloned());
1268 let snapshot = branch_buffer.snapshot();
1269 syntax_snapshot.interpolate(&snapshot);
1270
1271 if let Some(language) = language {
1272 syntax_snapshot.reparse(&snapshot, registry, language);
1273 }
1274 }
1275 EditPreview {
1276 old_snapshot,
1277 applied_edits_snapshot: branch_buffer.snapshot(),
1278 syntax_snapshot,
1279 }
1280 })
1281 }
1282
1283 /// Applies all of the changes in this buffer that intersect any of the
1284 /// given `ranges` to its base buffer.
1285 ///
1286 /// If `ranges` is empty, then all changes will be applied. This buffer must
1287 /// be a branch buffer to call this method.
1288 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1289 let Some(base_buffer) = self.base_buffer() else {
1290 debug_panic!("not a branch buffer");
1291 return;
1292 };
1293
1294 let mut ranges = if ranges.is_empty() {
1295 &[0..usize::MAX]
1296 } else {
1297 ranges.as_slice()
1298 }
1299 .iter()
1300 .peekable();
1301
1302 let mut edits = Vec::new();
1303 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1304 let mut is_included = false;
1305 while let Some(range) = ranges.peek() {
1306 if range.end < edit.new.start {
1307 ranges.next().unwrap();
1308 } else {
1309 if range.start <= edit.new.end {
1310 is_included = true;
1311 }
1312 break;
1313 }
1314 }
1315
1316 if is_included {
1317 edits.push((
1318 edit.old.clone(),
1319 self.text_for_range(edit.new.clone()).collect::<String>(),
1320 ));
1321 }
1322 }
1323
1324 let operation = base_buffer.update(cx, |base_buffer, cx| {
1325 // cx.emit(BufferEvent::DiffBaseChanged);
1326 base_buffer.edit(edits, None, cx)
1327 });
1328
1329 if let Some(operation) = operation
1330 && let Some(BufferBranchState {
1331 merged_operations, ..
1332 }) = &mut self.branch_state
1333 {
1334 merged_operations.push(operation);
1335 }
1336 }
1337
1338 fn on_base_buffer_event(
1339 &mut self,
1340 _: Entity<Buffer>,
1341 event: &BufferEvent,
1342 cx: &mut Context<Self>,
1343 ) {
1344 let BufferEvent::Operation { operation, .. } = event else {
1345 return;
1346 };
1347 let Some(BufferBranchState {
1348 merged_operations, ..
1349 }) = &mut self.branch_state
1350 else {
1351 return;
1352 };
1353
1354 let mut operation_to_undo = None;
1355 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1356 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1357 {
1358 merged_operations.remove(ix);
1359 operation_to_undo = Some(operation.timestamp);
1360 }
1361
1362 self.apply_ops([operation.clone()], cx);
1363
1364 if let Some(timestamp) = operation_to_undo {
1365 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1366 self.undo_operations(counts, cx);
1367 }
1368 }
1369
1370 #[cfg(test)]
1371 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1372 &self.text
1373 }
1374
1375 /// Retrieve a snapshot of the buffer's raw text, without any
1376 /// language-related state like the syntax tree or diagnostics.
1377 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1378 self.text.snapshot()
1379 }
1380
1381 /// The file associated with the buffer, if any.
1382 pub fn file(&self) -> Option<&Arc<dyn File>> {
1383 self.file.as_ref()
1384 }
1385
1386 /// The version of the buffer that was last saved or reloaded from disk.
1387 pub fn saved_version(&self) -> &clock::Global {
1388 &self.saved_version
1389 }
1390
1391 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1392 pub fn saved_mtime(&self) -> Option<MTime> {
1393 self.saved_mtime
1394 }
1395
1396 /// Returns the character encoding of the buffer's file.
1397 pub fn encoding(&self) -> &'static Encoding {
1398 self.encoding
1399 }
1400
1401 /// Sets the character encoding of the buffer.
1402 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1403 self.encoding = encoding;
1404 }
1405
1406 /// Returns whether the buffer has a Byte Order Mark.
1407 pub fn has_bom(&self) -> bool {
1408 self.has_bom
1409 }
1410
1411 /// Sets whether the buffer has a Byte Order Mark.
1412 pub fn set_has_bom(&mut self, has_bom: bool) {
1413 self.has_bom = has_bom;
1414 }
1415
1416 /// Assign a language to the buffer.
1417 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1418 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1419 }
1420
1421 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1422 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1423 self.set_language_(language, true, cx);
1424 }
1425
1426 fn set_language_(
1427 &mut self,
1428 language: Option<Arc<Language>>,
1429 may_block: bool,
1430 cx: &mut Context<Self>,
1431 ) {
1432 self.non_text_state_update_count += 1;
1433 self.syntax_map.lock().clear(&self.text);
1434 let old_language = std::mem::replace(&mut self.language, language);
1435 self.was_changed();
1436 self.reparse(cx, may_block);
1437 let has_fresh_language =
1438 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1439 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1440 }
1441
1442 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1443 /// other languages if parts of the buffer are written in different languages.
1444 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1445 self.syntax_map
1446 .lock()
1447 .set_language_registry(language_registry);
1448 }
1449
1450 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1451 self.syntax_map.lock().language_registry()
1452 }
1453
1454 /// Assign the line ending type to the buffer.
1455 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1456 self.text.set_line_ending(line_ending);
1457
1458 let lamport_timestamp = self.text.lamport_clock.tick();
1459 self.send_operation(
1460 Operation::UpdateLineEnding {
1461 line_ending,
1462 lamport_timestamp,
1463 },
1464 true,
1465 cx,
1466 );
1467 }
1468
1469 /// Assign the buffer a new [`Capability`].
1470 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1471 if self.capability != capability {
1472 self.capability = capability;
1473 cx.emit(BufferEvent::CapabilityChanged)
1474 }
1475 }
1476
1477 /// This method is called to signal that the buffer has been saved.
1478 pub fn did_save(
1479 &mut self,
1480 version: clock::Global,
1481 mtime: Option<MTime>,
1482 cx: &mut Context<Self>,
1483 ) {
1484 self.saved_version = version.clone();
1485 self.has_unsaved_edits.set((version, false));
1486 self.has_conflict = false;
1487 self.saved_mtime = mtime;
1488 self.was_changed();
1489 cx.emit(BufferEvent::Saved);
1490 cx.notify();
1491 }
1492
1493 /// Reloads the contents of the buffer from disk.
1494 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1495 let (tx, rx) = futures::channel::oneshot::channel();
1496 let prev_version = self.text.version();
1497 self.reload_task = Some(cx.spawn(async move |this, cx| {
1498 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1499 let file = this.file.as_ref()?.as_local()?;
1500
1501 Some((file.disk_state().mtime(), file.load(cx)))
1502 })?
1503 else {
1504 return Ok(());
1505 };
1506
1507 let new_text = new_text.await?;
1508 let diff = this
1509 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1510 .await;
1511 this.update(cx, |this, cx| {
1512 if this.version() == diff.base_version {
1513 this.finalize_last_transaction();
1514 this.apply_diff(diff, cx);
1515 tx.send(this.finalize_last_transaction().cloned()).ok();
1516 this.has_conflict = false;
1517 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1518 } else {
1519 if !diff.edits.is_empty()
1520 || this
1521 .edits_since::<usize>(&diff.base_version)
1522 .next()
1523 .is_some()
1524 {
1525 this.has_conflict = true;
1526 }
1527
1528 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1529 }
1530
1531 this.reload_task.take();
1532 })
1533 }));
1534 rx
1535 }
1536
1537 /// This method is called to signal that the buffer has been reloaded.
1538 pub fn did_reload(
1539 &mut self,
1540 version: clock::Global,
1541 line_ending: LineEnding,
1542 mtime: Option<MTime>,
1543 cx: &mut Context<Self>,
1544 ) {
1545 self.saved_version = version;
1546 self.has_unsaved_edits
1547 .set((self.saved_version.clone(), false));
1548 self.text.set_line_ending(line_ending);
1549 self.saved_mtime = mtime;
1550 cx.emit(BufferEvent::Reloaded);
1551 cx.notify();
1552 }
1553
1554 /// Updates the [`File`] backing this buffer. This should be called when
1555 /// the file has changed or has been deleted.
1556 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1557 let was_dirty = self.is_dirty();
1558 let mut file_changed = false;
1559
1560 if let Some(old_file) = self.file.as_ref() {
1561 if new_file.path() != old_file.path() {
1562 file_changed = true;
1563 }
1564
1565 let old_state = old_file.disk_state();
1566 let new_state = new_file.disk_state();
1567 if old_state != new_state {
1568 file_changed = true;
1569 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1570 cx.emit(BufferEvent::ReloadNeeded)
1571 }
1572 }
1573 } else {
1574 file_changed = true;
1575 };
1576
1577 self.file = Some(new_file);
1578 if file_changed {
1579 self.was_changed();
1580 self.non_text_state_update_count += 1;
1581 if was_dirty != self.is_dirty() {
1582 cx.emit(BufferEvent::DirtyChanged);
1583 }
1584 cx.emit(BufferEvent::FileHandleChanged);
1585 cx.notify();
1586 }
1587 }
1588
1589 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1590 Some(self.branch_state.as_ref()?.base_buffer.clone())
1591 }
1592
1593 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1594 pub fn language(&self) -> Option<&Arc<Language>> {
1595 self.language.as_ref()
1596 }
1597
1598 /// Returns the [`Language`] at the given location.
1599 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1600 let offset = position.to_offset(self);
1601 let mut is_first = true;
1602 let start_anchor = self.anchor_before(offset);
1603 let end_anchor = self.anchor_after(offset);
1604 self.syntax_map
1605 .lock()
1606 .layers_for_range(offset..offset, &self.text, false)
1607 .filter(|layer| {
1608 if is_first {
1609 is_first = false;
1610 return true;
1611 }
1612
1613 layer
1614 .included_sub_ranges
1615 .map(|sub_ranges| {
1616 sub_ranges.iter().any(|sub_range| {
1617 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1618 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1619 !is_before_start && !is_after_end
1620 })
1621 })
1622 .unwrap_or(true)
1623 })
1624 .last()
1625 .map(|info| info.language.clone())
1626 .or_else(|| self.language.clone())
1627 }
1628
1629 /// Returns each [`Language`] for the active syntax layers at the given location.
1630 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1631 let offset = position.to_offset(self);
1632 let mut languages: Vec<Arc<Language>> = self
1633 .syntax_map
1634 .lock()
1635 .layers_for_range(offset..offset, &self.text, false)
1636 .map(|info| info.language.clone())
1637 .collect();
1638
1639 if languages.is_empty()
1640 && let Some(buffer_language) = self.language()
1641 {
1642 languages.push(buffer_language.clone());
1643 }
1644
1645 languages
1646 }
1647
1648 /// An integer version number that accounts for all updates besides
1649 /// the buffer's text itself (which is versioned via a version vector).
1650 pub fn non_text_state_update_count(&self) -> usize {
1651 self.non_text_state_update_count
1652 }
1653
1654 /// Whether the buffer is being parsed in the background.
1655 #[cfg(any(test, feature = "test-support"))]
1656 pub fn is_parsing(&self) -> bool {
1657 self.reparse.is_some()
1658 }
1659
1660 /// Indicates whether the buffer contains any regions that may be
1661 /// written in a language that hasn't been loaded yet.
1662 pub fn contains_unknown_injections(&self) -> bool {
1663 self.syntax_map.lock().contains_unknown_injections()
1664 }
1665
1666 #[cfg(any(test, feature = "test-support"))]
1667 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1668 self.sync_parse_timeout = timeout;
1669 }
1670
1671 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1672 match Arc::get_mut(&mut self.tree_sitter_data) {
1673 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1674 None => {
1675 let tree_sitter_data = TreeSitterData::new(snapshot);
1676 self.tree_sitter_data = Arc::new(tree_sitter_data)
1677 }
1678 }
1679 }
1680
1681 /// Called after an edit to synchronize the buffer's main parse tree with
1682 /// the buffer's new underlying state.
1683 ///
1684 /// Locks the syntax map and interpolates the edits since the last reparse
1685 /// into the foreground syntax tree.
1686 ///
1687 /// Then takes a stable snapshot of the syntax map before unlocking it.
1688 /// The snapshot with the interpolated edits is sent to a background thread,
1689 /// where we ask Tree-sitter to perform an incremental parse.
1690 ///
1691 /// Meanwhile, in the foreground if `may_block` is true, we block the main
1692 /// thread for up to 1ms waiting on the parse to complete. As soon as it
1693 /// completes, we proceed synchronously, unless a 1ms timeout elapses.
1694 ///
1695 /// If we time out waiting on the parse, we spawn a second task waiting
1696 /// until the parse does complete and return with the interpolated tree still
1697 /// in the foreground. When the background parse completes, call back into
1698 /// the main thread and assign the foreground parse state.
1699 ///
1700 /// If the buffer or grammar changed since the start of the background parse,
1701 /// initiate an additional reparse recursively. To avoid concurrent parses
1702 /// for the same buffer, we only initiate a new parse if we are not already
1703 /// parsing in the background.
1704 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1705 if self.text.version() != *self.tree_sitter_data.version() {
1706 self.invalidate_tree_sitter_data(self.text.snapshot());
1707 }
1708 if self.reparse.is_some() {
1709 return;
1710 }
1711 let language = if let Some(language) = self.language.clone() {
1712 language
1713 } else {
1714 return;
1715 };
1716
1717 let text = self.text_snapshot();
1718 let parsed_version = self.version();
1719
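        // Interpolate the edits made since the last parse into the syntax map, then
        // take a stable snapshot that can be reparsed off the main thread without
        // holding the lock.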
1720 let mut syntax_map = self.syntax_map.lock();
1721 syntax_map.interpolate(&text);
1722 let language_registry = syntax_map.language_registry();
1723 let mut syntax_snapshot = syntax_map.snapshot();
1724 drop(syntax_map);
1725
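        // Perform the incremental Tree-sitter parse on a background thread, using the
        // snapshot that already reflects the interpolated edits.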
1726 let parse_task = cx.background_spawn({
1727 let language = language.clone();
1728 let language_registry = language_registry.clone();
1729 async move {
1730 syntax_snapshot.reparse(&text, language_registry, language);
1731 syntax_snapshot
1732 }
1733 });
1734
1735 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
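        // When allowed to block, give the parse up to `sync_parse_timeout` to finish so
        // that small edits can be reflected synchronously.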
1736 if may_block {
1737 match cx
1738 .background_executor()
1739 .block_with_timeout(self.sync_parse_timeout, parse_task)
1740 {
1741 Ok(new_syntax_snapshot) => {
1742 self.did_finish_parsing(new_syntax_snapshot, cx);
1743 self.reparse = None;
1744 }
1745 Err(parse_task) => {
1746 self.reparse = Some(cx.spawn(async move |this, cx| {
1747 let new_syntax_map = cx.background_spawn(parse_task).await;
1748 this.update(cx, move |this, cx| {
1749 let grammar_changed = || {
1750 this.language.as_ref().is_none_or(|current_language| {
1751 !Arc::ptr_eq(&language, current_language)
1752 })
1753 };
1754 let language_registry_changed = || {
1755 new_syntax_map.contains_unknown_injections()
1756 && language_registry.is_some_and(|registry| {
1757 registry.version()
1758 != new_syntax_map.language_registry_version()
1759 })
1760 };
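                            // Parse again if the buffer text, its language, or the set
                            // of available languages changed while the background parse
                            // was running.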
1761 let parse_again = this.version.changed_since(&parsed_version)
1762 || language_registry_changed()
1763 || grammar_changed();
1764 this.did_finish_parsing(new_syntax_map, cx);
1765 this.reparse = None;
1766 if parse_again {
1767 this.reparse(cx, false);
1768 }
1769 })
1770 .ok();
1771 }));
1772 }
1773 }
1774 } else {
1775 self.reparse = Some(cx.spawn(async move |this, cx| {
1776 let new_syntax_map = cx.background_spawn(parse_task).await;
1777 this.update(cx, move |this, cx| {
1778 let grammar_changed = || {
1779 this.language.as_ref().is_none_or(|current_language| {
1780 !Arc::ptr_eq(&language, current_language)
1781 })
1782 };
1783 let language_registry_changed = || {
1784 new_syntax_map.contains_unknown_injections()
1785 && language_registry.is_some_and(|registry| {
1786 registry.version() != new_syntax_map.language_registry_version()
1787 })
1788 };
1789 let parse_again = this.version.changed_since(&parsed_version)
1790 || language_registry_changed()
1791 || grammar_changed();
1792 this.did_finish_parsing(new_syntax_map, cx);
1793 this.reparse = None;
1794 if parse_again {
1795 this.reparse(cx, false);
1796 }
1797 })
1798 .ok();
1799 }));
1800 }
1801 }
1802
1803 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1804 self.was_changed();
1805 self.non_text_state_update_count += 1;
1806 self.syntax_map.lock().did_parse(syntax_snapshot);
1807 self.request_autoindent(cx);
1808 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1809 self.invalidate_tree_sitter_data(self.text.snapshot());
1810 cx.emit(BufferEvent::Reparsed);
1811 cx.notify();
1812 }
1813
1814 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1815 self.parse_status.1.clone()
1816 }
1817
    /// Waits until the buffer is no longer parsing.
1819 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1820 let mut parse_status = self.parse_status();
1821 async move {
1822 while *parse_status.borrow() != ParseStatus::Idle {
1823 if parse_status.changed().await.is_err() {
1824 break;
1825 }
1826 }
1827 }
1828 }
1829
1830 /// Assign to the buffer a set of diagnostics created by a given language server.
1831 pub fn update_diagnostics(
1832 &mut self,
1833 server_id: LanguageServerId,
1834 diagnostics: DiagnosticSet,
1835 cx: &mut Context<Self>,
1836 ) {
1837 let lamport_timestamp = self.text.lamport_clock.tick();
1838 let op = Operation::UpdateDiagnostics {
1839 server_id,
1840 diagnostics: diagnostics.iter().cloned().collect(),
1841 lamport_timestamp,
1842 };
1843
1844 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1845 self.send_operation(op, true, cx);
1846 }
1847
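    /// Returns the diagnostics stored for the given language server, or the
    /// diagnostics for all language servers if `for_server` is `None`.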
1848 pub fn buffer_diagnostics(
1849 &self,
1850 for_server: Option<LanguageServerId>,
1851 ) -> Vec<&DiagnosticEntry<Anchor>> {
1852 match for_server {
1853 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1854 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1855 Err(_) => Vec::new(),
1856 },
1857 None => self
1858 .diagnostics
1859 .iter()
1860 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1861 .collect(),
1862 }
1863 }
1864
1865 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1866 if let Some(indent_sizes) = self.compute_autoindents() {
1867 let indent_sizes = cx.background_spawn(indent_sizes);
1868 match cx
1869 .background_executor()
1870 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1871 {
1872 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1873 Err(indent_sizes) => {
1874 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1875 let indent_sizes = indent_sizes.await;
1876 this.update(cx, |this, cx| {
1877 this.apply_autoindents(indent_sizes, cx);
1878 })
1879 .ok();
1880 }));
1881 }
1882 }
1883 } else {
1884 self.autoindent_requests.clear();
1885 for tx in self.wait_for_autoindent_txs.drain(..) {
1886 tx.send(()).ok();
1887 }
1888 }
1889 }
1890
1891 fn compute_autoindents(
1892 &self,
1893 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1894 let max_rows_between_yields = 100;
1895 let snapshot = self.snapshot();
1896 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1897 return None;
1898 }
1899
1900 let autoindent_requests = self.autoindent_requests.clone();
1901 Some(async move {
1902 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1903 for request in autoindent_requests {
1904 // Resolve each edited range to its row in the current buffer and in the
1905 // buffer before this batch of edits.
1906 let mut row_ranges = Vec::new();
1907 let mut old_to_new_rows = BTreeMap::new();
1908 let mut language_indent_sizes_by_new_row = Vec::new();
1909 for entry in &request.entries {
1910 let position = entry.range.start;
1911 let new_row = position.to_point(&snapshot).row;
1912 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1913 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1914
1915 if !entry.first_line_is_new {
1916 let old_row = position.to_point(&request.before_edit).row;
1917 old_to_new_rows.insert(old_row, new_row);
1918 }
1919 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1920 }
1921
1922 // Build a map containing the suggested indentation for each of the edited lines
1923 // with respect to the state of the buffer before these edits. This map is keyed
1924 // by the rows for these lines in the current state of the buffer.
1925 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1926 let old_edited_ranges =
1927 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1928 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1929 let mut language_indent_size = IndentSize::default();
1930 for old_edited_range in old_edited_ranges {
1931 let suggestions = request
1932 .before_edit
1933 .suggest_autoindents(old_edited_range.clone())
1934 .into_iter()
1935 .flatten();
1936 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1937 if let Some(suggestion) = suggestion {
1938 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1939
1940 // Find the indent size based on the language for this row.
1941 while let Some((row, size)) = language_indent_sizes.peek() {
1942 if *row > new_row {
1943 break;
1944 }
1945 language_indent_size = *size;
1946 language_indent_sizes.next();
1947 }
1948
1949 let suggested_indent = old_to_new_rows
1950 .get(&suggestion.basis_row)
1951 .and_then(|from_row| {
1952 Some(old_suggestions.get(from_row).copied()?.0)
1953 })
1954 .unwrap_or_else(|| {
1955 request
1956 .before_edit
1957 .indent_size_for_line(suggestion.basis_row)
1958 })
1959 .with_delta(suggestion.delta, language_indent_size);
1960 old_suggestions
1961 .insert(new_row, (suggested_indent, suggestion.within_error));
1962 }
1963 }
1964 yield_now().await;
1965 }
1966
1967 // Compute new suggestions for each line, but only include them in the result
1968 // if they differ from the old suggestion for that line.
1969 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1970 let mut language_indent_size = IndentSize::default();
1971 for (row_range, original_indent_column) in row_ranges {
1972 let new_edited_row_range = if request.is_block_mode {
1973 row_range.start..row_range.start + 1
1974 } else {
1975 row_range.clone()
1976 };
1977
1978 let suggestions = snapshot
1979 .suggest_autoindents(new_edited_row_range.clone())
1980 .into_iter()
1981 .flatten();
1982 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1983 if let Some(suggestion) = suggestion {
1984 // Find the indent size based on the language for this row.
1985 while let Some((row, size)) = language_indent_sizes.peek() {
1986 if *row > new_row {
1987 break;
1988 }
1989 language_indent_size = *size;
1990 language_indent_sizes.next();
1991 }
1992
1993 let suggested_indent = indent_sizes
1994 .get(&suggestion.basis_row)
1995 .copied()
1996 .map(|e| e.0)
1997 .unwrap_or_else(|| {
1998 snapshot.indent_size_for_line(suggestion.basis_row)
1999 })
2000 .with_delta(suggestion.delta, language_indent_size);
2001
2002 if old_suggestions.get(&new_row).is_none_or(
2003 |(old_indentation, was_within_error)| {
2004 suggested_indent != *old_indentation
2005 && (!suggestion.within_error || *was_within_error)
2006 },
2007 ) {
2008 indent_sizes.insert(
2009 new_row,
2010 (suggested_indent, request.ignore_empty_lines),
2011 );
2012 }
2013 }
2014 }
2015
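                    // In block mode, only the first row's indent was computed above.
                    // Shift the remaining rows of the block by the same delta so the
                    // inserted text keeps its internal relative indentation.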
2016 if let (true, Some(original_indent_column)) =
2017 (request.is_block_mode, original_indent_column)
2018 {
2019 let new_indent =
2020 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2021 *indent
2022 } else {
2023 snapshot.indent_size_for_line(row_range.start)
2024 };
2025 let delta = new_indent.len as i64 - original_indent_column as i64;
2026 if delta != 0 {
2027 for row in row_range.skip(1) {
2028 indent_sizes.entry(row).or_insert_with(|| {
2029 let mut size = snapshot.indent_size_for_line(row);
2030 if size.kind == new_indent.kind {
2031 match delta.cmp(&0) {
2032 Ordering::Greater => size.len += delta as u32,
2033 Ordering::Less => {
2034 size.len = size.len.saturating_sub(-delta as u32)
2035 }
2036 Ordering::Equal => {}
2037 }
2038 }
2039 (size, request.ignore_empty_lines)
2040 });
2041 }
2042 }
2043 }
2044
2045 yield_now().await;
2046 }
2047 }
2048
2049 indent_sizes
2050 .into_iter()
2051 .filter_map(|(row, (indent, ignore_empty_lines))| {
2052 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2053 None
2054 } else {
2055 Some((row, indent))
2056 }
2057 })
2058 .collect()
2059 })
2060 }
2061
2062 fn apply_autoindents(
2063 &mut self,
2064 indent_sizes: BTreeMap<u32, IndentSize>,
2065 cx: &mut Context<Self>,
2066 ) {
2067 self.autoindent_requests.clear();
2068 for tx in self.wait_for_autoindent_txs.drain(..) {
2069 tx.send(()).ok();
2070 }
2071
2072 let edits: Vec<_> = indent_sizes
2073 .into_iter()
2074 .filter_map(|(row, indent_size)| {
2075 let current_size = indent_size_for_line(self, row);
2076 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2077 })
2078 .collect();
2079
2080 let preserve_preview = self.preserve_preview();
2081 self.edit(edits, None, cx);
2082 if preserve_preview {
2083 self.refresh_preview();
2084 }
2085 }
2086
2087 /// Create a minimal edit that will cause the given row to be indented
2088 /// with the given size. After applying this edit, the length of the line
2089 /// will always be at least `new_size.len`.
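    ///
    /// A sketch of the expected behavior (marked `ignore`, so it is not compiled as a
    /// doc-test); it assumes space-kind indents whose indent character is `' '`:
    ///
    /// ```ignore
    /// // Growing a two-space indent to four spaces inserts two spaces at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     1,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(1, 0)..Point::new(1, 0), "  ".to_string()))
    /// );
    /// ```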
2090 pub fn edit_for_indent_size_adjustment(
2091 row: u32,
2092 current_size: IndentSize,
2093 new_size: IndentSize,
2094 ) -> Option<(Range<Point>, String)> {
2095 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
2097 Ordering::Greater => {
2098 let point = Point::new(row, 0);
2099 Some((
2100 point..point,
2101 iter::repeat(new_size.char())
2102 .take((new_size.len - current_size.len) as usize)
2103 .collect::<String>(),
2104 ))
2105 }
2106
2107 Ordering::Less => Some((
2108 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2109 String::new(),
2110 )),
2111
2112 Ordering::Equal => None,
2113 }
2114 } else {
2115 Some((
2116 Point::new(row, 0)..Point::new(row, current_size.len),
2117 iter::repeat(new_size.char())
2118 .take(new_size.len as usize)
2119 .collect::<String>(),
2120 ))
2121 }
2122 }
2123
2124 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2125 /// and the given new text.
2126 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2127 let old_text = self.as_rope().clone();
2128 let base_version = self.version();
2129 cx.background_executor()
2130 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2131 let old_text = old_text.to_string();
2132 let line_ending = LineEnding::detect(&new_text);
2133 LineEnding::normalize(&mut new_text);
2134 let edits = text_diff(&old_text, &new_text);
2135 Diff {
2136 base_version,
2137 line_ending,
2138 edits,
2139 }
2140 })
2141 }
2142
2143 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2145 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2146 let old_text = self.as_rope().clone();
2147 let line_ending = self.line_ending();
2148 let base_version = self.version();
2149 cx.background_spawn(async move {
2150 let ranges = trailing_whitespace_ranges(&old_text);
2151 let empty = Arc::<str>::from("");
2152 Diff {
2153 base_version,
2154 line_ending,
2155 edits: ranges
2156 .into_iter()
2157 .map(|range| (range, empty.clone()))
2158 .collect(),
2159 }
2160 })
2161 }
2162
2163 /// Ensures that the buffer ends with a single newline character, and
2164 /// no other whitespace. Skips if the buffer is empty.
2165 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2166 let len = self.len();
2167 if len == 0 {
2168 return;
2169 }
2170 let mut offset = len;
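        // Walk the rope backwards to locate the end of the last non-whitespace
        // character, then replace everything after it with a single newline (unless
        // the buffer already ends with exactly one).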
2171 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2172 let non_whitespace_len = chunk
2173 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2174 .len();
2175 offset -= chunk.len();
2176 offset += non_whitespace_len;
2177 if non_whitespace_len != 0 {
2178 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2179 return;
2180 }
2181 break;
2182 }
2183 }
2184 self.edit([(offset..len, "\n")], None, cx);
2185 }
2186
2187 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2188 /// calculated, then adjust the diff to account for those changes, and discard any
2189 /// parts of the diff that conflict with those changes.
2190 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2191 let snapshot = self.snapshot();
2192 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2193 let mut delta = 0;
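        // `delta` tracks how much the buffer's own edits have shifted offsets, so that
        // each surviving hunk can be translated into the current coordinate space.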
2194 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2195 while let Some(edit_since) = edits_since.peek() {
2196 // If the edit occurs after a diff hunk, then it does not
2197 // affect that hunk.
2198 if edit_since.old.start > range.end {
2199 break;
2200 }
2201 // If the edit precedes the diff hunk, then adjust the hunk
2202 // to reflect the edit.
2203 else if edit_since.old.end < range.start {
2204 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2205 edits_since.next();
2206 }
2207 // If the edit intersects a diff hunk, then discard that hunk.
2208 else {
2209 return None;
2210 }
2211 }
2212
2213 let start = (range.start as i64 + delta) as usize;
2214 let end = (range.end as i64 + delta) as usize;
2215 Some((start..end, new_text))
2216 });
2217
2218 self.start_transaction();
2219 self.text.set_line_ending(diff.line_ending);
2220 self.edit(adjusted_edits, None, cx);
2221 self.end_transaction(cx)
2222 }
2223
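    /// Returns whether the buffer contains edits that have not yet been saved,
    /// caching the answer for the current version to avoid rescanning history.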
2224 pub fn has_unsaved_edits(&self) -> bool {
2225 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2226
2227 if last_version == self.version {
2228 self.has_unsaved_edits
2229 .set((last_version, has_unsaved_edits));
2230 return has_unsaved_edits;
2231 }
2232
2233 let has_edits = self.has_edits_since(&self.saved_version);
2234 self.has_unsaved_edits
2235 .set((self.version.clone(), has_edits));
2236 has_edits
2237 }
2238
2239 /// Checks if the buffer has unsaved changes.
2240 pub fn is_dirty(&self) -> bool {
2241 if self.capability == Capability::ReadOnly {
2242 return false;
2243 }
2244 if self.has_conflict {
2245 return true;
2246 }
2247 match self.file.as_ref().map(|f| f.disk_state()) {
2248 Some(DiskState::New) | Some(DiskState::Deleted) => {
2249 !self.is_empty() && self.has_unsaved_edits()
2250 }
2251 _ => self.has_unsaved_edits(),
2252 }
2253 }
2254
2255 /// Marks the buffer as having a conflict regardless of current buffer state.
2256 pub fn set_conflict(&mut self) {
2257 self.has_conflict = true;
2258 }
2259
2260 /// Checks if the buffer and its file have both changed since the buffer
2261 /// was last saved or reloaded.
2262 pub fn has_conflict(&self) -> bool {
2263 if self.has_conflict {
2264 return true;
2265 }
2266 let Some(file) = self.file.as_ref() else {
2267 return false;
2268 };
2269 match file.disk_state() {
2270 DiskState::New => false,
2271 DiskState::Present { mtime } => match self.saved_mtime {
2272 Some(saved_mtime) => {
2273 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2274 }
2275 None => true,
2276 },
2277 DiskState::Deleted => false,
2278 }
2279 }
2280
2281 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2282 pub fn subscribe(&mut self) -> Subscription<usize> {
2283 self.text.subscribe()
2284 }
2285
2286 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2287 ///
2288 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
2290 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2291 if let Err(ix) = self
2292 .change_bits
2293 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2294 {
2295 self.change_bits.insert(ix, bit);
2296 }
2297 }
2298
2299 /// Set the change bit for all "listeners".
2300 fn was_changed(&mut self) {
2301 self.change_bits.retain(|change_bit| {
2302 change_bit
2303 .upgrade()
2304 .inspect(|bit| {
2305 _ = bit.replace(true);
2306 })
2307 .is_some()
2308 });
2309 }
2310
2311 /// Starts a transaction, if one is not already in-progress. When undoing or
2312 /// redoing edits, all of the edits performed within a transaction are undone
2313 /// or redone together.
2314 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2315 self.start_transaction_at(Instant::now())
2316 }
2317
2318 /// Starts a transaction, providing the current time. Subsequent transactions
2319 /// that occur within a short period of time will be grouped together. This
2320 /// is controlled by the buffer's undo grouping duration.
2321 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2322 self.transaction_depth += 1;
2323 if self.was_dirty_before_starting_transaction.is_none() {
2324 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2325 }
2326 self.text.start_transaction_at(now)
2327 }
2328
2329 /// Terminates the current transaction, if this is the outermost transaction.
2330 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2331 self.end_transaction_at(Instant::now(), cx)
2332 }
2333
2334 /// Terminates the current transaction, providing the current time. Subsequent transactions
2335 /// that occur within a short period of time will be grouped together. This
2336 /// is controlled by the buffer's undo grouping duration.
2337 pub fn end_transaction_at(
2338 &mut self,
2339 now: Instant,
2340 cx: &mut Context<Self>,
2341 ) -> Option<TransactionId> {
2342 assert!(self.transaction_depth > 0);
2343 self.transaction_depth -= 1;
2344 let was_dirty = if self.transaction_depth == 0 {
2345 self.was_dirty_before_starting_transaction.take().unwrap()
2346 } else {
2347 false
2348 };
2349 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2350 self.did_edit(&start_version, was_dirty, cx);
2351 Some(transaction_id)
2352 } else {
2353 None
2354 }
2355 }
2356
2357 /// Manually add a transaction to the buffer's undo history.
2358 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2359 self.text.push_transaction(transaction, now);
2360 }
2361
2362 /// Differs from `push_transaction` in that it does not clear the redo
2363 /// stack. Intended to be used to create a parent transaction to merge
2364 /// potential child transactions into.
2365 ///
2366 /// The caller is responsible for removing it from the undo history using
2367 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2368 /// are merged into this transaction, the caller is responsible for ensuring
2369 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2370 /// cleared is to create transactions with the usual `start_transaction` and
2371 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
2373 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2374 self.text.push_empty_transaction(now)
2375 }
2376
2377 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2379 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2380 self.text.finalize_last_transaction()
2381 }
2382
2383 /// Manually group all changes since a given transaction.
2384 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2385 self.text.group_until_transaction(transaction_id);
2386 }
2387
2388 /// Manually remove a transaction from the buffer's undo history
2389 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2390 self.text.forget_transaction(transaction_id)
2391 }
2392
2393 /// Retrieve a transaction from the buffer's undo history
2394 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2395 self.text.get_transaction(transaction_id)
2396 }
2397
2398 /// Manually merge two transactions in the buffer's undo history.
2399 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2400 self.text.merge_transactions(transaction, destination);
2401 }
2402
2403 /// Waits for the buffer to receive operations with the given timestamps.
2404 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2405 &mut self,
2406 edit_ids: It,
2407 ) -> impl Future<Output = Result<()>> + use<It> {
2408 self.text.wait_for_edits(edit_ids)
2409 }
2410
2411 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2412 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2413 &mut self,
2414 anchors: It,
2415 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2416 self.text.wait_for_anchors(anchors)
2417 }
2418
2419 /// Waits for the buffer to receive operations up to the given version.
2420 pub fn wait_for_version(
2421 &mut self,
2422 version: clock::Global,
2423 ) -> impl Future<Output = Result<()>> + use<> {
2424 self.text.wait_for_version(version)
2425 }
2426
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2429 pub fn give_up_waiting(&mut self) {
2430 self.text.give_up_waiting();
2431 }
2432
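    /// Returns a receiver that resolves once all pending autoindent requests have
    /// been applied, or `None` if no autoindent requests are pending.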
2433 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2434 let mut rx = None;
2435 if !self.autoindent_requests.is_empty() {
2436 let channel = oneshot::channel();
2437 self.wait_for_autoindent_txs.push(channel.0);
2438 rx = Some(channel.1);
2439 }
2440 rx
2441 }
2442
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2444 pub fn set_active_selections(
2445 &mut self,
2446 selections: Arc<[Selection<Anchor>]>,
2447 line_mode: bool,
2448 cursor_shape: CursorShape,
2449 cx: &mut Context<Self>,
2450 ) {
2451 let lamport_timestamp = self.text.lamport_clock.tick();
2452 self.remote_selections.insert(
2453 self.text.replica_id(),
2454 SelectionSet {
2455 selections: selections.clone(),
2456 lamport_timestamp,
2457 line_mode,
2458 cursor_shape,
2459 },
2460 );
2461 self.send_operation(
2462 Operation::UpdateSelections {
2463 selections,
2464 line_mode,
2465 lamport_timestamp,
2466 cursor_shape,
2467 },
2468 true,
2469 cx,
2470 );
2471 self.non_text_state_update_count += 1;
2472 cx.notify();
2473 }
2474
2475 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2476 /// this replica.
2477 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2478 if self
2479 .remote_selections
2480 .get(&self.text.replica_id())
2481 .is_none_or(|set| !set.selections.is_empty())
2482 {
2483 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2484 }
2485 }
2486
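    /// Stores a set of selections for the agent replica ([`ReplicaId::AGENT`]).
    /// Unlike [`Buffer::set_active_selections`], this does not broadcast an
    /// operation to the buffer's other replicas.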
2487 pub fn set_agent_selections(
2488 &mut self,
2489 selections: Arc<[Selection<Anchor>]>,
2490 line_mode: bool,
2491 cursor_shape: CursorShape,
2492 cx: &mut Context<Self>,
2493 ) {
2494 let lamport_timestamp = self.text.lamport_clock.tick();
2495 self.remote_selections.insert(
2496 ReplicaId::AGENT,
2497 SelectionSet {
2498 selections,
2499 lamport_timestamp,
2500 line_mode,
2501 cursor_shape,
2502 },
2503 );
2504 self.non_text_state_update_count += 1;
2505 cx.notify();
2506 }
2507
2508 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2509 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2510 }
2511
2512 /// Replaces the buffer's entire text.
2513 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2514 where
2515 T: Into<Arc<str>>,
2516 {
2517 self.autoindent_requests.clear();
2518 self.edit([(0..self.len(), text)], None, cx)
2519 }
2520
2521 /// Appends the given text to the end of the buffer.
2522 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2523 where
2524 T: Into<Arc<str>>,
2525 {
2526 self.edit([(self.len()..self.len(), text)], None, cx)
2527 }
2528
2529 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2530 /// delete, and a string of text to insert at that location.
2531 ///
2532 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2533 /// request for the edited ranges, which will be processed when the buffer finishes
2534 /// parsing.
2535 ///
    /// Parsing takes place at the end of a transaction, and may complete synchronously
    /// or asynchronously, depending on the changes.
2538 pub fn edit<I, S, T>(
2539 &mut self,
2540 edits_iter: I,
2541 autoindent_mode: Option<AutoindentMode>,
2542 cx: &mut Context<Self>,
2543 ) -> Option<clock::Lamport>
2544 where
2545 I: IntoIterator<Item = (Range<S>, T)>,
2546 S: ToOffset,
2547 T: Into<Arc<str>>,
2548 {
2549 // Skip invalid edits and coalesce contiguous ones.
2550 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2551
2552 for (range, new_text) in edits_iter {
2553 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2554
2555 if range.start > range.end {
2556 mem::swap(&mut range.start, &mut range.end);
2557 }
2558 let new_text = new_text.into();
2559 if !new_text.is_empty() || !range.is_empty() {
2560 if let Some((prev_range, prev_text)) = edits.last_mut()
2561 && prev_range.end >= range.start
2562 {
2563 prev_range.end = cmp::max(prev_range.end, range.end);
2564 *prev_text = format!("{prev_text}{new_text}").into();
2565 } else {
2566 edits.push((range, new_text));
2567 }
2568 }
2569 }
2570 if edits.is_empty() {
2571 return None;
2572 }
2573
2574 self.start_transaction();
2575 self.pending_autoindent.take();
2576 let autoindent_request = autoindent_mode
2577 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2578
2579 let edit_operation = self.text.edit(edits.iter().cloned());
2580 let edit_id = edit_operation.timestamp();
2581
2582 if let Some((before_edit, mode)) = autoindent_request {
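            // For each edit that landed in a language with auto-indent enabled, record
            // an entry describing which rows of the edit should be re-indented once the
            // buffer has reparsed.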
2583 let mut delta = 0isize;
2584 let mut previous_setting = None;
2585 let entries: Vec<_> = edits
2586 .into_iter()
2587 .enumerate()
2588 .zip(&edit_operation.as_edit().unwrap().new_text)
2589 .filter(|((_, (range, _)), _)| {
2590 let language = before_edit.language_at(range.start);
2591 let language_id = language.map(|l| l.id());
2592 if let Some((cached_language_id, auto_indent)) = previous_setting
2593 && cached_language_id == language_id
2594 {
2595 auto_indent
2596 } else {
2597 // The auto-indent setting is not present in editorconfigs, hence
2598 // we can avoid passing the file here.
2599 let auto_indent =
2600 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2601 previous_setting = Some((language_id, auto_indent));
2602 auto_indent
2603 }
2604 })
2605 .map(|((ix, (range, _)), new_text)| {
2606 let new_text_length = new_text.len();
2607 let old_start = range.start.to_point(&before_edit);
2608 let new_start = (delta + range.start as isize) as usize;
2609 let range_len = range.end - range.start;
2610 delta += new_text_length as isize - range_len as isize;
2611
2612 // Decide what range of the insertion to auto-indent, and whether
2613 // the first line of the insertion should be considered a newly-inserted line
2614 // or an edit to an existing line.
2615 let mut range_of_insertion_to_indent = 0..new_text_length;
2616 let mut first_line_is_new = true;
2617
2618 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2619 let old_line_end = before_edit.line_len(old_start.row);
2620
2621 if old_start.column > old_line_start {
2622 first_line_is_new = false;
2623 }
2624
2625 if !new_text.contains('\n')
2626 && (old_start.column + (range_len as u32) < old_line_end
2627 || old_line_end == old_line_start)
2628 {
2629 first_line_is_new = false;
2630 }
2631
2632 // When inserting text starting with a newline, avoid auto-indenting the
2633 // previous line.
2634 if new_text.starts_with('\n') {
2635 range_of_insertion_to_indent.start += 1;
2636 first_line_is_new = true;
2637 }
2638
2639 let mut original_indent_column = None;
2640 if let AutoindentMode::Block {
2641 original_indent_columns,
2642 } = &mode
2643 {
2644 original_indent_column = Some(if new_text.starts_with('\n') {
2645 indent_size_for_text(
2646 new_text[range_of_insertion_to_indent.clone()].chars(),
2647 )
2648 .len
2649 } else {
2650 original_indent_columns
2651 .get(ix)
2652 .copied()
2653 .flatten()
2654 .unwrap_or_else(|| {
2655 indent_size_for_text(
2656 new_text[range_of_insertion_to_indent.clone()].chars(),
2657 )
2658 .len
2659 })
2660 });
2661
2662 // Avoid auto-indenting the line after the edit.
2663 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2664 range_of_insertion_to_indent.end -= 1;
2665 }
2666 }
2667
2668 AutoindentRequestEntry {
2669 first_line_is_new,
2670 original_indent_column,
2671 indent_size: before_edit.language_indent_size_at(range.start, cx),
2672 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2673 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2674 }
2675 })
2676 .collect();
2677
2678 if !entries.is_empty() {
2679 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2680 before_edit,
2681 entries,
2682 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2683 ignore_empty_lines: false,
2684 }));
2685 }
2686 }
2687
2688 self.end_transaction(cx);
2689 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2690 Some(edit_id)
2691 }
2692
2693 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2694 self.was_changed();
2695
2696 if self.edits_since::<usize>(old_version).next().is_none() {
2697 return;
2698 }
2699
2700 self.reparse(cx, true);
2701 cx.emit(BufferEvent::Edited);
2702 if was_dirty != self.is_dirty() {
2703 cx.emit(BufferEvent::DirtyChanged);
2704 }
2705 cx.notify();
2706 }
2707
2708 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2709 where
2710 I: IntoIterator<Item = Range<T>>,
2711 T: ToOffset + Copy,
2712 {
2713 let before_edit = self.snapshot();
2714 let entries = ranges
2715 .into_iter()
2716 .map(|range| AutoindentRequestEntry {
2717 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2718 first_line_is_new: true,
2719 indent_size: before_edit.language_indent_size_at(range.start, cx),
2720 original_indent_column: None,
2721 })
2722 .collect();
2723 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2724 before_edit,
2725 entries,
2726 is_block_mode: false,
2727 ignore_empty_lines: true,
2728 }));
2729 self.request_autoindent(cx);
2730 }
2731
2732 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2733 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2734 pub fn insert_empty_line(
2735 &mut self,
2736 position: impl ToPoint,
2737 space_above: bool,
2738 space_below: bool,
2739 cx: &mut Context<Self>,
2740 ) -> Point {
2741 let mut position = position.to_point(self);
2742
2743 self.start_transaction();
2744
2745 self.edit(
2746 [(position..position, "\n")],
2747 Some(AutoindentMode::EachLine),
2748 cx,
2749 );
2750
2751 if position.column > 0 {
2752 position += Point::new(1, 0);
2753 }
2754
2755 if !self.is_line_blank(position.row) {
2756 self.edit(
2757 [(position..position, "\n")],
2758 Some(AutoindentMode::EachLine),
2759 cx,
2760 );
2761 }
2762
2763 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2764 self.edit(
2765 [(position..position, "\n")],
2766 Some(AutoindentMode::EachLine),
2767 cx,
2768 );
2769 position.row += 1;
2770 }
2771
2772 if space_below
2773 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2774 {
2775 self.edit(
2776 [(position..position, "\n")],
2777 Some(AutoindentMode::EachLine),
2778 cx,
2779 );
2780 }
2781
2782 self.end_transaction(cx);
2783
2784 position
2785 }
2786
2787 /// Applies the given remote operations to the buffer.
2788 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2789 self.pending_autoindent.take();
2790 let was_dirty = self.is_dirty();
2791 let old_version = self.version.clone();
2792 let mut deferred_ops = Vec::new();
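        // Text operations are collected and applied in bulk below; all other operations
        // are applied immediately when possible, or deferred until the operations they
        // depend on have arrived.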
2793 let buffer_ops = ops
2794 .into_iter()
2795 .filter_map(|op| match op {
2796 Operation::Buffer(op) => Some(op),
2797 _ => {
2798 if self.can_apply_op(&op) {
2799 self.apply_op(op, cx);
2800 } else {
2801 deferred_ops.push(op);
2802 }
2803 None
2804 }
2805 })
2806 .collect::<Vec<_>>();
2807 for operation in buffer_ops.iter() {
2808 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2809 }
2810 self.text.apply_ops(buffer_ops);
2811 self.deferred_ops.insert(deferred_ops);
2812 self.flush_deferred_ops(cx);
2813 self.did_edit(&old_version, was_dirty, cx);
2814 // Notify independently of whether the buffer was edited as the operations could include a
2815 // selection update.
2816 cx.notify();
2817 }
2818
2819 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2820 let mut deferred_ops = Vec::new();
2821 for op in self.deferred_ops.drain().iter().cloned() {
2822 if self.can_apply_op(&op) {
2823 self.apply_op(op, cx);
2824 } else {
2825 deferred_ops.push(op);
2826 }
2827 }
2828 self.deferred_ops.insert(deferred_ops);
2829 }
2830
2831 pub fn has_deferred_ops(&self) -> bool {
2832 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2833 }
2834
2835 fn can_apply_op(&self, operation: &Operation) -> bool {
2836 match operation {
2837 Operation::Buffer(_) => {
2838 unreachable!("buffer operations should never be applied at this layer")
2839 }
2840 Operation::UpdateDiagnostics {
2841 diagnostics: diagnostic_set,
2842 ..
2843 } => diagnostic_set.iter().all(|diagnostic| {
2844 self.text.can_resolve(&diagnostic.range.start)
2845 && self.text.can_resolve(&diagnostic.range.end)
2846 }),
2847 Operation::UpdateSelections { selections, .. } => selections
2848 .iter()
2849 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2850 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2851 }
2852 }
2853
2854 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2855 match operation {
2856 Operation::Buffer(_) => {
2857 unreachable!("buffer operations should never be applied at this layer")
2858 }
2859 Operation::UpdateDiagnostics {
2860 server_id,
2861 diagnostics: diagnostic_set,
2862 lamport_timestamp,
2863 } => {
2864 let snapshot = self.snapshot();
2865 self.apply_diagnostic_update(
2866 server_id,
2867 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2868 lamport_timestamp,
2869 cx,
2870 );
2871 }
2872 Operation::UpdateSelections {
2873 selections,
2874 lamport_timestamp,
2875 line_mode,
2876 cursor_shape,
2877 } => {
2878 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2879 && set.lamport_timestamp > lamport_timestamp
2880 {
2881 return;
2882 }
2883
2884 self.remote_selections.insert(
2885 lamport_timestamp.replica_id,
2886 SelectionSet {
2887 selections,
2888 lamport_timestamp,
2889 line_mode,
2890 cursor_shape,
2891 },
2892 );
2893 self.text.lamport_clock.observe(lamport_timestamp);
2894 self.non_text_state_update_count += 1;
2895 }
2896 Operation::UpdateCompletionTriggers {
2897 triggers,
2898 lamport_timestamp,
2899 server_id,
2900 } => {
2901 if triggers.is_empty() {
2902 self.completion_triggers_per_language_server
2903 .remove(&server_id);
2904 self.completion_triggers = self
2905 .completion_triggers_per_language_server
2906 .values()
2907 .flat_map(|triggers| triggers.iter().cloned())
2908 .collect();
2909 } else {
2910 self.completion_triggers_per_language_server
2911 .insert(server_id, triggers.iter().cloned().collect());
2912 self.completion_triggers.extend(triggers);
2913 }
2914 self.text.lamport_clock.observe(lamport_timestamp);
2915 }
2916 Operation::UpdateLineEnding {
2917 line_ending,
2918 lamport_timestamp,
2919 } => {
2920 self.text.set_line_ending(line_ending);
2921 self.text.lamport_clock.observe(lamport_timestamp);
2922 }
2923 }
2924 }
2925
2926 fn apply_diagnostic_update(
2927 &mut self,
2928 server_id: LanguageServerId,
2929 diagnostics: DiagnosticSet,
2930 lamport_timestamp: clock::Lamport,
2931 cx: &mut Context<Self>,
2932 ) {
2933 if lamport_timestamp > self.diagnostics_timestamp {
2934 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2935 if diagnostics.is_empty() {
2936 if let Ok(ix) = ix {
2937 self.diagnostics.remove(ix);
2938 }
2939 } else {
2940 match ix {
2941 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2942 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2943 };
2944 }
2945 self.diagnostics_timestamp = lamport_timestamp;
2946 self.non_text_state_update_count += 1;
2947 self.text.lamport_clock.observe(lamport_timestamp);
2948 cx.notify();
2949 cx.emit(BufferEvent::DiagnosticsUpdated);
2950 }
2951 }
2952
2953 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2954 self.was_changed();
2955 cx.emit(BufferEvent::Operation {
2956 operation,
2957 is_local,
2958 });
2959 }
2960
2961 /// Removes the selections for a given peer.
2962 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2963 self.remote_selections.remove(&replica_id);
2964 cx.notify();
2965 }
2966
2967 /// Undoes the most recent transaction.
2968 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2969 let was_dirty = self.is_dirty();
2970 let old_version = self.version.clone();
2971
2972 if let Some((transaction_id, operation)) = self.text.undo() {
2973 self.send_operation(Operation::Buffer(operation), true, cx);
2974 self.did_edit(&old_version, was_dirty, cx);
2975 Some(transaction_id)
2976 } else {
2977 None
2978 }
2979 }
2980
2981 /// Manually undoes a specific transaction in the buffer's undo history.
2982 pub fn undo_transaction(
2983 &mut self,
2984 transaction_id: TransactionId,
2985 cx: &mut Context<Self>,
2986 ) -> bool {
2987 let was_dirty = self.is_dirty();
2988 let old_version = self.version.clone();
2989 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2990 self.send_operation(Operation::Buffer(operation), true, cx);
2991 self.did_edit(&old_version, was_dirty, cx);
2992 true
2993 } else {
2994 false
2995 }
2996 }
2997
2998 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2999 pub fn undo_to_transaction(
3000 &mut self,
3001 transaction_id: TransactionId,
3002 cx: &mut Context<Self>,
3003 ) -> bool {
3004 let was_dirty = self.is_dirty();
3005 let old_version = self.version.clone();
3006
3007 let operations = self.text.undo_to_transaction(transaction_id);
3008 let undone = !operations.is_empty();
3009 for operation in operations {
3010 self.send_operation(Operation::Buffer(operation), true, cx);
3011 }
3012 if undone {
3013 self.did_edit(&old_version, was_dirty, cx)
3014 }
3015 undone
3016 }
3017
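    /// Manually undoes a specific set of operations, identified by their lamport
    /// timestamps.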
3018 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3019 let was_dirty = self.is_dirty();
3020 let operation = self.text.undo_operations(counts);
3021 let old_version = self.version.clone();
3022 self.send_operation(Operation::Buffer(operation), true, cx);
3023 self.did_edit(&old_version, was_dirty, cx);
3024 }
3025
    /// Redoes the most recently undone transaction.
3027 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3028 let was_dirty = self.is_dirty();
3029 let old_version = self.version.clone();
3030
3031 if let Some((transaction_id, operation)) = self.text.redo() {
3032 self.send_operation(Operation::Buffer(operation), true, cx);
3033 self.did_edit(&old_version, was_dirty, cx);
3034 Some(transaction_id)
3035 } else {
3036 None
3037 }
3038 }
3039
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3041 pub fn redo_to_transaction(
3042 &mut self,
3043 transaction_id: TransactionId,
3044 cx: &mut Context<Self>,
3045 ) -> bool {
3046 let was_dirty = self.is_dirty();
3047 let old_version = self.version.clone();
3048
3049 let operations = self.text.redo_to_transaction(transaction_id);
3050 let redone = !operations.is_empty();
3051 for operation in operations {
3052 self.send_operation(Operation::Buffer(operation), true, cx);
3053 }
3054 if redone {
3055 self.did_edit(&old_version, was_dirty, cx)
3056 }
3057 redone
3058 }
3059
3060 /// Override current completion triggers with the user-provided completion triggers.
3061 pub fn set_completion_triggers(
3062 &mut self,
3063 server_id: LanguageServerId,
3064 triggers: BTreeSet<String>,
3065 cx: &mut Context<Self>,
3066 ) {
3067 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
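        // An empty trigger set removes this server's contribution and rebuilds the
        // merged set from the remaining servers; otherwise the new triggers are
        // unioned into the merged set.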
3068 if triggers.is_empty() {
3069 self.completion_triggers_per_language_server
3070 .remove(&server_id);
3071 self.completion_triggers = self
3072 .completion_triggers_per_language_server
3073 .values()
3074 .flat_map(|triggers| triggers.iter().cloned())
3075 .collect();
3076 } else {
3077 self.completion_triggers_per_language_server
3078 .insert(server_id, triggers.clone());
3079 self.completion_triggers.extend(triggers.iter().cloned());
3080 }
3081 self.send_operation(
3082 Operation::UpdateCompletionTriggers {
3083 triggers: triggers.into_iter().collect(),
3084 lamport_timestamp: self.completion_triggers_timestamp,
3085 server_id,
3086 },
3087 true,
3088 cx,
3089 );
3090 cx.notify();
3091 }
3092
3093 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the language server, which returns a list of trigger characters for completions.
3095 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3096 &self.completion_triggers
3097 }
3098
3099 /// Call this directly after performing edits to prevent the preview tab
3100 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3101 /// to return false until there are additional edits.
3102 pub fn refresh_preview(&mut self) {
3103 self.preview_version = self.version.clone();
3104 }
3105
3106 /// Whether we should preserve the preview status of a tab containing this buffer.
3107 pub fn preserve_preview(&self) -> bool {
3108 !self.has_edits_since(&self.preview_version)
3109 }
3110}
3111
3112#[doc(hidden)]
3113#[cfg(any(test, feature = "test-support"))]
3114impl Buffer {
3115 pub fn edit_via_marked_text(
3116 &mut self,
3117 marked_string: &str,
3118 autoindent_mode: Option<AutoindentMode>,
3119 cx: &mut Context<Self>,
3120 ) {
3121 let edits = self.edits_for_marked_text(marked_string);
3122 self.edit(edits, autoindent_mode, cx);
3123 }
3124
3125 pub fn set_group_interval(&mut self, group_interval: Duration) {
3126 self.text.set_group_interval(group_interval);
3127 }
3128
3129 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3130 where
3131 T: rand::Rng,
3132 {
3133 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3134 let mut last_end = None;
3135 for _ in 0..old_range_count {
3136 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3137 break;
3138 }
3139
3140 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3141 let mut range = self.random_byte_range(new_start, rng);
3142 if rng.random_bool(0.2) {
3143 mem::swap(&mut range.start, &mut range.end);
3144 }
3145 last_end = Some(range.end);
3146
3147 let new_text_len = rng.random_range(0..10);
3148 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3149 new_text = new_text.to_uppercase();
3150
3151 edits.push((range, new_text));
3152 }
3153 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3154 self.edit(edits, None, cx);
3155 }
3156
3157 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3158 let was_dirty = self.is_dirty();
3159 let old_version = self.version.clone();
3160
3161 let ops = self.text.randomly_undo_redo(rng);
3162 if !ops.is_empty() {
3163 for op in ops {
3164 self.send_operation(Operation::Buffer(op), true, cx);
3165 self.did_edit(&old_version, was_dirty, cx);
3166 }
3167 }
3168 }
3169}
3170
3171impl EventEmitter<BufferEvent> for Buffer {}
3172
3173impl Deref for Buffer {
3174 type Target = TextBuffer;
3175
3176 fn deref(&self) -> &Self::Target {
3177 &self.text
3178 }
3179}
3180
3181impl BufferSnapshot {
    /// Returns the [`IndentSize`] for a given line, based on the whitespace at the
    /// start of that line in the buffer.
3184 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3185 indent_size_for_line(self, row)
3186 }
3187
3188 /// Returns [`IndentSize`] for a given position that respects user settings
3189 /// and language preferences.
3190 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3191 let settings = language_settings(
3192 self.language_at(position).map(|l| l.name()),
3193 self.file(),
3194 cx,
3195 );
3196 if settings.hard_tabs {
3197 IndentSize::tab()
3198 } else {
3199 IndentSize::spaces(settings.tab_size.get())
3200 }
3201 }
3202
3203 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3204 /// is passed in as `single_indent_size`.
3205 pub fn suggested_indents(
3206 &self,
3207 rows: impl Iterator<Item = u32>,
3208 single_indent_size: IndentSize,
3209 ) -> BTreeMap<u32, IndentSize> {
3210 let mut result = BTreeMap::new();
3211
3212 for row_range in contiguous_ranges(rows, 10) {
3213 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3214 Some(suggestions) => suggestions,
3215 _ => break,
3216 };
3217
3218 for (row, suggestion) in row_range.zip(suggestions) {
3219 let indent_size = if let Some(suggestion) = suggestion {
3220 result
3221 .get(&suggestion.basis_row)
3222 .copied()
3223 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3224 .with_delta(suggestion.delta, single_indent_size)
3225 } else {
3226 self.indent_size_for_line(row)
3227 };
3228
3229 result.insert(row, indent_size);
3230 }
3231 }
3232
3233 result
3234 }
3235
3236 fn suggest_autoindents(
3237 &self,
3238 row_range: Range<u32>,
3239 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3240 let config = &self.language.as_ref()?.config;
3241 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3242
3243 #[derive(Debug, Clone)]
3244 struct StartPosition {
3245 start: Point,
3246 suffix: SharedString,
3247 language: Arc<Language>,
3248 }
3249
3250 // Find the suggested indentation ranges based on the syntax tree.
3251 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3252 let end = Point::new(row_range.end, 0);
3253 let range = (start..end).to_offset(&self.text);
3254 let mut matches = self.syntax.matches_with_options(
3255 range.clone(),
3256 &self.text,
3257 TreeSitterOptions {
3258 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3259 max_start_depth: None,
3260 },
3261 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3262 );
3263 let indent_configs = matches
3264 .grammars()
3265 .iter()
3266 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3267 .collect::<Vec<_>>();
3268
3269 let mut indent_ranges = Vec::<Range<Point>>::new();
3270 let mut start_positions = Vec::<StartPosition>::new();
3271 let mut outdent_positions = Vec::<Point>::new();
3272 while let Some(mat) = matches.peek() {
3273 let mut start: Option<Point> = None;
3274 let mut end: Option<Point> = None;
3275
3276 let config = indent_configs[mat.grammar_index];
3277 for capture in mat.captures {
3278 if capture.index == config.indent_capture_ix {
3279 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3280 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3281 } else if Some(capture.index) == config.start_capture_ix {
3282 start = Some(Point::from_ts_point(capture.node.end_position()));
3283 } else if Some(capture.index) == config.end_capture_ix {
3284 end = Some(Point::from_ts_point(capture.node.start_position()));
3285 } else if Some(capture.index) == config.outdent_capture_ix {
3286 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3287 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3288 start_positions.push(StartPosition {
3289 start: Point::from_ts_point(capture.node.start_position()),
3290 suffix: suffix.clone(),
3291 language: mat.language.clone(),
3292 });
3293 }
3294 }
3295
3296 matches.advance();
3297 if let Some((start, end)) = start.zip(end) {
3298 if start.row == end.row {
3299 continue;
3300 }
3301 let range = start..end;
3302 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3303 Err(ix) => indent_ranges.insert(ix, range),
3304 Ok(ix) => {
3305 let prev_range = &mut indent_ranges[ix];
3306 prev_range.end = prev_range.end.max(range.end);
3307 }
3308 }
3309 }
3310 }
3311
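        // Collect the ranges covered by syntax errors so that indent suggestions
        // computed inside them can be marked as `within_error`.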
3312 let mut error_ranges = Vec::<Range<Point>>::new();
3313 let mut matches = self
3314 .syntax
3315 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3316 while let Some(mat) = matches.peek() {
3317 let node = mat.captures[0].node;
3318 let start = Point::from_ts_point(node.start_position());
3319 let end = Point::from_ts_point(node.end_position());
3320 let range = start..end;
3321 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3322 Ok(ix) | Err(ix) => ix,
3323 };
3324 let mut end_ix = ix;
3325 while let Some(existing_range) = error_ranges.get(end_ix) {
3326 if existing_range.end < end {
3327 end_ix += 1;
3328 } else {
3329 break;
3330 }
3331 }
3332 error_ranges.splice(ix..end_ix, [range]);
3333 matches.advance();
3334 }
3335
3336 outdent_positions.sort();
3337 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and truncate that range's end to the outdent position.
3340 if let Some(range_to_truncate) = indent_ranges
3341 .iter_mut()
3342 .rfind(|indent_range| indent_range.contains(&outdent_position))
3343 {
3344 range_to_truncate.end = outdent_position;
3345 }
3346 }
3347
3348 start_positions.sort_by_key(|b| b.start);
3349
        // Find the suggested indentation increases and decreases based on regexes.
3351 let mut regex_outdent_map = HashMap::default();
3352 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3353 let mut start_positions_iter = start_positions.iter().peekable();
3354
3355 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3356 self.for_each_line(
3357 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3358 ..Point::new(row_range.end, 0),
3359 |row, line| {
3360 let indent_len = self.indent_size_for_line(row).len;
3361 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3362 let row_language_config = row_language
3363 .as_ref()
3364 .map(|lang| lang.config())
3365 .unwrap_or(config);
3366
3367 if row_language_config
3368 .decrease_indent_pattern
3369 .as_ref()
3370 .is_some_and(|regex| regex.is_match(line))
3371 {
3372 indent_change_rows.push((row, Ordering::Less));
3373 }
3374 if row_language_config
3375 .increase_indent_pattern
3376 .as_ref()
3377 .is_some_and(|regex| regex.is_match(line))
3378 {
3379 indent_change_rows.push((row + 1, Ordering::Greater));
3380 }
3381 while let Some(pos) = start_positions_iter.peek() {
3382 if pos.start.row < row {
3383 let pos = start_positions_iter.next().unwrap().clone();
3384 last_seen_suffix
3385 .entry(pos.suffix.to_string())
3386 .or_default()
3387 .push(pos);
3388 } else {
3389 break;
3390 }
3391 }
3392 for rule in &row_language_config.decrease_indent_patterns {
3393 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3394 let row_start_column = self.indent_size_for_line(row).len;
3395 let basis_row = rule
3396 .valid_after
3397 .iter()
3398 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3399 .flatten()
3400 .filter(|pos| {
3401 row_language
3402 .as_ref()
3403 .or(self.language.as_ref())
3404 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3405 })
3406 .filter(|pos| pos.start.column <= row_start_column)
3407 .max_by_key(|pos| pos.start.row);
3408 if let Some(outdent_to) = basis_row {
3409 regex_outdent_map.insert(row, outdent_to.start.row);
3410 }
3411 break;
3412 }
3413 }
3414 },
3415 );
3416
3417 let mut indent_changes = indent_change_rows.into_iter().peekable();
3418 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3419 prev_non_blank_row.unwrap_or(0)
3420 } else {
3421 row_range.start.saturating_sub(1)
3422 };
3423
3424 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3425 Some(row_range.map(move |row| {
3426 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3427
3428 let mut indent_from_prev_row = false;
3429 let mut outdent_from_prev_row = false;
3430 let mut outdent_to_row = u32::MAX;
3431 let mut from_regex = false;
3432
3433 while let Some((indent_row, delta)) = indent_changes.peek() {
3434 match indent_row.cmp(&row) {
3435 Ordering::Equal => match delta {
3436 Ordering::Less => {
3437 from_regex = true;
3438 outdent_from_prev_row = true
3439 }
3440 Ordering::Greater => {
3441 indent_from_prev_row = true;
3442 from_regex = true
3443 }
3444 _ => {}
3445 },
3446
3447 Ordering::Greater => break,
3448 Ordering::Less => {}
3449 }
3450
3451 indent_changes.next();
3452 }
3453
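            // Use the syntax-derived indent ranges to decide whether this row should be
            // indented relative to the previous row, or outdented back to the row where
            // an enclosing range began.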
3454 for range in &indent_ranges {
3455 if range.start.row >= row {
3456 break;
3457 }
3458 if range.start.row == prev_row && range.end > row_start {
3459 indent_from_prev_row = true;
3460 }
3461 if range.end > prev_row_start && range.end <= row_start {
3462 outdent_to_row = outdent_to_row.min(range.start.row);
3463 }
3464 }
3465
3466 if let Some(basis_row) = regex_outdent_map.get(&row) {
3467 indent_from_prev_row = false;
3468 outdent_to_row = *basis_row;
3469 from_regex = true;
3470 }
3471
3472 let within_error = error_ranges
3473 .iter()
3474 .any(|e| e.start.row < row && e.end > row_start);
3475
3476 let suggestion = if outdent_to_row == prev_row
3477 || (outdent_from_prev_row && indent_from_prev_row)
3478 {
3479 Some(IndentSuggestion {
3480 basis_row: prev_row,
3481 delta: Ordering::Equal,
3482 within_error: within_error && !from_regex,
3483 })
3484 } else if indent_from_prev_row {
3485 Some(IndentSuggestion {
3486 basis_row: prev_row,
3487 delta: Ordering::Greater,
3488 within_error: within_error && !from_regex,
3489 })
3490 } else if outdent_to_row < prev_row {
3491 Some(IndentSuggestion {
3492 basis_row: outdent_to_row,
3493 delta: Ordering::Equal,
3494 within_error: within_error && !from_regex,
3495 })
3496 } else if outdent_from_prev_row {
3497 Some(IndentSuggestion {
3498 basis_row: prev_row,
3499 delta: Ordering::Less,
3500 within_error: within_error && !from_regex,
3501 })
3502 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3503 {
3504 Some(IndentSuggestion {
3505 basis_row: prev_row,
3506 delta: Ordering::Equal,
3507 within_error: within_error && !from_regex,
3508 })
3509 } else {
3510 None
3511 };
3512
3513 prev_row = row;
3514 prev_row_start = row_start;
3515 suggestion
3516 }))
3517 }
3518
3519 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3520 while row > 0 {
3521 row -= 1;
3522 if !self.is_line_blank(row) {
3523 return Some(row);
3524 }
3525 }
3526 None
3527 }
3528
3529 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3530 let captures = self.syntax.captures(range, &self.text, |grammar| {
3531 grammar
3532 .highlights_config
3533 .as_ref()
3534 .map(|config| &config.query)
3535 });
3536 let highlight_maps = captures
3537 .grammars()
3538 .iter()
3539 .map(|grammar| grammar.highlight_map())
3540 .collect();
3541 (captures, highlight_maps)
3542 }
3543
3544 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3545 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3546 /// returned in chunks where each chunk has a single syntax highlighting style and
3547 /// diagnostic status.
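///
/// A minimal usage sketch (illustrative only, not a doc test; assumes a `snapshot: BufferSnapshot` is in scope):
///
/// ```ignore
/// // Reassemble the buffer text one chunk at a time.
/// let mut text = String::new();
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // Each chunk carries a single syntax highlight id and diagnostic status.
///     if let Some(severity) = chunk.diagnostic_severity {
///         eprintln!("chunk overlaps a {:?} diagnostic", severity);
///     }
///     text.push_str(chunk.text);
/// }
/// ```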
3548 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3549 let range = range.start.to_offset(self)..range.end.to_offset(self);
3550
3551 let mut syntax = None;
3552 if language_aware {
3553 syntax = Some(self.get_highlights(range.clone()));
3554 }
3555 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3556 let diagnostics = language_aware;
3557 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3558 }
3559
3560 pub fn highlighted_text_for_range<T: ToOffset>(
3561 &self,
3562 range: Range<T>,
3563 override_style: Option<HighlightStyle>,
3564 syntax_theme: &SyntaxTheme,
3565 ) -> HighlightedText {
3566 HighlightedText::from_buffer_range(
3567 range,
3568 &self.text,
3569 &self.syntax,
3570 override_style,
3571 syntax_theme,
3572 )
3573 }
3574
3575 /// Invokes the given callback for each line of text in the given range of the buffer.
3576 /// Takes a callback and reuses a single string buffer to avoid allocating a new string for each line.
3577 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3578 let mut line = String::new();
3579 let mut row = range.start.row;
3580 for chunk in self
3581 .as_rope()
3582 .chunks_in_range(range.to_offset(self))
3583 .chain(["\n"])
3584 {
3585 for (newline_ix, text) in chunk.split('\n').enumerate() {
3586 if newline_ix > 0 {
3587 callback(row, &line);
3588 row += 1;
3589 line.clear();
3590 }
3591 line.push_str(text);
3592 }
3593 }
3594 }
3595
3596 /// Iterates over every [`SyntaxLayer`] in the buffer.
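///
/// A minimal sketch (illustrative only; assumes a `snapshot: BufferSnapshot` is in scope):
///
/// ```ignore
/// // Count the syntax layers, e.g. to see whether any injected languages are present.
/// let layer_count = snapshot.syntax_layers().count();
/// ```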
3597 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3598 self.syntax_layers_for_range(0..self.len(), true)
3599 }
3600
3601 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3602 let offset = position.to_offset(self);
3603 self.syntax_layers_for_range(offset..offset, false)
3604 .filter(|l| {
3605 if let Some(ranges) = l.included_sub_ranges {
3606 ranges.iter().any(|range| {
3607 let start = range.start.to_offset(self);
3608 start <= offset && {
3609 let end = range.end.to_offset(self);
3610 offset < end
3611 }
3612 })
3613 } else {
3614 l.node().start_byte() <= offset && l.node().end_byte() > offset
3615 }
3616 })
3617 .last()
3618 }
3619
3620 pub fn syntax_layers_for_range<D: ToOffset>(
3621 &self,
3622 range: Range<D>,
3623 include_hidden: bool,
3624 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3625 self.syntax
3626 .layers_for_range(range, &self.text, include_hidden)
3627 }
3628
3629 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3630 &self,
3631 range: Range<D>,
3632 ) -> Option<SyntaxLayer<'_>> {
3633 let range = range.to_offset(self);
3634 self.syntax
3635 .layers_for_range(range, &self.text, false)
3636 .max_by(|a, b| {
3637 if a.depth != b.depth {
3638 a.depth.cmp(&b.depth)
3639 } else if a.offset.0 != b.offset.0 {
3640 a.offset.0.cmp(&b.offset.0)
3641 } else {
3642 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3643 }
3644 })
3645 }
3646
3647 /// Returns the main [`Language`].
3648 pub fn language(&self) -> Option<&Arc<Language>> {
3649 self.language.as_ref()
3650 }
3651
3652 /// Returns the [`Language`] at the given location.
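///
/// Sketch (illustrative only; assumes a `snapshot: BufferSnapshot` and a byte offset):
///
/// ```ignore
/// if let Some(language) = snapshot.language_at(0) {
///     println!("language at the start of the buffer: {:?}", language.name());
/// }
/// ```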
3653 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3654 self.syntax_layer_at(position)
3655 .map(|info| info.language)
3656 .or(self.language.as_ref())
3657 }
3658
3659 /// Returns the settings for the language at the given location.
3660 pub fn settings_at<'a, D: ToOffset>(
3661 &'a self,
3662 position: D,
3663 cx: &'a App,
3664 ) -> Cow<'a, LanguageSettings> {
3665 language_settings(
3666 self.language_at(position).map(|l| l.name()),
3667 self.file.as_ref(),
3668 cx,
3669 )
3670 }
3671
3672 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3673 CharClassifier::new(self.language_scope_at(point))
3674 }
3675
3676 /// Returns the [`LanguageScope`] at the given location.
3677 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3678 let offset = position.to_offset(self);
3679 let mut scope = None;
3680 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3681
3682 // Use the layer that has the smallest node intersecting the given point.
3683 for layer in self
3684 .syntax
3685 .layers_for_range(offset..offset, &self.text, false)
3686 {
3687 let mut cursor = layer.node().walk();
3688
3689 let mut range = None;
3690 loop {
3691 let child_range = cursor.node().byte_range();
3692 if !child_range.contains(&offset) {
3693 break;
3694 }
3695
3696 range = Some(child_range);
3697 if cursor.goto_first_child_for_byte(offset).is_none() {
3698 break;
3699 }
3700 }
3701
3702 if let Some(range) = range
3703 && smallest_range_and_depth.as_ref().is_none_or(
3704 |(smallest_range, smallest_range_depth)| {
3705 if layer.depth > *smallest_range_depth {
3706 true
3707 } else if layer.depth == *smallest_range_depth {
3708 range.len() < smallest_range.len()
3709 } else {
3710 false
3711 }
3712 },
3713 )
3714 {
3715 smallest_range_and_depth = Some((range, layer.depth));
3716 scope = Some(LanguageScope {
3717 language: layer.language.clone(),
3718 override_id: layer.override_id(offset, &self.text),
3719 });
3720 }
3721 }
3722
3723 scope.or_else(|| {
3724 self.language.clone().map(|language| LanguageScope {
3725 language,
3726 override_id: None,
3727 })
3728 })
3729 }
3730
3731 /// Returns a tuple of the range and character kind of the word
3732 /// surrounding the given position.
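///
/// Illustrative sketch (not a doc test; assumes a `snapshot: BufferSnapshot` and a byte offset):
///
/// ```ignore
/// let (word_range, _kind) = snapshot.surrounding_word(42, None);
/// let word: String = snapshot.text_for_range(word_range).collect();
/// ```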
3733 pub fn surrounding_word<T: ToOffset>(
3734 &self,
3735 start: T,
3736 scope_context: Option<CharScopeContext>,
3737 ) -> (Range<usize>, Option<CharKind>) {
3738 let mut start = start.to_offset(self);
3739 let mut end = start;
3740 let mut next_chars = self.chars_at(start).take(128).peekable();
3741 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3742
3743 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3744 let word_kind = cmp::max(
3745 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3746 next_chars.peek().copied().map(|c| classifier.kind(c)),
3747 );
3748
3749 for ch in prev_chars {
3750 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3751 start -= ch.len_utf8();
3752 } else {
3753 break;
3754 }
3755 }
3756
3757 for ch in next_chars {
3758 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3759 end += ch.len_utf8();
3760 } else {
3761 break;
3762 }
3763 }
3764
3765 (start..end, word_kind)
3766 }
3767
3768 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3769 /// range. When `require_larger` is true, the node found must be larger than the query range.
3770 ///
3771 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3772 /// be moved to the root of the tree.
3773 fn goto_node_enclosing_range(
3774 cursor: &mut tree_sitter::TreeCursor,
3775 query_range: &Range<usize>,
3776 require_larger: bool,
3777 ) -> bool {
3778 let mut ascending = false;
3779 loop {
3780 let mut range = cursor.node().byte_range();
3781 if query_range.is_empty() {
3782 // When the query range is empty and the current node starts after it, move to the
3783 // previous sibling to find the containing node.
3784 if range.start > query_range.start {
3785 cursor.goto_previous_sibling();
3786 range = cursor.node().byte_range();
3787 }
3788 } else {
3789 // When the query range is non-empty and the current node ends exactly at the start,
3790 // move to the next sibling to find a node that extends beyond the start.
3791 if range.end == query_range.start {
3792 cursor.goto_next_sibling();
3793 range = cursor.node().byte_range();
3794 }
3795 }
3796
3797 let encloses = range.contains_inclusive(query_range)
3798 && (!require_larger || range.len() > query_range.len());
3799 if !encloses {
3800 ascending = true;
3801 if !cursor.goto_parent() {
3802 return false;
3803 }
3804 continue;
3805 } else if ascending {
3806 return true;
3807 }
3808
3809 // Descend into the current node.
3810 if cursor
3811 .goto_first_child_for_byte(query_range.start)
3812 .is_none()
3813 {
3814 return true;
3815 }
3816 }
3817 }
3818
3819 pub fn syntax_ancestor<'a, T: ToOffset>(
3820 &'a self,
3821 range: Range<T>,
3822 ) -> Option<tree_sitter::Node<'a>> {
3823 let range = range.start.to_offset(self)..range.end.to_offset(self);
3824 let mut result: Option<tree_sitter::Node<'a>> = None;
3825 for layer in self
3826 .syntax
3827 .layers_for_range(range.clone(), &self.text, true)
3828 {
3829 let mut cursor = layer.node().walk();
3830
3831 // Find the node that both contains the range and is larger than it.
3832 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3833 continue;
3834 }
3835
3836 let left_node = cursor.node();
3837 let mut layer_result = left_node;
3838
3839 // For an empty range, try to find another node immediately to the right of the range.
3840 if left_node.end_byte() == range.start {
3841 let mut right_node = None;
3842 while !cursor.goto_next_sibling() {
3843 if !cursor.goto_parent() {
3844 break;
3845 }
3846 }
3847
3848 while cursor.node().start_byte() == range.start {
3849 right_node = Some(cursor.node());
3850 if !cursor.goto_first_child() {
3851 break;
3852 }
3853 }
3854
3855 // If there is a candidate node on both sides of the (empty) range, then
3856 // decide between the two by favoring a named node over an anonymous token.
3857 // If both nodes are the same in that regard, favor the right one.
3858 if let Some(right_node) = right_node
3859 && (right_node.is_named() || !left_node.is_named())
3860 {
3861 layer_result = right_node;
3862 }
3863 }
3864
3865 if let Some(previous_result) = &result
3866 && previous_result.byte_range().len() < layer_result.byte_range().len()
3867 {
3868 continue;
3869 }
3870 result = Some(layer_result);
3871 }
3872
3873 result
3874 }
3875
3876 /// Find the previous sibling syntax node at the given range.
3877 ///
3878 /// This function locates the syntax node that precedes the node containing
3879 /// the given range. It searches hierarchically by:
3880 /// 1. Finding the node that contains the given range
3881 /// 2. Looking for the previous sibling at the same tree level
3882 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3883 ///
3884 /// Returns `None` if there is no previous sibling at any ancestor level.
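///
/// Illustrative sketch (not a doc test; assumes a `snapshot: BufferSnapshot` and an offset range):
///
/// ```ignore
/// if let Some(node) = snapshot.syntax_prev_sibling(10..12) {
///     println!("previous sibling kind: {}", node.kind());
/// }
/// ```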
3885 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3886 &'a self,
3887 range: Range<T>,
3888 ) -> Option<tree_sitter::Node<'a>> {
3889 let range = range.start.to_offset(self)..range.end.to_offset(self);
3890 let mut result: Option<tree_sitter::Node<'a>> = None;
3891
3892 for layer in self
3893 .syntax
3894 .layers_for_range(range.clone(), &self.text, true)
3895 {
3896 let mut cursor = layer.node().walk();
3897
3898 // Find the node that contains the range
3899 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3900 continue;
3901 }
3902
3903 // Look for the previous sibling, moving up ancestor levels if needed
3904 loop {
3905 if cursor.goto_previous_sibling() {
3906 let layer_result = cursor.node();
3907
3908 if let Some(previous_result) = &result {
3909 if previous_result.byte_range().end < layer_result.byte_range().end {
3910 continue;
3911 }
3912 }
3913 result = Some(layer_result);
3914 break;
3915 }
3916
3917 // No sibling found at this level, try moving up to parent
3918 if !cursor.goto_parent() {
3919 break;
3920 }
3921 }
3922 }
3923
3924 result
3925 }
3926
3927 /// Find the next sibling syntax node at the given range.
3928 ///
3929 /// This function locates the syntax node that follows the node containing
3930 /// the given range. It searches hierarchically by:
3931 /// 1. Finding the node that contains the given range
3932 /// 2. Looking for the next sibling at the same tree level
3933 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3934 ///
3935 /// Returns `None` if there is no next sibling at any ancestor level.
3936 pub fn syntax_next_sibling<'a, T: ToOffset>(
3937 &'a self,
3938 range: Range<T>,
3939 ) -> Option<tree_sitter::Node<'a>> {
3940 let range = range.start.to_offset(self)..range.end.to_offset(self);
3941 let mut result: Option<tree_sitter::Node<'a>> = None;
3942
3943 for layer in self
3944 .syntax
3945 .layers_for_range(range.clone(), &self.text, true)
3946 {
3947 let mut cursor = layer.node().walk();
3948
3949 // Find the node that contains the range
3950 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3951 continue;
3952 }
3953
3954 // Look for the next sibling, moving up ancestor levels if needed
3955 loop {
3956 if cursor.goto_next_sibling() {
3957 let layer_result = cursor.node();
3958
3959 if let Some(previous_result) = &result {
3960 if previous_result.byte_range().start > layer_result.byte_range().start {
3961 continue;
3962 }
3963 }
3964 result = Some(layer_result);
3965 break;
3966 }
3967
3968 // No sibling found at this level, try moving up to parent
3969 if !cursor.goto_parent() {
3970 break;
3971 }
3972 }
3973 }
3974
3975 result
3976 }
3977
3978 /// Returns the root syntax node within the given row.
3979 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3980 let start_offset = position.to_offset(self);
3981
3982 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3983
3984 let layer = self
3985 .syntax
3986 .layers_for_range(start_offset..start_offset, &self.text, true)
3987 .next()?;
3988
3989 let mut cursor = layer.node().walk();
3990
3991 // Descend to the first leaf that touches the start of the range.
3992 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3993 if cursor.node().end_byte() == start_offset {
3994 cursor.goto_next_sibling();
3995 }
3996 }
3997
3998 // Ascend to the root node within the same row.
3999 while cursor.goto_parent() {
4000 if cursor.node().start_position().row != row {
4001 break;
4002 }
4003 }
4004
4005 Some(cursor.node())
4006 }
4007
4008 /// Returns the outline for the buffer.
4009 ///
4010 /// This method allows passing an optional [`SyntaxTheme`] to
4011 /// syntax-highlight the returned symbols.
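///
/// A sketch of gathering outline items directly (illustrative only; assumes a `snapshot: BufferSnapshot`):
///
/// ```ignore
/// for item in snapshot.outline_items_as_points_containing(0..snapshot.len(), true, None) {
///     println!("{}{}", "  ".repeat(item.depth), item.text);
/// }
/// ```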
4012 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4013 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4014 }
4015
4016 /// Returns all the symbols that contain the given position.
4017 ///
4018 /// This method allows passing an optional [`SyntaxTheme`] to
4019 /// syntax-highlight the returned symbols.
4020 pub fn symbols_containing<T: ToOffset>(
4021 &self,
4022 position: T,
4023 theme: Option<&SyntaxTheme>,
4024 ) -> Vec<OutlineItem<Anchor>> {
4025 let position = position.to_offset(self);
4026 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4027 let end = self.clip_offset(position + 1, Bias::Right);
4028 let mut items = self.outline_items_containing(start..end, false, theme);
4029 let mut prev_depth = None;
4030 items.retain(|item| {
4031 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4032 prev_depth = Some(item.depth);
4033 result
4034 });
4035 items
4036 }
4037
4038 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4039 let range = range.to_offset(self);
4040 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4041 grammar.outline_config.as_ref().map(|c| &c.query)
4042 });
4043 let configs = matches
4044 .grammars()
4045 .iter()
4046 .map(|g| g.outline_config.as_ref().unwrap())
4047 .collect::<Vec<_>>();
4048
4049 while let Some(mat) = matches.peek() {
4050 let config = &configs[mat.grammar_index];
4051 let containing_item_node = maybe!({
4052 let item_node = mat.captures.iter().find_map(|cap| {
4053 if cap.index == config.item_capture_ix {
4054 Some(cap.node)
4055 } else {
4056 None
4057 }
4058 })?;
4059
4060 let item_byte_range = item_node.byte_range();
4061 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4062 None
4063 } else {
4064 Some(item_node)
4065 }
4066 });
4067
4068 if let Some(item_node) = containing_item_node {
4069 return Some(
4070 Point::from_ts_point(item_node.start_position())
4071 ..Point::from_ts_point(item_node.end_position()),
4072 );
4073 }
4074
4075 matches.advance();
4076 }
4077 None
4078 }
4079
4080 pub fn outline_items_containing<T: ToOffset>(
4081 &self,
4082 range: Range<T>,
4083 include_extra_context: bool,
4084 theme: Option<&SyntaxTheme>,
4085 ) -> Vec<OutlineItem<Anchor>> {
4086 self.outline_items_containing_internal(
4087 range,
4088 include_extra_context,
4089 theme,
4090 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4091 )
4092 }
4093
4094 pub fn outline_items_as_points_containing<T: ToOffset>(
4095 &self,
4096 range: Range<T>,
4097 include_extra_context: bool,
4098 theme: Option<&SyntaxTheme>,
4099 ) -> Vec<OutlineItem<Point>> {
4100 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4101 range
4102 })
4103 }
4104
4105 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4106 &self,
4107 range: Range<T>,
4108 include_extra_context: bool,
4109 theme: Option<&SyntaxTheme>,
4110 ) -> Vec<OutlineItem<usize>> {
4111 self.outline_items_containing_internal(
4112 range,
4113 include_extra_context,
4114 theme,
4115 |buffer, range| range.to_offset(buffer),
4116 )
4117 }
4118
4119 fn outline_items_containing_internal<T: ToOffset, U>(
4120 &self,
4121 range: Range<T>,
4122 include_extra_context: bool,
4123 theme: Option<&SyntaxTheme>,
4124 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4125 ) -> Vec<OutlineItem<U>> {
4126 let range = range.to_offset(self);
4127 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4128 grammar.outline_config.as_ref().map(|c| &c.query)
4129 });
4130
4131 let mut items = Vec::new();
4132 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4133 while let Some(mat) = matches.peek() {
4134 let config = matches.grammars()[mat.grammar_index]
4135 .outline_config
4136 .as_ref()
4137 .unwrap();
4138 if let Some(item) =
4139 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4140 {
4141 items.push(item);
4142 } else if let Some(capture) = mat
4143 .captures
4144 .iter()
4145 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4146 {
4147 let capture_range = capture.node.start_position()..capture.node.end_position();
4148 let mut capture_row_range =
4149 capture_range.start.row as u32..capture_range.end.row as u32;
4150 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4151 {
4152 capture_row_range.end -= 1;
4153 }
4154 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4155 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4156 last_row_range.end = capture_row_range.end;
4157 } else {
4158 annotation_row_ranges.push(capture_row_range);
4159 }
4160 } else {
4161 annotation_row_ranges.push(capture_row_range);
4162 }
4163 }
4164 matches.advance();
4165 }
4166
4167 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4168
4169 // Assign depths based on containment relationships and convert ranges via the provided callback.
4170 let mut item_ends_stack = Vec::<Point>::new();
4171 let mut anchor_items = Vec::new();
4172 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4173 for item in items {
4174 while let Some(last_end) = item_ends_stack.last().copied() {
4175 if last_end < item.range.end {
4176 item_ends_stack.pop();
4177 } else {
4178 break;
4179 }
4180 }
4181
4182 let mut annotation_row_range = None;
4183 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4184 let row_preceding_item = item.range.start.row.saturating_sub(1);
4185 if next_annotation_row_range.end < row_preceding_item {
4186 annotation_row_ranges.next();
4187 } else {
4188 if next_annotation_row_range.end == row_preceding_item {
4189 annotation_row_range = Some(next_annotation_row_range.clone());
4190 annotation_row_ranges.next();
4191 }
4192 break;
4193 }
4194 }
4195
4196 anchor_items.push(OutlineItem {
4197 depth: item_ends_stack.len(),
4198 range: range_callback(self, item.range.clone()),
4199 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4200 text: item.text,
4201 highlight_ranges: item.highlight_ranges,
4202 name_ranges: item.name_ranges,
4203 body_range: item.body_range.map(|r| range_callback(self, r)),
4204 annotation_range: annotation_row_range.map(|annotation_range| {
4205 let point_range = Point::new(annotation_range.start, 0)
4206 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4207 range_callback(self, point_range)
4208 }),
4209 });
4210 item_ends_stack.push(item.range.end);
4211 }
4212
4213 anchor_items
4214 }
4215
4216 fn next_outline_item(
4217 &self,
4218 config: &OutlineConfig,
4219 mat: &SyntaxMapMatch,
4220 range: &Range<usize>,
4221 include_extra_context: bool,
4222 theme: Option<&SyntaxTheme>,
4223 ) -> Option<OutlineItem<Point>> {
4224 let item_node = mat.captures.iter().find_map(|cap| {
4225 if cap.index == config.item_capture_ix {
4226 Some(cap.node)
4227 } else {
4228 None
4229 }
4230 })?;
4231
4232 let item_byte_range = item_node.byte_range();
4233 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4234 return None;
4235 }
4236 let item_point_range = Point::from_ts_point(item_node.start_position())
4237 ..Point::from_ts_point(item_node.end_position());
4238
4239 let mut open_point = None;
4240 let mut close_point = None;
4241
4242 let mut buffer_ranges = Vec::new();
4243 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4244 let mut range = node.start_byte()..node.end_byte();
4245 let start = node.start_position();
4246 if node.end_position().row > start.row {
4247 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4248 }
4249
4250 if !range.is_empty() {
4251 buffer_ranges.push((range, node_is_name));
4252 }
4253 };
4254
4255 for capture in mat.captures {
4256 if capture.index == config.name_capture_ix {
4257 add_to_buffer_ranges(capture.node, true);
4258 } else if Some(capture.index) == config.context_capture_ix
4259 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4260 {
4261 add_to_buffer_ranges(capture.node, false);
4262 } else {
4263 if Some(capture.index) == config.open_capture_ix {
4264 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4265 } else if Some(capture.index) == config.close_capture_ix {
4266 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4267 }
4268 }
4269 }
4270
4271 if buffer_ranges.is_empty() {
4272 return None;
4273 }
4274 let source_range_for_text =
4275 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4276
4277 let mut text = String::new();
4278 let mut highlight_ranges = Vec::new();
4279 let mut name_ranges = Vec::new();
4280 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4281 let mut last_buffer_range_end = 0;
4282 for (buffer_range, is_name) in buffer_ranges {
4283 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4284 if space_added {
4285 text.push(' ');
4286 }
4287 let before_append_len = text.len();
4288 let mut offset = buffer_range.start;
4289 chunks.seek(buffer_range.clone());
4290 for mut chunk in chunks.by_ref() {
4291 if chunk.text.len() > buffer_range.end - offset {
4292 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4293 offset = buffer_range.end;
4294 } else {
4295 offset += chunk.text.len();
4296 }
4297 let style = chunk
4298 .syntax_highlight_id
4299 .zip(theme)
4300 .and_then(|(highlight, theme)| highlight.style(theme));
4301 if let Some(style) = style {
4302 let start = text.len();
4303 let end = start + chunk.text.len();
4304 highlight_ranges.push((start..end, style));
4305 }
4306 text.push_str(chunk.text);
4307 if offset >= buffer_range.end {
4308 break;
4309 }
4310 }
4311 if is_name {
4312 let after_append_len = text.len();
4313 let start = if space_added && !name_ranges.is_empty() {
4314 before_append_len - 1
4315 } else {
4316 before_append_len
4317 };
4318 name_ranges.push(start..after_append_len);
4319 }
4320 last_buffer_range_end = buffer_range.end;
4321 }
4322
4323 Some(OutlineItem {
4324 depth: 0, // We'll calculate the depth later
4325 range: item_point_range,
4326 source_range_for_text: source_range_for_text.to_point(self),
4327 text,
4328 highlight_ranges,
4329 name_ranges,
4330 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4331 annotation_range: None,
4332 })
4333 }
4334
4335 pub fn function_body_fold_ranges<T: ToOffset>(
4336 &self,
4337 within: Range<T>,
4338 ) -> impl Iterator<Item = Range<usize>> + '_ {
4339 self.text_object_ranges(within, TreeSitterOptions::default())
4340 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4341 }
4342
4343 /// For each grammar in the language, runs the provided
4344 /// [`tree_sitter::Query`] against the given range.
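///
/// Sketch of the peek/advance pattern used throughout this file (illustrative only; assumes a `snapshot: BufferSnapshot`):
///
/// ```ignore
/// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
///     grammar.outline_config.as_ref().map(|config| &config.query)
/// });
/// while let Some(mat) = matches.peek() {
///     // Inspect `mat.captures` here.
///     matches.advance();
/// }
/// ```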
4345 pub fn matches(
4346 &self,
4347 range: Range<usize>,
4348 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4349 ) -> SyntaxMapMatches<'_> {
4350 self.syntax.matches(range, self, query)
4351 }
4352
4353 /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
4354 /// Hence, it may return more bracket pairs than the range itself contains.
4355 ///
4356 /// Chunks listed in `known_chunks` are skipped.
4357 /// The resulting bracket match collections are not ordered.
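///
/// Illustrative sketch (not a doc test; assumes a `snapshot: BufferSnapshot`):
///
/// ```ignore
/// let matches = snapshot.fetch_bracket_ranges(0..snapshot.len(), None);
/// for (row_range, brackets) in &matches {
///     println!("rows {:?}: {} bracket pairs", row_range, brackets.len());
/// }
/// ```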
4358 pub fn fetch_bracket_ranges(
4359 &self,
4360 range: Range<usize>,
4361 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4362 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4363 let mut all_bracket_matches = HashMap::default();
4364
4365 for chunk in self
4366 .tree_sitter_data
4367 .chunks
4368 .applicable_chunks(&[range.to_point(self)])
4369 {
4370 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4371 continue;
4372 }
4373 let chunk_range = chunk.anchor_range();
4374 let chunk_range = chunk_range.to_offset(&self);
4375
4376 if let Some(cached_brackets) =
4377 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4378 {
4379 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4380 continue;
4381 }
4382
4383 let mut all_brackets = Vec::new();
4384 let mut opens = Vec::new();
4385 let mut color_pairs = Vec::new();
4386
4387 let mut matches = self.syntax.matches_with_options(
4388 chunk_range.clone(),
4389 &self.text,
4390 TreeSitterOptions {
4391 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4392 max_start_depth: None,
4393 },
4394 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4395 );
4396 let configs = matches
4397 .grammars()
4398 .iter()
4399 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4400 .collect::<Vec<_>>();
4401
4402 while let Some(mat) = matches.peek() {
4403 let mut open = None;
4404 let mut close = None;
4405 let syntax_layer_depth = mat.depth;
4406 let config = configs[mat.grammar_index];
4407 let pattern = &config.patterns[mat.pattern_index];
4408 for capture in mat.captures {
4409 if capture.index == config.open_capture_ix {
4410 open = Some(capture.node.byte_range());
4411 } else if capture.index == config.close_capture_ix {
4412 close = Some(capture.node.byte_range());
4413 }
4414 }
4415
4416 matches.advance();
4417
4418 let Some((open_range, close_range)) = open.zip(close) else {
4419 continue;
4420 };
4421
4422 let bracket_range = open_range.start..=close_range.end;
4423 if !bracket_range.overlaps(&chunk_range) {
4424 continue;
4425 }
4426
4427 let index = all_brackets.len();
4428 all_brackets.push(BracketMatch {
4429 open_range: open_range.clone(),
4430 close_range: close_range.clone(),
4431 newline_only: pattern.newline_only,
4432 syntax_layer_depth,
4433 color_index: None,
4434 });
4435
4436 // Certain languages have "brackets" that are not really brackets, e.g. tags, and such a
4437 // bracket will match the entire tag with all of its text inside.
4438 // For now, avoid highlighting any pair where both brackets are longer than a single char.
4439 // We need to colorize `<Element/>` bracket pairs, so we cannot make this check stricter.
4440 let should_color =
4441 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4442 if should_color {
4443 opens.push(open_range.clone());
4444 color_pairs.push((open_range, close_range, index));
4445 }
4446 }
4447
4448 opens.sort_by_key(|r| (r.start, r.end));
4449 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4450 color_pairs.sort_by_key(|(_, close, _)| close.end);
4451
4452 let mut open_stack = Vec::new();
4453 let mut open_index = 0;
4454 for (open, close, index) in color_pairs {
4455 while open_index < opens.len() && opens[open_index].start < close.start {
4456 open_stack.push(opens[open_index].clone());
4457 open_index += 1;
4458 }
4459
4460 if open_stack.last() == Some(&open) {
4461 let depth_index = open_stack.len() - 1;
4462 all_brackets[index].color_index = Some(depth_index);
4463 open_stack.pop();
4464 }
4465 }
4466
4467 all_brackets.sort_by_key(|bracket_match| {
4468 (bracket_match.open_range.start, bracket_match.open_range.end)
4469 });
4470
4471 if let empty_slot @ None =
4472 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4473 {
4474 *empty_slot = Some(all_brackets.clone());
4475 }
4476 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4477 }
4478
4479 all_bracket_matches
4480 }
4481
4482 pub fn all_bracket_ranges(
4483 &self,
4484 range: Range<usize>,
4485 ) -> impl Iterator<Item = BracketMatch<usize>> {
4486 self.fetch_bracket_ranges(range.clone(), None)
4487 .into_values()
4488 .flatten()
4489 .filter(move |bracket_match| {
4490 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4491 bracket_range.overlaps(&range)
4492 })
4493 }
4494
4495 /// Returns bracket range pairs overlapping or adjacent to `range`.
4496 pub fn bracket_ranges<T: ToOffset>(
4497 &self,
4498 range: Range<T>,
4499 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4500 // Find bracket pairs that *inclusively* contain the given range.
4501 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4502 self.all_bracket_ranges(range)
4503 .filter(|pair| !pair.newline_only)
4504 }
4505
4506 pub fn debug_variables_query<T: ToOffset>(
4507 &self,
4508 range: Range<T>,
4509 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4510 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4511
4512 let mut matches = self.syntax.matches_with_options(
4513 range.clone(),
4514 &self.text,
4515 TreeSitterOptions::default(),
4516 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4517 );
4518
4519 let configs = matches
4520 .grammars()
4521 .iter()
4522 .map(|grammar| grammar.debug_variables_config.as_ref())
4523 .collect::<Vec<_>>();
4524
4525 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4526
4527 iter::from_fn(move || {
4528 loop {
4529 while let Some(capture) = captures.pop() {
4530 if capture.0.overlaps(&range) {
4531 return Some(capture);
4532 }
4533 }
4534
4535 let mat = matches.peek()?;
4536
4537 let Some(config) = configs[mat.grammar_index].as_ref() else {
4538 matches.advance();
4539 continue;
4540 };
4541
4542 for capture in mat.captures {
4543 let Some(ix) = config
4544 .objects_by_capture_ix
4545 .binary_search_by_key(&capture.index, |e| e.0)
4546 .ok()
4547 else {
4548 continue;
4549 };
4550 let text_object = config.objects_by_capture_ix[ix].1;
4551 let byte_range = capture.node.byte_range();
4552
4553 let mut found = false;
4554 for (range, existing) in captures.iter_mut() {
4555 if existing == &text_object {
4556 range.start = range.start.min(byte_range.start);
4557 range.end = range.end.max(byte_range.end);
4558 found = true;
4559 break;
4560 }
4561 }
4562
4563 if !found {
4564 captures.push((byte_range, text_object));
4565 }
4566 }
4567
4568 matches.advance();
4569 }
4570 })
4571 }
4572
4573 pub fn text_object_ranges<T: ToOffset>(
4574 &self,
4575 range: Range<T>,
4576 options: TreeSitterOptions,
4577 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4578 let range =
4579 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4580
4581 let mut matches =
4582 self.syntax
4583 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4584 grammar.text_object_config.as_ref().map(|c| &c.query)
4585 });
4586
4587 let configs = matches
4588 .grammars()
4589 .iter()
4590 .map(|grammar| grammar.text_object_config.as_ref())
4591 .collect::<Vec<_>>();
4592
4593 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4594
4595 iter::from_fn(move || {
4596 loop {
4597 while let Some(capture) = captures.pop() {
4598 if capture.0.overlaps(&range) {
4599 return Some(capture);
4600 }
4601 }
4602
4603 let mat = matches.peek()?;
4604
4605 let Some(config) = configs[mat.grammar_index].as_ref() else {
4606 matches.advance();
4607 continue;
4608 };
4609
4610 for capture in mat.captures {
4611 let Some(ix) = config
4612 .text_objects_by_capture_ix
4613 .binary_search_by_key(&capture.index, |e| e.0)
4614 .ok()
4615 else {
4616 continue;
4617 };
4618 let text_object = config.text_objects_by_capture_ix[ix].1;
4619 let byte_range = capture.node.byte_range();
4620
4621 let mut found = false;
4622 for (range, existing) in captures.iter_mut() {
4623 if existing == &text_object {
4624 range.start = range.start.min(byte_range.start);
4625 range.end = range.end.max(byte_range.end);
4626 found = true;
4627 break;
4628 }
4629 }
4630
4631 if !found {
4632 captures.push((byte_range, text_object));
4633 }
4634 }
4635
4636 matches.advance();
4637 }
4638 })
4639 }
4640
4641 /// Returns enclosing bracket ranges containing the given range.
4642 pub fn enclosing_bracket_ranges<T: ToOffset>(
4643 &self,
4644 range: Range<T>,
4645 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4646 let range = range.start.to_offset(self)..range.end.to_offset(self);
4647
4648 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4649 let max_depth = result
4650 .iter()
4651 .map(|mat| mat.syntax_layer_depth)
4652 .max()
4653 .unwrap_or(0);
4654 result.into_iter().filter(move |pair| {
4655 pair.open_range.start <= range.start
4656 && pair.close_range.end >= range.end
4657 && pair.syntax_layer_depth == max_depth
4658 })
4659 }
4660
4661 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
4662 ///
4663 /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
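///
/// Sketch (illustrative only; assumes a `snapshot: BufferSnapshot` and an offset range):
///
/// ```ignore
/// // Hypothetical filter: only consider pairs whose open bracket starts before offset 10.
/// let filter = |open: Range<usize>, _close: Range<usize>| open.start < 10;
/// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(4..6, Some(&filter)) {
///     println!("enclosed by {:?} and {:?}", open, close);
/// }
/// ```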
4664 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4665 &self,
4666 range: Range<T>,
4667 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4668 ) -> Option<(Range<usize>, Range<usize>)> {
4669 let range = range.start.to_offset(self)..range.end.to_offset(self);
4670
4671 // Get the ranges of the innermost pair of brackets.
4672 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4673
4674 for pair in self.enclosing_bracket_ranges(range) {
4675 if let Some(range_filter) = range_filter
4676 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4677 {
4678 continue;
4679 }
4680
4681 let len = pair.close_range.end - pair.open_range.start;
4682
4683 if let Some((existing_open, existing_close)) = &result {
4684 let existing_len = existing_close.end - existing_open.start;
4685 if len > existing_len {
4686 continue;
4687 }
4688 }
4689
4690 result = Some((pair.open_range, pair.close_range));
4691 }
4692
4693 result
4694 }
4695
4696 /// Returns offset ranges for any matches of the redaction query.
4697 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4698 /// will be run on the relevant section of the buffer.
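///
/// Illustrative sketch (not a doc test; assumes a `snapshot: BufferSnapshot`):
///
/// ```ignore
/// // Offset ranges that the language's redaction query marks as secrets.
/// let secret_ranges: Vec<_> = snapshot.redacted_ranges(0..snapshot.len()).collect();
/// ```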
4699 pub fn redacted_ranges<T: ToOffset>(
4700 &self,
4701 range: Range<T>,
4702 ) -> impl Iterator<Item = Range<usize>> + '_ {
4703 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4704 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4705 grammar
4706 .redactions_config
4707 .as_ref()
4708 .map(|config| &config.query)
4709 });
4710
4711 let configs = syntax_matches
4712 .grammars()
4713 .iter()
4714 .map(|grammar| grammar.redactions_config.as_ref())
4715 .collect::<Vec<_>>();
4716
4717 iter::from_fn(move || {
4718 let redacted_range = syntax_matches
4719 .peek()
4720 .and_then(|mat| {
4721 configs[mat.grammar_index].and_then(|config| {
4722 mat.captures
4723 .iter()
4724 .find(|capture| capture.index == config.redaction_capture_ix)
4725 })
4726 })
4727 .map(|mat| mat.node.byte_range());
4728 syntax_matches.advance();
4729 redacted_range
4730 })
4731 }
4732
4733 pub fn injections_intersecting_range<T: ToOffset>(
4734 &self,
4735 range: Range<T>,
4736 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4737 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4738
4739 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4740 grammar
4741 .injection_config
4742 .as_ref()
4743 .map(|config| &config.query)
4744 });
4745
4746 let configs = syntax_matches
4747 .grammars()
4748 .iter()
4749 .map(|grammar| grammar.injection_config.as_ref())
4750 .collect::<Vec<_>>();
4751
4752 iter::from_fn(move || {
4753 let ranges = syntax_matches.peek().and_then(|mat| {
4754 let config = &configs[mat.grammar_index]?;
4755 let content_capture_range = mat.captures.iter().find_map(|capture| {
4756 if capture.index == config.content_capture_ix {
4757 Some(capture.node.byte_range())
4758 } else {
4759 None
4760 }
4761 })?;
4762 let language = self.language_at(content_capture_range.start)?;
4763 Some((content_capture_range, language))
4764 });
4765 syntax_matches.advance();
4766 ranges
4767 })
4768 }
4769
4770 pub fn runnable_ranges(
4771 &self,
4772 offset_range: Range<usize>,
4773 ) -> impl Iterator<Item = RunnableRange> + '_ {
4774 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4775 grammar.runnable_config.as_ref().map(|config| &config.query)
4776 });
4777
4778 let test_configs = syntax_matches
4779 .grammars()
4780 .iter()
4781 .map(|grammar| grammar.runnable_config.as_ref())
4782 .collect::<Vec<_>>();
4783
4784 iter::from_fn(move || {
4785 loop {
4786 let mat = syntax_matches.peek()?;
4787
4788 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4789 let mut run_range = None;
4790 let full_range = mat.captures.iter().fold(
4791 Range {
4792 start: usize::MAX,
4793 end: 0,
4794 },
4795 |mut acc, next| {
4796 let byte_range = next.node.byte_range();
4797 if acc.start > byte_range.start {
4798 acc.start = byte_range.start;
4799 }
4800 if acc.end < byte_range.end {
4801 acc.end = byte_range.end;
4802 }
4803 acc
4804 },
4805 );
4806 if full_range.start > full_range.end {
4807 // We did not find a full spanning range of this match.
4808 return None;
4809 }
4810 let extra_captures: SmallVec<[_; 1]> =
4811 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4812 test_configs
4813 .extra_captures
4814 .get(capture.index as usize)
4815 .cloned()
4816 .and_then(|tag_name| match tag_name {
4817 RunnableCapture::Named(name) => {
4818 Some((capture.node.byte_range(), name))
4819 }
4820 RunnableCapture::Run => {
4821 let _ = run_range.insert(capture.node.byte_range());
4822 None
4823 }
4824 })
4825 }));
4826 let run_range = run_range?;
4827 let tags = test_configs
4828 .query
4829 .property_settings(mat.pattern_index)
4830 .iter()
4831 .filter_map(|property| {
4832 if *property.key == *"tag" {
4833 property
4834 .value
4835 .as_ref()
4836 .map(|value| RunnableTag(value.to_string().into()))
4837 } else {
4838 None
4839 }
4840 })
4841 .collect();
4842 let extra_captures = extra_captures
4843 .into_iter()
4844 .map(|(range, name)| {
4845 (
4846 name.to_string(),
4847 self.text_for_range(range).collect::<String>(),
4848 )
4849 })
4850 .collect();
4851 // All tags should have the same range.
4852 Some(RunnableRange {
4853 run_range,
4854 full_range,
4855 runnable: Runnable {
4856 tags,
4857 language: mat.language,
4858 buffer: self.remote_id(),
4859 },
4860 extra_captures,
4861 buffer_id: self.remote_id(),
4862 })
4863 });
4864
4865 syntax_matches.advance();
4866 if test_range.is_some() {
4867 // It's fine for us to short-circuit when .peek()? returns None. However, we don't want to return None from this
4868 // iterator just because a capture did not contain a run marker; in that case we just loop around to the next capture.
4869 return test_range;
4870 }
4871 }
4872 })
4873 }
4874
4875 /// Returns selections for remote peers intersecting the given range.
4876 #[allow(clippy::type_complexity)]
4877 pub fn selections_in_range(
4878 &self,
4879 range: Range<Anchor>,
4880 include_local: bool,
4881 ) -> impl Iterator<
4882 Item = (
4883 ReplicaId,
4884 bool,
4885 CursorShape,
4886 impl Iterator<Item = &Selection<Anchor>> + '_,
4887 ),
4888 > + '_ {
4889 self.remote_selections
4890 .iter()
4891 .filter(move |(replica_id, set)| {
4892 (include_local || **replica_id != self.text.replica_id())
4893 && !set.selections.is_empty()
4894 })
4895 .map(move |(replica_id, set)| {
4896 let start_ix = match set.selections.binary_search_by(|probe| {
4897 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4898 }) {
4899 Ok(ix) | Err(ix) => ix,
4900 };
4901 let end_ix = match set.selections.binary_search_by(|probe| {
4902 probe.start.cmp(&range.end, self).then(Ordering::Less)
4903 }) {
4904 Ok(ix) | Err(ix) => ix,
4905 };
4906
4907 (
4908 *replica_id,
4909 set.line_mode,
4910 set.cursor_shape,
4911 set.selections[start_ix..end_ix].iter(),
4912 )
4913 })
4914 }
4915
4916 /// Returns whether the buffer contains any diagnostics.
4917 pub fn has_diagnostics(&self) -> bool {
4918 !self.diagnostics.is_empty()
4919 }
4920
4921 /// Returns all the diagnostics intersecting the given range.
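///
/// Sketch mirroring how this is used elsewhere in this file (illustrative only; assumes a `snapshot: BufferSnapshot`):
///
/// ```ignore
/// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
///     println!("{:?} at {:?}", entry.diagnostic.severity, entry.range);
/// }
/// ```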
4922 pub fn diagnostics_in_range<'a, T, O>(
4923 &'a self,
4924 search_range: Range<T>,
4925 reversed: bool,
4926 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4927 where
4928 T: 'a + Clone + ToOffset,
4929 O: 'a + FromAnchor,
4930 {
4931 let mut iterators: Vec<_> = self
4932 .diagnostics
4933 .iter()
4934 .map(|(_, collection)| {
4935 collection
4936 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4937 .peekable()
4938 })
4939 .collect();
4940
4941 std::iter::from_fn(move || {
4942 let (next_ix, _) = iterators
4943 .iter_mut()
4944 .enumerate()
4945 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4946 .min_by(|(_, a), (_, b)| {
4947 let cmp = a
4948 .range
4949 .start
4950 .cmp(&b.range.start, self)
4951 // when range is equal, sort by diagnostic severity
4952 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4953 // and stabilize order with group_id
4954 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4955 if reversed { cmp.reverse() } else { cmp }
4956 })?;
4957 iterators[next_ix]
4958 .next()
4959 .map(
4960 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4961 diagnostic,
4962 range: FromAnchor::from_anchor(&range.start, self)
4963 ..FromAnchor::from_anchor(&range.end, self),
4964 },
4965 )
4966 })
4967 }
4968
4969 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4970 /// should be used instead.
4971 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4972 &self.diagnostics
4973 }
4974
4975 /// Returns all the diagnostic groups associated with the given
4976 /// language server ID. If no language server ID is provided,
4977 /// all diagnostic groups are returned.
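///
/// Sketch (illustrative only; assumes a `snapshot: BufferSnapshot`):
///
/// ```ignore
/// for (server_id, group) in snapshot.diagnostic_groups(None) {
///     println!("{:?}: {} entries in group", server_id, group.entries.len());
/// }
/// ```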
4978 pub fn diagnostic_groups(
4979 &self,
4980 language_server_id: Option<LanguageServerId>,
4981 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4982 let mut groups = Vec::new();
4983
4984 if let Some(language_server_id) = language_server_id {
4985 if let Ok(ix) = self
4986 .diagnostics
4987 .binary_search_by_key(&language_server_id, |e| e.0)
4988 {
4989 self.diagnostics[ix]
4990 .1
4991 .groups(language_server_id, &mut groups, self);
4992 }
4993 } else {
4994 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4995 diagnostics.groups(*language_server_id, &mut groups, self);
4996 }
4997 }
4998
4999 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
5000 let a_start = &group_a.entries[group_a.primary_ix].range.start;
5001 let b_start = &group_b.entries[group_b.primary_ix].range.start;
5002 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5003 });
5004
5005 groups
5006 }
5007
5008 /// Returns an iterator over the diagnostics for the given group.
5009 pub fn diagnostic_group<O>(
5010 &self,
5011 group_id: usize,
5012 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5013 where
5014 O: FromAnchor + 'static,
5015 {
5016 self.diagnostics
5017 .iter()
5018 .flat_map(move |(_, set)| set.group(group_id, self))
5019 }
5020
5021 /// An integer version number that accounts for all updates besides
5022 /// the buffer's text itself (which is versioned via a version vector).
5023 pub fn non_text_state_update_count(&self) -> usize {
5024 self.non_text_state_update_count
5025 }
5026
5027 /// An integer version that changes when the buffer's syntax changes.
5028 pub fn syntax_update_count(&self) -> usize {
5029 self.syntax.update_count()
5030 }
5031
5032 /// Returns a snapshot of underlying file.
5033 pub fn file(&self) -> Option<&Arc<dyn File>> {
5034 self.file.as_ref()
5035 }
5036
5037 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5038 if let Some(file) = self.file() {
5039 if file.path().file_name().is_none() || include_root {
5040 Some(file.full_path(cx).to_string_lossy().into_owned())
5041 } else {
5042 Some(file.path().display(file.path_style(cx)).to_string())
5043 }
5044 } else {
5045 None
5046 }
5047 }
5048
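/// Collects the distinct words in the given buffer range, keyed by their text and
/// mapped to their anchor ranges.
///
/// Illustrative sketch (not a doc test; assumes a `snapshot: BufferSnapshot`):
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```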
5049 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5050 let query_str = query.fuzzy_contents;
5051 if query_str.is_some_and(|query| query.is_empty()) {
5052 return BTreeMap::default();
5053 }
5054
5055 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5056 language,
5057 override_id: None,
5058 }));
5059
5060 let mut query_ix = 0;
5061 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5062 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5063
5064 let mut words = BTreeMap::default();
5065 let mut current_word_start_ix = None;
5066 let mut chunk_ix = query.range.start;
5067 for chunk in self.chunks(query.range, false) {
5068 for (i, c) in chunk.text.char_indices() {
5069 let ix = chunk_ix + i;
5070 if classifier.is_word(c) {
5071 if current_word_start_ix.is_none() {
5072 current_word_start_ix = Some(ix);
5073 }
5074
5075 if let Some(query_chars) = &query_chars
5076 && query_ix < query_len
5077 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5078 {
5079 query_ix += 1;
5080 }
5081 continue;
5082 } else if let Some(word_start) = current_word_start_ix.take()
5083 && query_ix == query_len
5084 {
5085 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5086 let mut word_text = self.text_for_range(word_start..ix).peekable();
5087 let first_char = word_text
5088 .peek()
5089 .and_then(|first_chunk| first_chunk.chars().next());
5090 // As a heuristic to reduce useless completions, skip "words" that start with a digit.
5091 if !query.skip_digits
5092 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5093 {
5094 words.insert(word_text.collect(), word_range);
5095 }
5096 }
5097 query_ix = 0;
5098 }
5099 chunk_ix += chunk.text.len();
5100 }
5101
5102 words
5103 }
5104}
5105
5106pub struct WordsQuery<'a> {
5107 /// If set, only returns words that contain all characters of the fuzzy string, in order (case-insensitive).
5108 pub fuzzy_contents: Option<&'a str>,
5109 /// Skips words that start with a digit.
5110 pub skip_digits: bool,
5111 /// The buffer offset range in which to look for words.
5112 pub range: Range<usize>,
5113}
5114
5115fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5116 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5117}
5118
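/// Computes the [`IndentSize`] of a line from its leading whitespace: the kind is taken
/// from the first whitespace character, and the length counts every leading space or tab.
///
/// Illustrative sketch (not a doc test):
///
/// ```ignore
/// assert_eq!(indent_size_for_text("    let x = 1;".chars()).len, 4);
/// assert_eq!(indent_size_for_text("\tfn main() {}".chars()).kind, IndentKind::Tab);
/// ```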
5119fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5120 let mut result = IndentSize::spaces(0);
5121 for c in text {
5122 let kind = match c {
5123 ' ' => IndentKind::Space,
5124 '\t' => IndentKind::Tab,
5125 _ => break,
5126 };
5127 if result.len == 0 {
5128 result.kind = kind;
5129 }
5130 result.len += 1;
5131 }
5132 result
5133}
5134
5135impl Clone for BufferSnapshot {
5136 fn clone(&self) -> Self {
5137 Self {
5138 text: self.text.clone(),
5139 syntax: self.syntax.clone(),
5140 file: self.file.clone(),
5141 remote_selections: self.remote_selections.clone(),
5142 diagnostics: self.diagnostics.clone(),
5143 language: self.language.clone(),
5144 tree_sitter_data: self.tree_sitter_data.clone(),
5145 non_text_state_update_count: self.non_text_state_update_count,
5146 capability: self.capability,
5147 }
5148 }
5149}
5150
5151impl Deref for BufferSnapshot {
5152 type Target = text::BufferSnapshot;
5153
5154 fn deref(&self) -> &Self::Target {
5155 &self.text
5156 }
5157}
5158
5159unsafe impl Send for BufferChunks<'_> {}
5160
5161impl<'a> BufferChunks<'a> {
5162 pub(crate) fn new(
5163 text: &'a Rope,
5164 range: Range<usize>,
5165 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5166 diagnostics: bool,
5167 buffer_snapshot: Option<&'a BufferSnapshot>,
5168 ) -> Self {
5169 let mut highlights = None;
5170 if let Some((captures, highlight_maps)) = syntax {
5171 highlights = Some(BufferChunkHighlights {
5172 captures,
5173 next_capture: None,
5174 stack: Default::default(),
5175 highlight_maps,
5176 })
5177 }
5178
5179 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5180 let chunks = text.chunks_in_range(range.clone());
5181
5182 let mut this = BufferChunks {
5183 range,
5184 buffer_snapshot,
5185 chunks,
5186 diagnostic_endpoints,
5187 error_depth: 0,
5188 warning_depth: 0,
5189 information_depth: 0,
5190 hint_depth: 0,
5191 unnecessary_depth: 0,
5192 underline: true,
5193 highlights,
5194 };
5195 this.initialize_diagnostic_endpoints();
5196 this
5197 }
5198
5199 /// Seeks to the given byte range in the buffer.
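///
/// Illustrative sketch (not a doc test; assumes a `snapshot: BufferSnapshot`):
///
/// ```ignore
/// let mut chunks = snapshot.chunks(0..snapshot.len(), true);
/// chunks.seek(10..20);
/// let text: String = chunks.map(|chunk| chunk.text).collect();
/// ```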
5200 pub fn seek(&mut self, range: Range<usize>) {
5201 let old_range = std::mem::replace(&mut self.range, range.clone());
5202 self.chunks.set_range(self.range.clone());
5203 if let Some(highlights) = self.highlights.as_mut() {
5204 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5205 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5206 highlights
5207 .stack
5208 .retain(|(end_offset, _)| *end_offset > range.start);
5209 if let Some(capture) = &highlights.next_capture
5210 && range.start >= capture.node.start_byte()
5211 {
5212 let next_capture_end = capture.node.end_byte();
5213 if range.start < next_capture_end {
5214 highlights.stack.push((
5215 next_capture_end,
5216 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5217 ));
5218 }
5219 highlights.next_capture.take();
5220 }
5221 } else if let Some(snapshot) = self.buffer_snapshot {
5222 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5223 *highlights = BufferChunkHighlights {
5224 captures,
5225 next_capture: None,
5226 stack: Default::default(),
5227 highlight_maps,
5228 };
5229 } else {
5230 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5231 // Seeking such BufferChunks is not supported.
5232 debug_assert!(
5233 false,
5234 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5235 );
5236 }
5237
5238 highlights.captures.set_byte_range(self.range.clone());
5239 self.initialize_diagnostic_endpoints();
5240 }
5241 }
5242
5243 fn initialize_diagnostic_endpoints(&mut self) {
5244 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5245 && let Some(buffer) = self.buffer_snapshot
5246 {
5247 let mut diagnostic_endpoints = Vec::new();
5248 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5249 diagnostic_endpoints.push(DiagnosticEndpoint {
5250 offset: entry.range.start,
5251 is_start: true,
5252 severity: entry.diagnostic.severity,
5253 is_unnecessary: entry.diagnostic.is_unnecessary,
5254 underline: entry.diagnostic.underline,
5255 });
5256 diagnostic_endpoints.push(DiagnosticEndpoint {
5257 offset: entry.range.end,
5258 is_start: false,
5259 severity: entry.diagnostic.severity,
5260 is_unnecessary: entry.diagnostic.is_unnecessary,
5261 underline: entry.diagnostic.underline,
5262 });
5263 }
5264 diagnostic_endpoints
5265 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5266 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5267 self.hint_depth = 0;
5268 self.error_depth = 0;
5269 self.warning_depth = 0;
5270 self.information_depth = 0;
5271 }
5272 }
5273
5274 /// The current byte offset in the buffer.
5275 pub fn offset(&self) -> usize {
5276 self.range.start
5277 }
5278
5279 pub fn range(&self) -> Range<usize> {
5280 self.range.clone()
5281 }
5282
5283 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5284 let depth = match endpoint.severity {
5285 DiagnosticSeverity::ERROR => &mut self.error_depth,
5286 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5287 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5288 DiagnosticSeverity::HINT => &mut self.hint_depth,
5289 _ => return,
5290 };
5291 if endpoint.is_start {
5292 *depth += 1;
5293 } else {
5294 *depth -= 1;
5295 }
5296
5297 if endpoint.is_unnecessary {
5298 if endpoint.is_start {
5299 self.unnecessary_depth += 1;
5300 } else {
5301 self.unnecessary_depth -= 1;
5302 }
5303 }
5304 }
5305
5306 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5307 if self.error_depth > 0 {
5308 Some(DiagnosticSeverity::ERROR)
5309 } else if self.warning_depth > 0 {
5310 Some(DiagnosticSeverity::WARNING)
5311 } else if self.information_depth > 0 {
5312 Some(DiagnosticSeverity::INFORMATION)
5313 } else if self.hint_depth > 0 {
5314 Some(DiagnosticSeverity::HINT)
5315 } else {
5316 None
5317 }
5318 }
5319
5320 fn current_code_is_unnecessary(&self) -> bool {
5321 self.unnecessary_depth > 0
5322 }
5323}
5324
5325impl<'a> Iterator for BufferChunks<'a> {
5326 type Item = Chunk<'a>;
5327
5328 fn next(&mut self) -> Option<Self::Item> {
5329 let mut next_capture_start = usize::MAX;
5330 let mut next_diagnostic_endpoint = usize::MAX;
5331
5332 if let Some(highlights) = self.highlights.as_mut() {
5333 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5334 if *parent_capture_end <= self.range.start {
5335 highlights.stack.pop();
5336 } else {
5337 break;
5338 }
5339 }
5340
5341 if highlights.next_capture.is_none() {
5342 highlights.next_capture = highlights.captures.next();
5343 }
5344
5345 while let Some(capture) = highlights.next_capture.as_ref() {
5346 if self.range.start < capture.node.start_byte() {
5347 next_capture_start = capture.node.start_byte();
5348 break;
5349 } else {
5350 let highlight_id =
5351 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5352 highlights
5353 .stack
5354 .push((capture.node.end_byte(), highlight_id));
5355 highlights.next_capture = highlights.captures.next();
5356 }
5357 }
5358 }
5359
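        // Temporarily take the endpoints so that `update_diagnostic_depths`
        // can borrow `self` mutably while we drain them.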
5360 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5361 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5362 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5363 if endpoint.offset <= self.range.start {
5364 self.update_diagnostic_depths(endpoint);
5365 diagnostic_endpoints.next();
5366 self.underline = endpoint.underline;
5367 } else {
5368 next_diagnostic_endpoint = endpoint.offset;
5369 break;
5370 }
5371 }
5372 }
5373 self.diagnostic_endpoints = diagnostic_endpoints;
5374
5375 if let Some(ChunkBitmaps {
5376 text: chunk,
5377 chars: chars_map,
5378 tabs,
5379 }) = self.chunks.peek_with_bitmaps()
5380 {
5381 let chunk_start = self.range.start;
5382 let mut chunk_end = (self.chunks.offset() + chunk.len())
5383 .min(next_capture_start)
5384 .min(next_diagnostic_endpoint);
5385 let mut highlight_id = None;
5386 if let Some(highlights) = self.highlights.as_ref()
5387 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5388 {
5389 chunk_end = chunk_end.min(*parent_capture_end);
5390 highlight_id = Some(*parent_highlight_id);
5391 }
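            // Re-base the per-byte `tabs` and `chars` bitmaps so that bit 0
            // corresponds to the first byte of the emitted slice.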
5392 let bit_start = chunk_start - self.chunks.offset();
5393 let bit_end = chunk_end - self.chunks.offset();
5394
5395 let slice = &chunk[bit_start..bit_end];
5396
5397 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5398 let tabs = (tabs >> bit_start) & mask;
5399 let chars = (chars_map >> bit_start) & mask;
5400
5401 self.range.start = chunk_end;
5402 if self.range.start == self.chunks.offset() + chunk.len() {
5403 self.chunks.next().unwrap();
5404 }
5405
5406 Some(Chunk {
5407 text: slice,
5408 syntax_highlight_id: highlight_id,
5409 underline: self.underline,
5410 diagnostic_severity: self.current_diagnostic_severity(),
5411 is_unnecessary: self.current_code_is_unnecessary(),
5412 tabs,
5413 chars,
5414 ..Chunk::default()
5415 })
5416 } else {
5417 None
5418 }
5419 }
5420}
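// A rough sketch of how `BufferChunks` is typically consumed, assuming a
// `BufferSnapshot::chunks(range, language_aware)` constructor (illustrative
// only, not compiled here):
//
//     let mut text = String::new();
//     for chunk in snapshot.chunks(0..snapshot.len(), true) {
//         // Each `Chunk` pairs a text slice with its syntax highlight id and
//         // any diagnostic severity covering that slice.
//         text.push_str(chunk.text);
//     }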
5421
5422impl operation_queue::Operation for Operation {
5423 fn lamport_timestamp(&self) -> clock::Lamport {
5424 match self {
5425 Operation::Buffer(_) => {
5426 unreachable!("buffer operations should never be deferred at this layer")
5427 }
5428 Operation::UpdateDiagnostics {
5429 lamport_timestamp, ..
5430 }
5431 | Operation::UpdateSelections {
5432 lamport_timestamp, ..
5433 }
5434 | Operation::UpdateCompletionTriggers {
5435 lamport_timestamp, ..
5436 }
5437 | Operation::UpdateLineEnding {
5438 lamport_timestamp, ..
5439 } => *lamport_timestamp,
5440 }
5441 }
5442}
5443
5444impl Default for Diagnostic {
5445 fn default() -> Self {
5446 Self {
5447 source: Default::default(),
5448 source_kind: DiagnosticSourceKind::Other,
5449 code: None,
5450 code_description: None,
5451 severity: DiagnosticSeverity::ERROR,
5452 message: Default::default(),
5453 markdown: None,
5454 group_id: 0,
5455 is_primary: false,
5456 is_disk_based: false,
5457 is_unnecessary: false,
5458 underline: true,
5459 data: None,
5460 registration_id: None,
5461 }
5462 }
5463}
5464
5465impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5467 pub fn spaces(len: u32) -> Self {
5468 Self {
5469 len,
5470 kind: IndentKind::Space,
5471 }
5472 }
5473
5474 /// Returns an [`IndentSize`] representing a tab.
5475 pub fn tab() -> Self {
5476 Self {
5477 len: 1,
5478 kind: IndentKind::Tab,
5479 }
5480 }
5481
5482 /// An iterator over the characters represented by this [`IndentSize`].
5483 pub fn chars(&self) -> impl Iterator<Item = char> {
5484 iter::repeat(self.char()).take(self.len as usize)
5485 }
5486
5487 /// The character representation of this [`IndentSize`].
5488 pub fn char(&self) -> char {
5489 match self.kind {
5490 IndentKind::Space => ' ',
5491 IndentKind::Tab => '\t',
5492 }
5493 }
5494
    /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been grown or shrunk by the given size in the given direction.
    ///
    /// Shrinking only applies when the indent kinds match and the result would
    /// not underflow; growing a zero-length indent adopts the given size, and
    /// growing with a mismatched, nonzero indent leaves it unchanged.
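    ///
    /// A rough sketch of the behavior (illustrative, not a compiled doctest):
    ///
    /// ```ignore
    /// // Growing a four-space indent by two spaces yields six spaces.
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(2)).len, 6);
    /// // Shrinking a space indent by a tab leaves it unchanged, since the kinds differ.
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::tab()).len, 4);
    /// ```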
5497 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5498 match direction {
5499 Ordering::Less => {
5500 if self.kind == size.kind && self.len >= size.len {
5501 self.len -= size.len;
5502 }
5503 }
5504 Ordering::Equal => {}
5505 Ordering::Greater => {
5506 if self.len == 0 {
5507 self = size;
5508 } else if self.kind == size.kind {
5509 self.len += size.len;
5510 }
5511 }
5512 }
5513 self
5514 }
5515
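    /// The number of columns this indent occupies when tabs are expanded to
    /// `tab_size` columns; e.g. two tabs with a tab size of 4 occupy 8 columns,
    /// while two spaces always occupy 2.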
5516 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5517 match self.kind {
5518 IndentKind::Space => self.len as usize,
5519 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5520 }
5521 }
5522}
5523
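/// A minimal in-memory [`File`] implementation for tests.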
5524#[cfg(any(test, feature = "test-support"))]
5525pub struct TestFile {
5526 pub path: Arc<RelPath>,
5527 pub root_name: String,
5528 pub local_root: Option<PathBuf>,
5529}
5530
5531#[cfg(any(test, feature = "test-support"))]
5532impl File for TestFile {
5533 fn path(&self) -> &Arc<RelPath> {
5534 &self.path
5535 }
5536
5537 fn full_path(&self, _: &gpui::App) -> PathBuf {
5538 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5539 }
5540
5541 fn as_local(&self) -> Option<&dyn LocalFile> {
5542 if self.local_root.is_some() {
5543 Some(self)
5544 } else {
5545 None
5546 }
5547 }
5548
5549 fn disk_state(&self) -> DiskState {
5550 unimplemented!()
5551 }
5552
5553 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5554 self.path().file_name().unwrap_or(self.root_name.as_ref())
5555 }
5556
5557 fn worktree_id(&self, _: &App) -> WorktreeId {
5558 WorktreeId::from_usize(0)
5559 }
5560
5561 fn to_proto(&self, _: &App) -> rpc::proto::File {
5562 unimplemented!()
5563 }
5564
5565 fn is_private(&self) -> bool {
5566 false
5567 }
5568
5569 fn path_style(&self, _cx: &App) -> PathStyle {
5570 PathStyle::local()
5571 }
5572}
5573
5574#[cfg(any(test, feature = "test-support"))]
5575impl LocalFile for TestFile {
5576 fn abs_path(&self, _cx: &App) -> PathBuf {
5577 PathBuf::from(self.local_root.as_ref().unwrap())
5578 .join(&self.root_name)
5579 .join(self.path.as_std_path())
5580 }
5581
5582 fn load(&self, _cx: &App) -> Task<Result<String>> {
5583 unimplemented!()
5584 }
5585
5586 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5587 unimplemented!()
5588 }
5589}
5590
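/// Coalesces an iterator of row numbers into ranges of consecutive values,
/// capping each range at `max_len` rows.
///
/// A rough sketch of the expected output (illustrative, not a compiled doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5, 6].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
/// ```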
5591pub(crate) fn contiguous_ranges(
5592 values: impl Iterator<Item = u32>,
5593 max_len: usize,
5594) -> impl Iterator<Item = Range<u32>> {
5595 let mut values = values;
5596 let mut current_range: Option<Range<u32>> = None;
5597 std::iter::from_fn(move || {
5598 loop {
5599 if let Some(value) = values.next() {
5600 if let Some(range) = &mut current_range
5601 && value == range.end
5602 && range.len() < max_len
5603 {
5604 range.end += 1;
5605 continue;
5606 }
5607
5608 let prev_range = current_range.clone();
5609 current_range = Some(value..(value + 1));
5610 if prev_range.is_some() {
5611 return prev_range;
5612 }
5613 } else {
5614 return current_range.take();
5615 }
5616 }
5617 })
5618}
5619
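/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally consulting a language scope for additional word characters.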
5620#[derive(Default, Debug)]
5621pub struct CharClassifier {
5622 scope: Option<LanguageScope>,
5623 scope_context: Option<CharScopeContext>,
5624 ignore_punctuation: bool,
5625}
5626
5627impl CharClassifier {
5628 pub fn new(scope: Option<LanguageScope>) -> Self {
5629 Self {
5630 scope,
5631 scope_context: None,
5632 ignore_punctuation: false,
5633 }
5634 }
5635
5636 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5637 Self {
5638 scope_context,
5639 ..self
5640 }
5641 }
5642
5643 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5644 Self {
5645 ignore_punctuation,
5646 ..self
5647 }
5648 }
5649
5650 pub fn is_whitespace(&self, c: char) -> bool {
5651 self.kind(c) == CharKind::Whitespace
5652 }
5653
5654 pub fn is_word(&self, c: char) -> bool {
5655 self.kind(c) == CharKind::Word
5656 }
5657
5658 pub fn is_punctuation(&self, c: char) -> bool {
5659 self.kind(c) == CharKind::Punctuation
5660 }
5661
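    /// Classifies `c`, optionally treating punctuation as part of a word.
    ///
    /// A rough sketch of the default behavior without a language scope
    /// (illustrative, not a compiled doctest):
    ///
    /// ```ignore
    /// let classifier = CharClassifier::new(None);
    /// assert_eq!(classifier.kind('a'), CharKind::Word);
    /// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
    /// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
    /// assert_eq!(classifier.ignore_punctuation(true).kind('-'), CharKind::Word);
    /// ```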
5662 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5663 if c.is_alphanumeric() || c == '_' {
5664 return CharKind::Word;
5665 }
5666
5667 if let Some(scope) = &self.scope {
5668 let characters = match self.scope_context {
5669 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5670 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5671 None => scope.word_characters(),
5672 };
5673 if let Some(characters) = characters
5674 && characters.contains(&c)
5675 {
5676 return CharKind::Word;
5677 }
5678 }
5679
5680 if c.is_whitespace() {
5681 return CharKind::Whitespace;
5682 }
5683
5684 if ignore_punctuation {
5685 CharKind::Word
5686 } else {
5687 CharKind::Punctuation
5688 }
5689 }
5690
5691 pub fn kind(&self, c: char) -> CharKind {
5692 self.kind_with(c, self.ignore_punctuation)
5693 }
5694}
5695
5696/// Find all of the ranges of whitespace that occur at the ends of lines
5697/// in the given rope.
5698///
5699/// This could also be done with a regex search, but this implementation
5700/// avoids copying text.
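///
/// A rough sketch of the expected output (illustrative, not a compiled doctest;
/// assumes a `Rope` can be built from a `&str`):
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n    body\t\n}\n");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..13, 22..23]);
/// ```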
5701pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5702 let mut ranges = Vec::new();
5703
5704 let mut offset = 0;
5705 let mut prev_chunk_trailing_whitespace_range = 0..0;
5706 for chunk in rope.chunks() {
5707 let mut prev_line_trailing_whitespace_range = 0..0;
5708 for (i, line) in chunk.split('\n').enumerate() {
5709 let line_end_offset = offset + line.len();
5710 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5711 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5712
5713 if i == 0 && trimmed_line_len == 0 {
5714 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5715 }
5716 if !prev_line_trailing_whitespace_range.is_empty() {
5717 ranges.push(prev_line_trailing_whitespace_range);
5718 }
5719
5720 offset = line_end_offset + 1;
5721 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5722 }
5723
5724 offset -= 1;
5725 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5726 }
5727
5728 if !prev_chunk_trailing_whitespace_range.is_empty() {
5729 ranges.push(prev_chunk_trailing_whitespace_range);
5730 }
5731
5732 ranges
5733}