1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::channel::oneshot;
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TaskLabel, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::{Arc, LazyLock},
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
79/// A label for the background task spawned by the buffer to compute
80/// a diff against the contents of its file.
81pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
82
/// Indicates whether a [`Buffer`] has permission to edit.
84#[derive(PartialEq, Clone, Copy, Debug)]
85pub enum Capability {
86 /// The buffer is a mutable replica.
87 ReadWrite,
88 /// The buffer is a mutable replica, but toggled to read-only.
89 Read,
90 /// The buffer is a read-only replica.
91 ReadOnly,
92}
93
94impl Capability {
95 /// Returns `true` if the capability is `ReadWrite`.
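    ///
    /// A minimal sketch:
    ///
    /// ```ignore
    /// assert!(Capability::ReadWrite.editable());
    /// assert!(!Capability::Read.editable());
    /// assert!(!Capability::ReadOnly.editable());
    /// ```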
96 pub fn editable(self) -> bool {
97 matches!(self, Capability::ReadWrite)
98 }
99}
100
101pub type BufferRow = u32;
102
103/// An in-memory representation of a source code file, including its text,
104/// syntax trees, git status, and diagnostics.
105pub struct Buffer {
106 text: TextBuffer,
107 branch_state: Option<BufferBranchState>,
108 /// Filesystem state, `None` when there is no path.
109 file: Option<Arc<dyn File>>,
110 /// The mtime of the file when this buffer was last loaded from
111 /// or saved to disk.
112 saved_mtime: Option<MTime>,
113 /// The version vector when this buffer was last loaded from
114 /// or saved to disk.
115 saved_version: clock::Global,
116 preview_version: clock::Global,
117 transaction_depth: usize,
118 was_dirty_before_starting_transaction: Option<bool>,
119 reload_task: Option<Task<Result<()>>>,
120 language: Option<Arc<Language>>,
121 autoindent_requests: Vec<Arc<AutoindentRequest>>,
122 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
123 pending_autoindent: Option<Task<()>>,
124 sync_parse_timeout: Duration,
125 syntax_map: Mutex<SyntaxMap>,
126 reparse: Option<Task<()>>,
127 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
128 non_text_state_update_count: usize,
129 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
130 remote_selections: TreeMap<ReplicaId, SelectionSet>,
131 diagnostics_timestamp: clock::Lamport,
132 completion_triggers: BTreeSet<String>,
133 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
134 completion_triggers_timestamp: clock::Lamport,
135 deferred_ops: OperationQueue<Operation>,
136 capability: Capability,
137 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell holds `(self.version, has_changes)` as of the most recent call.
140 has_unsaved_edits: Cell<(clock::Global, bool)>,
141 change_bits: Vec<rc::Weak<Cell<bool>>>,
142 _subscriptions: Vec<gpui::Subscription>,
143 tree_sitter_data: Arc<TreeSitterData>,
144 encoding: &'static Encoding,
145 has_bom: bool,
146}
147
148#[derive(Debug)]
149pub struct TreeSitterData {
150 chunks: RowChunks,
151 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
152}
153
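/// The maximum number of buffer rows covered by a single chunk of cached,
/// tree-sitter-derived data (see [`TreeSitterData`]).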
154const MAX_ROWS_IN_A_CHUNK: u32 = 50;
155
156impl TreeSitterData {
157 fn clear(&mut self, snapshot: text::BufferSnapshot) {
158 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
159 self.brackets_by_chunks.get_mut().clear();
160 self.brackets_by_chunks
161 .get_mut()
162 .resize(self.chunks.len(), None);
163 }
164
165 fn new(snapshot: text::BufferSnapshot) -> Self {
166 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
167 Self {
168 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
169 chunks,
170 }
171 }
172
173 fn version(&self) -> &clock::Global {
174 self.chunks.version()
175 }
176}
177
178#[derive(Copy, Clone, Debug, PartialEq, Eq)]
179pub enum ParseStatus {
180 Idle,
181 Parsing,
182}
183
184struct BufferBranchState {
185 base_buffer: Entity<Buffer>,
186 merged_operations: Vec<Lamport>,
187}
188
189/// An immutable, cheaply cloneable representation of a fixed
190/// state of a buffer.
191pub struct BufferSnapshot {
192 pub text: text::BufferSnapshot,
193 pub syntax: SyntaxSnapshot,
194 file: Option<Arc<dyn File>>,
195 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
196 remote_selections: TreeMap<ReplicaId, SelectionSet>,
197 language: Option<Arc<Language>>,
198 non_text_state_update_count: usize,
199 tree_sitter_data: Arc<TreeSitterData>,
200 pub capability: Capability,
201}
202
203/// The kind and amount of indentation in a particular line. For now,
204/// assumes that indentation is all the same character.
205#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
206pub struct IndentSize {
207 /// The number of bytes that comprise the indentation.
208 pub len: u32,
209 /// The kind of whitespace used for indentation.
210 pub kind: IndentKind,
211}
212
213/// A whitespace character that's used for indentation.
214#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
215pub enum IndentKind {
216 /// An ASCII space character.
217 #[default]
218 Space,
219 /// An ASCII tab character.
220 Tab,
221}
222
223/// The shape of a selection cursor.
224#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
225pub enum CursorShape {
226 /// A vertical bar
227 #[default]
228 Bar,
229 /// A block that surrounds the following character
230 Block,
231 /// An underline that runs along the following character
232 Underline,
233 /// A box drawn around the following character
234 Hollow,
235}
236
237impl From<settings::CursorShape> for CursorShape {
238 fn from(shape: settings::CursorShape) -> Self {
239 match shape {
240 settings::CursorShape::Bar => CursorShape::Bar,
241 settings::CursorShape::Block => CursorShape::Block,
242 settings::CursorShape::Underline => CursorShape::Underline,
243 settings::CursorShape::Hollow => CursorShape::Hollow,
244 }
245 }
246}
247
248#[derive(Clone, Debug)]
249struct SelectionSet {
250 line_mode: bool,
251 cursor_shape: CursorShape,
252 selections: Arc<[Selection<Anchor>]>,
253 lamport_timestamp: clock::Lamport,
254}
255
256/// A diagnostic associated with a certain range of a buffer.
257#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
258pub struct Diagnostic {
259 /// The name of the service that produced this diagnostic.
260 pub source: Option<String>,
261 /// The ID provided by the dynamic registration that produced this diagnostic.
262 pub registration_id: Option<SharedString>,
263 /// A machine-readable code that identifies this diagnostic.
264 pub code: Option<NumberOrString>,
265 pub code_description: Option<lsp::Uri>,
266 /// Whether this diagnostic is a hint, warning, or error.
267 pub severity: DiagnosticSeverity,
268 /// The human-readable message associated with this diagnostic.
269 pub message: String,
    /// The human-readable message in Markdown format, if any.
271 pub markdown: Option<String>,
272 /// An id that identifies the group to which this diagnostic belongs.
273 ///
274 /// When a language server produces a diagnostic with
275 /// one or more associated diagnostics, those diagnostics are all
276 /// assigned a single group ID.
277 pub group_id: usize,
278 /// Whether this diagnostic is the primary diagnostic for its group.
279 ///
280 /// In a given group, the primary diagnostic is the top-level diagnostic
281 /// returned by the language server. The non-primary diagnostics are the
282 /// associated diagnostics.
283 pub is_primary: bool,
284 /// Whether this diagnostic is considered to originate from an analysis of
285 /// files on disk, as opposed to any unsaved buffer contents. This is a
286 /// property of a given diagnostic source, and is configured for a given
287 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
288 /// for the language server.
289 pub is_disk_based: bool,
290 /// Whether this diagnostic marks unnecessary code.
291 pub is_unnecessary: bool,
    /// A coarse classification of this diagnostic by the kind of source that produced it.
293 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back
    /// to the server when code actions are requested for this diagnostic.
295 pub data: Option<Value>,
296 /// Whether to underline the corresponding text range in the editor.
297 pub underline: bool,
298}
299
300#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
301pub enum DiagnosticSourceKind {
302 Pulled,
303 Pushed,
304 Other,
305}
306
307/// An operation used to synchronize this buffer with its other replicas.
308#[derive(Clone, Debug, PartialEq)]
309pub enum Operation {
310 /// A text operation.
311 Buffer(text::Operation),
312
313 /// An update to the buffer's diagnostics.
314 UpdateDiagnostics {
315 /// The id of the language server that produced the new diagnostics.
316 server_id: LanguageServerId,
317 /// The diagnostics.
318 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
319 /// The buffer's lamport timestamp.
320 lamport_timestamp: clock::Lamport,
321 },
322
323 /// An update to the most recent selections in this buffer.
324 UpdateSelections {
325 /// The selections.
326 selections: Arc<[Selection<Anchor>]>,
327 /// The buffer's lamport timestamp.
328 lamport_timestamp: clock::Lamport,
329 /// Whether the selections are in 'line mode'.
330 line_mode: bool,
331 /// The [`CursorShape`] associated with these selections.
332 cursor_shape: CursorShape,
333 },
334
335 /// An update to the characters that should trigger autocompletion
336 /// for this buffer.
337 UpdateCompletionTriggers {
338 /// The characters that trigger autocompletion.
339 triggers: Vec<String>,
340 /// The buffer's lamport timestamp.
341 lamport_timestamp: clock::Lamport,
342 /// The language server ID.
343 server_id: LanguageServerId,
344 },
345
346 /// An update to the line ending type of this buffer.
347 UpdateLineEnding {
348 /// The line ending type.
349 line_ending: LineEnding,
350 /// The buffer's lamport timestamp.
351 lamport_timestamp: clock::Lamport,
352 },
353}
354
355/// An event that occurs in a buffer.
356#[derive(Clone, Debug, PartialEq)]
357pub enum BufferEvent {
358 /// The buffer was changed in a way that must be
359 /// propagated to its other replicas.
360 Operation {
361 operation: Operation,
362 is_local: bool,
363 },
364 /// The buffer was edited.
365 Edited,
366 /// The buffer's `dirty` bit changed.
367 DirtyChanged,
368 /// The buffer was saved.
369 Saved,
370 /// The buffer's file was changed on disk.
371 FileHandleChanged,
372 /// The buffer was reloaded.
373 Reloaded,
    /// The buffer needs to be reloaded.
375 ReloadNeeded,
376 /// The buffer's language was changed.
    /// The boolean indicates whether the buffer previously had no language
    /// (or only plain text) and now has one assigned.
378 LanguageChanged(bool),
379 /// The buffer's syntax trees were updated.
380 Reparsed,
381 /// The buffer's diagnostics were updated.
382 DiagnosticsUpdated,
383 /// The buffer gained or lost editing capabilities.
384 CapabilityChanged,
385}
386
387/// The file associated with a buffer.
388pub trait File: Send + Sync + Any {
389 /// Returns the [`LocalFile`] associated with this file, if the
390 /// file is local.
391 fn as_local(&self) -> Option<&dyn LocalFile>;
392
393 /// Returns whether this file is local.
394 fn is_local(&self) -> bool {
395 self.as_local().is_some()
396 }
397
398 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
399 /// only available in some states, such as modification time.
400 fn disk_state(&self) -> DiskState;
401
402 /// Returns the path of this file relative to the worktree's root directory.
403 fn path(&self) -> &Arc<RelPath>;
404
405 /// Returns the path of this file relative to the worktree's parent directory (this means it
406 /// includes the name of the worktree's root folder).
407 fn full_path(&self, cx: &App) -> PathBuf;
408
409 /// Returns the path style of this file.
410 fn path_style(&self, cx: &App) -> PathStyle;
411
412 /// Returns the last component of this handle's absolute path. If this handle refers to the root
413 /// of its worktree, then this method will return the name of the worktree itself.
414 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
415
416 /// Returns the id of the worktree to which this file belongs.
417 ///
418 /// This is needed for looking up project-specific settings.
419 fn worktree_id(&self, cx: &App) -> WorktreeId;
420
421 /// Converts this file into a protobuf message.
422 fn to_proto(&self, cx: &App) -> rpc::proto::File;
423
    /// Returns whether Zed considers this to be a private file.
425 fn is_private(&self) -> bool;
426}
427
428/// The file's storage status - whether it's stored (`Present`), and if so when it was last
429/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
430/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
431/// indicator for new files.
432#[derive(Copy, Clone, Debug, PartialEq)]
433pub enum DiskState {
434 /// File created in Zed that has not been saved.
435 New,
436 /// File present on the filesystem.
437 Present { mtime: MTime },
438 /// Deleted file that was previously present.
439 Deleted,
440}
441
442impl DiskState {
443 /// Returns the file's last known modification time on disk.
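    ///
    /// A minimal sketch of the non-`Present` cases:
    ///
    /// ```ignore
    /// assert_eq!(DiskState::New.mtime(), None);
    /// assert_eq!(DiskState::Deleted.mtime(), None);
    /// ```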
444 pub fn mtime(self) -> Option<MTime> {
445 match self {
446 DiskState::New => None,
447 DiskState::Present { mtime } => Some(mtime),
448 DiskState::Deleted => None,
449 }
450 }
451
452 pub fn exists(&self) -> bool {
453 match self {
454 DiskState::New => false,
455 DiskState::Present { .. } => true,
456 DiskState::Deleted => false,
457 }
458 }
459}
460
461/// The file associated with a buffer, in the case where the file is on the local disk.
462pub trait LocalFile: File {
    /// Returns the absolute path of this file.
464 fn abs_path(&self, cx: &App) -> PathBuf;
465
466 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
467 fn load(&self, cx: &App) -> Task<Result<String>>;
468
469 /// Loads the file's contents from disk.
470 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
471}
472
473/// The auto-indent behavior associated with an editing operation.
474/// For some editing operations, each affected line of text has its
475/// indentation recomputed. For other operations, the entire block
476/// of edited text is adjusted uniformly.
477#[derive(Clone, Debug)]
478pub enum AutoindentMode {
479 /// Indent each line of inserted text.
480 EachLine,
481 /// Apply the same indentation adjustment to all of the lines
482 /// in a given insertion.
483 Block {
484 /// The original indentation column of the first line of each
485 /// insertion, if it has been copied.
486 ///
487 /// Knowing this makes it possible to preserve the relative indentation
488 /// of every line in the insertion from when it was copied.
489 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion has its indentation adjusted by `b - a`.
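        ///
        /// For example, if the copied text originally sat at indent column 4
        /// (`a = 4`) and the first inserted line is auto-indented to column 8
        /// (`b = 8`), every subsequent line of the insertion is shifted right
        /// by 4 columns.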
493 original_indent_columns: Vec<Option<u32>>,
494 },
495}
496
497#[derive(Clone)]
498struct AutoindentRequest {
499 before_edit: BufferSnapshot,
500 entries: Vec<AutoindentRequestEntry>,
501 is_block_mode: bool,
502 ignore_empty_lines: bool,
503}
504
505#[derive(Debug, Clone)]
506struct AutoindentRequestEntry {
507 /// A range of the buffer whose indentation should be adjusted.
508 range: Range<Anchor>,
509 /// Whether or not these lines should be considered brand new, for the
510 /// purpose of auto-indent. When text is not new, its indentation will
511 /// only be adjusted if the suggested indentation level has *changed*
512 /// since the edit was made.
513 first_line_is_new: bool,
514 indent_size: IndentSize,
515 original_indent_column: Option<u32>,
516}
517
518#[derive(Debug)]
519struct IndentSuggestion {
520 basis_row: u32,
521 delta: Ordering,
522 within_error: bool,
523}
524
525struct BufferChunkHighlights<'a> {
526 captures: SyntaxMapCaptures<'a>,
527 next_capture: Option<SyntaxMapCapture<'a>>,
528 stack: Vec<(usize, HighlightId)>,
529 highlight_maps: Vec<HighlightMap>,
530}
531
532/// An iterator that yields chunks of a buffer's text, along with their
533/// syntax highlights and diagnostic status.
534pub struct BufferChunks<'a> {
535 buffer_snapshot: Option<&'a BufferSnapshot>,
536 range: Range<usize>,
537 chunks: text::Chunks<'a>,
538 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
539 error_depth: usize,
540 warning_depth: usize,
541 information_depth: usize,
542 hint_depth: usize,
543 unnecessary_depth: usize,
544 underline: bool,
545 highlights: Option<BufferChunkHighlights<'a>>,
546}
547
548/// A chunk of a buffer's text, along with its syntax highlight and
549/// diagnostic status.
550#[derive(Clone, Debug, Default)]
551pub struct Chunk<'a> {
552 /// The text of the chunk.
553 pub text: &'a str,
554 /// The syntax highlighting style of the chunk.
555 pub syntax_highlight_id: Option<HighlightId>,
556 /// The highlight style that has been applied to this chunk in
557 /// the editor.
558 pub highlight_style: Option<HighlightStyle>,
559 /// The severity of diagnostic associated with this chunk, if any.
560 pub diagnostic_severity: Option<DiagnosticSeverity>,
561 /// A bitset of which characters are tabs in this string.
562 pub tabs: u128,
    /// A bitmap of character indices in this chunk.
564 pub chars: u128,
565 /// Whether this chunk of text is marked as unnecessary.
566 pub is_unnecessary: bool,
567 /// Whether this chunk of text was originally a tab character.
568 pub is_tab: bool,
569 /// Whether this chunk of text was originally an inlay.
570 pub is_inlay: bool,
571 /// Whether to underline the corresponding text range in the editor.
572 pub underline: bool,
573}
574
575/// A set of edits to a given version of a buffer, computed asynchronously.
576#[derive(Debug)]
577pub struct Diff {
578 pub base_version: clock::Global,
579 pub line_ending: LineEnding,
580 pub edits: Vec<(Range<usize>, Arc<str>)>,
581}
582
583#[derive(Debug, Clone, Copy)]
584pub(crate) struct DiagnosticEndpoint {
585 offset: usize,
586 is_start: bool,
587 underline: bool,
588 severity: DiagnosticSeverity,
589 is_unnecessary: bool,
590}
591
592/// A class of characters, used for characterizing a run of text.
593#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
594pub enum CharKind {
595 /// Whitespace.
596 Whitespace,
597 /// Punctuation.
598 Punctuation,
599 /// Word.
600 Word,
601}
602
603/// Context for character classification within a specific scope.
604#[derive(Copy, Clone, Eq, PartialEq, Debug)]
605pub enum CharScopeContext {
606 /// Character classification for completion queries.
607 ///
608 /// This context treats certain characters as word constituents that would
609 /// normally be considered punctuation, such as '-' in Tailwind classes
610 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
611 Completion,
612 /// Character classification for linked edits.
613 ///
614 /// This context handles characters that should be treated as part of
615 /// identifiers during linked editing operations, such as '.' in JSX
616 /// component names like `<Animated.View>`.
617 LinkedEdit,
618}
619
/// A runnable is a set of data about a buffer region that can be resolved into a task.
621pub struct Runnable {
622 pub tags: SmallVec<[RunnableTag; 1]>,
623 pub language: Arc<Language>,
624 pub buffer: BufferId,
625}
626
627#[derive(Default, Clone, Debug)]
628pub struct HighlightedText {
629 pub text: SharedString,
630 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
631}
632
633#[derive(Default, Debug)]
634struct HighlightedTextBuilder {
635 pub text: String,
636 highlights: Vec<(Range<usize>, HighlightStyle)>,
637}
638
639impl HighlightedText {
640 pub fn from_buffer_range<T: ToOffset>(
641 range: Range<T>,
642 snapshot: &text::BufferSnapshot,
643 syntax_snapshot: &SyntaxSnapshot,
644 override_style: Option<HighlightStyle>,
645 syntax_theme: &SyntaxTheme,
646 ) -> Self {
647 let mut highlighted_text = HighlightedTextBuilder::default();
648 highlighted_text.add_text_from_buffer_range(
649 range,
650 snapshot,
651 syntax_snapshot,
652 override_style,
653 syntax_theme,
654 );
655 highlighted_text.build()
656 }
657
658 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
659 gpui::StyledText::new(self.text.clone())
660 .with_default_highlights(default_style, self.highlights.iter().cloned())
661 }
662
    /// Returns the first line, with leading whitespace trimmed unless a
    /// highlight begins within it, along with a boolean indicating whether
    /// more lines follow.
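    ///
    /// For example, given the text `"    foo();\nbar();"` with no highlights,
    /// the preview is `"foo();"` (leading whitespace trimmed) and the boolean
    /// is `true`.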
665 pub fn first_line_preview(self) -> (Self, bool) {
666 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
667 let first_line = &self.text[..newline_ix];
668
669 // Trim leading whitespace, unless an edit starts prior to it.
670 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
671 if let Some((first_highlight_range, _)) = self.highlights.first() {
672 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
673 }
674
675 let preview_text = &first_line[preview_start_ix..];
676 let preview_highlights = self
677 .highlights
678 .into_iter()
679 .skip_while(|(range, _)| range.end <= preview_start_ix)
680 .take_while(|(range, _)| range.start < newline_ix)
681 .filter_map(|(mut range, highlight)| {
682 range.start = range.start.saturating_sub(preview_start_ix);
683 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
684 if range.is_empty() {
685 None
686 } else {
687 Some((range, highlight))
688 }
689 });
690
691 let preview = Self {
692 text: SharedString::new(preview_text),
693 highlights: preview_highlights.collect(),
694 };
695
696 (preview, self.text.len() > newline_ix)
697 }
698}
699
700impl HighlightedTextBuilder {
701 pub fn build(self) -> HighlightedText {
702 HighlightedText {
703 text: self.text.into(),
704 highlights: self.highlights,
705 }
706 }
707
708 pub fn add_text_from_buffer_range<T: ToOffset>(
709 &mut self,
710 range: Range<T>,
711 snapshot: &text::BufferSnapshot,
712 syntax_snapshot: &SyntaxSnapshot,
713 override_style: Option<HighlightStyle>,
714 syntax_theme: &SyntaxTheme,
715 ) {
716 let range = range.to_offset(snapshot);
717 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
718 let start = self.text.len();
719 self.text.push_str(chunk.text);
720 let end = self.text.len();
721
722 if let Some(highlight_style) = chunk
723 .syntax_highlight_id
724 .and_then(|id| id.style(syntax_theme))
725 {
726 let highlight_style = override_style.map_or(highlight_style, |override_style| {
727 highlight_style.highlight(override_style)
728 });
729 self.highlights.push((start..end, highlight_style));
730 } else if let Some(override_style) = override_style {
731 self.highlights.push((start..end, override_style));
732 }
733 }
734 }
735
736 fn highlighted_chunks<'a>(
737 range: Range<usize>,
738 snapshot: &'a text::BufferSnapshot,
739 syntax_snapshot: &'a SyntaxSnapshot,
740 ) -> BufferChunks<'a> {
741 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
742 grammar
743 .highlights_config
744 .as_ref()
745 .map(|config| &config.query)
746 });
747
748 let highlight_maps = captures
749 .grammars()
750 .iter()
751 .map(|grammar| grammar.highlight_map())
752 .collect();
753
754 BufferChunks::new(
755 snapshot.as_rope(),
756 range,
757 Some((captures, highlight_maps)),
758 false,
759 None,
760 )
761 }
762}
763
764#[derive(Clone)]
765pub struct EditPreview {
766 old_snapshot: text::BufferSnapshot,
767 applied_edits_snapshot: text::BufferSnapshot,
768 syntax_snapshot: SyntaxSnapshot,
769}
770
771impl EditPreview {
772 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
773 let (first, _) = edits.first()?;
774 let (last, _) = edits.last()?;
775
776 let start = first.start.to_point(&self.old_snapshot);
777 let old_end = last.end.to_point(&self.old_snapshot);
778 let new_end = last
779 .end
780 .bias_right(&self.old_snapshot)
781 .to_point(&self.applied_edits_snapshot);
782
783 let start = Point::new(start.row.saturating_sub(3), 0);
784 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
785 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
786
787 Some(unified_diff(
788 &self
789 .old_snapshot
790 .text_for_range(start..old_end)
791 .collect::<String>(),
792 &self
793 .applied_edits_snapshot
794 .text_for_range(start..new_end)
795 .collect::<String>(),
796 ))
797 }
798
799 pub fn highlight_edits(
800 &self,
801 current_snapshot: &BufferSnapshot,
802 edits: &[(Range<Anchor>, impl AsRef<str>)],
803 include_deletions: bool,
804 cx: &App,
805 ) -> HighlightedText {
806 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
807 return HighlightedText::default();
808 };
809
810 let mut highlighted_text = HighlightedTextBuilder::default();
811
812 let visible_range_in_preview_snapshot =
813 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
814 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
815
816 let insertion_highlight_style = HighlightStyle {
817 background_color: Some(cx.theme().status().created_background),
818 ..Default::default()
819 };
820 let deletion_highlight_style = HighlightStyle {
821 background_color: Some(cx.theme().status().deleted_background),
822 ..Default::default()
823 };
824 let syntax_theme = cx.theme().syntax();
825
826 for (range, edit_text) in edits {
827 let edit_new_end_in_preview_snapshot = range
828 .end
829 .bias_right(&self.old_snapshot)
830 .to_offset(&self.applied_edits_snapshot);
831 let edit_start_in_preview_snapshot =
832 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
833
834 let unchanged_range_in_preview_snapshot =
835 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
836 if !unchanged_range_in_preview_snapshot.is_empty() {
837 highlighted_text.add_text_from_buffer_range(
838 unchanged_range_in_preview_snapshot,
839 &self.applied_edits_snapshot,
840 &self.syntax_snapshot,
841 None,
842 syntax_theme,
843 );
844 }
845
846 let range_in_current_snapshot = range.to_offset(current_snapshot);
847 if include_deletions && !range_in_current_snapshot.is_empty() {
848 highlighted_text.add_text_from_buffer_range(
849 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
852 Some(deletion_highlight_style),
853 syntax_theme,
854 );
855 }
856
857 if !edit_text.as_ref().is_empty() {
858 highlighted_text.add_text_from_buffer_range(
859 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
860 &self.applied_edits_snapshot,
861 &self.syntax_snapshot,
862 Some(insertion_highlight_style),
863 syntax_theme,
864 );
865 }
866
867 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
868 }
869
870 highlighted_text.add_text_from_buffer_range(
871 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
872 &self.applied_edits_snapshot,
873 &self.syntax_snapshot,
874 None,
875 syntax_theme,
876 );
877
878 highlighted_text.build()
879 }
880
881 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
882 cx.new(|cx| {
883 let mut buffer = Buffer::local_normalized(
884 self.applied_edits_snapshot.as_rope().clone(),
885 self.applied_edits_snapshot.line_ending(),
886 cx,
887 );
888 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
889 buffer
890 })
891 }
892
893 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
894 let (first, _) = edits.first()?;
895 let (last, _) = edits.last()?;
896
897 let start = first
898 .start
899 .bias_left(&self.old_snapshot)
900 .to_point(&self.applied_edits_snapshot);
901 let end = last
902 .end
903 .bias_right(&self.old_snapshot)
904 .to_point(&self.applied_edits_snapshot);
905
906 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
907 let range = Point::new(start.row, 0)
908 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
909
910 Some(range)
911 }
912}
913
914#[derive(Clone, Debug, PartialEq, Eq)]
915pub struct BracketMatch<T> {
916 pub open_range: Range<T>,
917 pub close_range: Range<T>,
918 pub newline_only: bool,
919 pub syntax_layer_depth: usize,
920 pub color_index: Option<usize>,
921}
922
923impl<T> BracketMatch<T> {
924 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
925 (self.open_range, self.close_range)
926 }
927}
928
929impl Buffer {
930 /// Create a new buffer with the given base text.
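    ///
    /// A minimal usage sketch (inside an entity constructor):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```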
931 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
932 Self::build(
933 TextBuffer::new(
934 ReplicaId::LOCAL,
935 cx.entity_id().as_non_zero_u64().into(),
936 base_text.into(),
937 ),
938 None,
939 Capability::ReadWrite,
940 )
941 }
942
943 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
944 pub fn local_normalized(
945 base_text_normalized: Rope,
946 line_ending: LineEnding,
947 cx: &Context<Self>,
948 ) -> Self {
949 Self::build(
950 TextBuffer::new_normalized(
951 ReplicaId::LOCAL,
952 cx.entity_id().as_non_zero_u64().into(),
953 line_ending,
954 base_text_normalized,
955 ),
956 None,
957 Capability::ReadWrite,
958 )
959 }
960
961 /// Create a new buffer that is a replica of a remote buffer.
962 pub fn remote(
963 remote_id: BufferId,
964 replica_id: ReplicaId,
965 capability: Capability,
966 base_text: impl Into<String>,
967 ) -> Self {
968 Self::build(
969 TextBuffer::new(replica_id, remote_id, base_text.into()),
970 None,
971 capability,
972 )
973 }
974
975 /// Create a new buffer that is a replica of a remote buffer, populating its
976 /// state from the given protobuf message.
977 pub fn from_proto(
978 replica_id: ReplicaId,
979 capability: Capability,
980 message: proto::BufferState,
981 file: Option<Arc<dyn File>>,
982 ) -> Result<Self> {
983 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
984 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
985 let mut this = Self::build(buffer, file, capability);
986 this.text.set_line_ending(proto::deserialize_line_ending(
987 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
988 ));
989 this.saved_version = proto::deserialize_version(&message.saved_version);
990 this.saved_mtime = message.saved_mtime.map(|time| time.into());
991 Ok(this)
992 }
993
994 /// Serialize the buffer's state to a protobuf message.
995 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
996 proto::BufferState {
997 id: self.remote_id().into(),
998 file: self.file.as_ref().map(|f| f.to_proto(cx)),
999 base_text: self.base_text().to_string(),
1000 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
1001 saved_version: proto::serialize_version(&self.saved_version),
1002 saved_mtime: self.saved_mtime.map(|time| time.into()),
1003 }
1004 }
1005
1006 /// Serialize as protobufs all of the changes to the buffer since the given version.
1007 pub fn serialize_ops(
1008 &self,
1009 since: Option<clock::Global>,
1010 cx: &App,
1011 ) -> Task<Vec<proto::Operation>> {
1012 let mut operations = Vec::new();
1013 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1014
1015 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1016 proto::serialize_operation(&Operation::UpdateSelections {
1017 selections: set.selections.clone(),
1018 lamport_timestamp: set.lamport_timestamp,
1019 line_mode: set.line_mode,
1020 cursor_shape: set.cursor_shape,
1021 })
1022 }));
1023
1024 for (server_id, diagnostics) in &self.diagnostics {
1025 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1026 lamport_timestamp: self.diagnostics_timestamp,
1027 server_id: *server_id,
1028 diagnostics: diagnostics.iter().cloned().collect(),
1029 }));
1030 }
1031
1032 for (server_id, completions) in &self.completion_triggers_per_language_server {
1033 operations.push(proto::serialize_operation(
1034 &Operation::UpdateCompletionTriggers {
1035 triggers: completions.iter().cloned().collect(),
1036 lamport_timestamp: self.completion_triggers_timestamp,
1037 server_id: *server_id,
1038 },
1039 ));
1040 }
1041
1042 let text_operations = self.text.operations().clone();
1043 cx.background_spawn(async move {
1044 let since = since.unwrap_or_default();
1045 operations.extend(
1046 text_operations
1047 .iter()
1048 .filter(|(_, op)| !since.observed(op.timestamp()))
1049 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1050 );
1051 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1052 operations
1053 })
1054 }
1055
1056 /// Assign a language to the buffer, returning the buffer.
1057 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1058 self.set_language_async(Some(language), cx);
1059 self
1060 }
1061
1062 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
1063 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1064 self.set_language(Some(language), cx);
1065 self
1066 }
1067
1068 /// Returns the [`Capability`] of this buffer.
1069 pub fn capability(&self) -> Capability {
1070 self.capability
1071 }
1072
1073 /// Whether this buffer can only be read.
1074 pub fn read_only(&self) -> bool {
1075 !self.capability.editable()
1076 }
1077
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`],
    /// and [`Capability`].
1079 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1080 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1081 let snapshot = buffer.snapshot();
1082 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1083 let tree_sitter_data = TreeSitterData::new(snapshot);
1084 Self {
1085 saved_mtime,
1086 tree_sitter_data: Arc::new(tree_sitter_data),
1087 saved_version: buffer.version(),
1088 preview_version: buffer.version(),
1089 reload_task: None,
1090 transaction_depth: 0,
1091 was_dirty_before_starting_transaction: None,
1092 has_unsaved_edits: Cell::new((buffer.version(), false)),
1093 text: buffer,
1094 branch_state: None,
1095 file,
1096 capability,
1097 syntax_map,
1098 reparse: None,
1099 non_text_state_update_count: 0,
1100 sync_parse_timeout: Duration::from_millis(1),
1101 parse_status: watch::channel(ParseStatus::Idle),
1102 autoindent_requests: Default::default(),
1103 wait_for_autoindent_txs: Default::default(),
1104 pending_autoindent: Default::default(),
1105 language: None,
1106 remote_selections: Default::default(),
1107 diagnostics: Default::default(),
1108 diagnostics_timestamp: Lamport::MIN,
1109 completion_triggers: Default::default(),
1110 completion_triggers_per_language_server: Default::default(),
1111 completion_triggers_timestamp: Lamport::MIN,
1112 deferred_ops: OperationQueue::new(),
1113 has_conflict: false,
1114 change_bits: Default::default(),
1115 _subscriptions: Vec::new(),
1116 encoding: encoding_rs::UTF_8,
1117 has_bom: false,
1118 }
1119 }
1120
1121 pub fn build_snapshot(
1122 text: Rope,
1123 language: Option<Arc<Language>>,
1124 language_registry: Option<Arc<LanguageRegistry>>,
1125 cx: &mut App,
1126 ) -> impl Future<Output = BufferSnapshot> + use<> {
1127 let entity_id = cx.reserve_entity::<Self>().entity_id();
1128 let buffer_id = entity_id.as_non_zero_u64().into();
1129 async move {
1130 let text =
1131 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1132 .snapshot();
1133 let mut syntax = SyntaxMap::new(&text).snapshot();
1134 if let Some(language) = language.clone() {
1135 let language_registry = language_registry.clone();
1136 syntax.reparse(&text, language_registry, language);
1137 }
1138 let tree_sitter_data = TreeSitterData::new(text.clone());
1139 BufferSnapshot {
1140 text,
1141 syntax,
1142 file: None,
1143 diagnostics: Default::default(),
1144 remote_selections: Default::default(),
1145 tree_sitter_data: Arc::new(tree_sitter_data),
1146 language,
1147 non_text_state_update_count: 0,
1148 capability: Capability::ReadOnly,
1149 }
1150 }
1151 }
1152
1153 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1154 let entity_id = cx.reserve_entity::<Self>().entity_id();
1155 let buffer_id = entity_id.as_non_zero_u64().into();
1156 let text = TextBuffer::new_normalized(
1157 ReplicaId::LOCAL,
1158 buffer_id,
1159 Default::default(),
1160 Rope::new(),
1161 )
1162 .snapshot();
1163 let syntax = SyntaxMap::new(&text).snapshot();
1164 let tree_sitter_data = TreeSitterData::new(text.clone());
1165 BufferSnapshot {
1166 text,
1167 syntax,
1168 tree_sitter_data: Arc::new(tree_sitter_data),
1169 file: None,
1170 diagnostics: Default::default(),
1171 remote_selections: Default::default(),
1172 language: None,
1173 non_text_state_update_count: 0,
1174 capability: Capability::ReadOnly,
1175 }
1176 }
1177
1178 #[cfg(any(test, feature = "test-support"))]
1179 pub fn build_snapshot_sync(
1180 text: Rope,
1181 language: Option<Arc<Language>>,
1182 language_registry: Option<Arc<LanguageRegistry>>,
1183 cx: &mut App,
1184 ) -> BufferSnapshot {
1185 let entity_id = cx.reserve_entity::<Self>().entity_id();
1186 let buffer_id = entity_id.as_non_zero_u64().into();
1187 let text =
1188 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1189 .snapshot();
1190 let mut syntax = SyntaxMap::new(&text).snapshot();
1191 if let Some(language) = language.clone() {
1192 syntax.reparse(&text, language_registry, language);
1193 }
1194 let tree_sitter_data = TreeSitterData::new(text.clone());
1195 BufferSnapshot {
1196 text,
1197 syntax,
1198 tree_sitter_data: Arc::new(tree_sitter_data),
1199 file: None,
1200 diagnostics: Default::default(),
1201 remote_selections: Default::default(),
1202 language,
1203 non_text_state_update_count: 0,
1204 capability: Capability::ReadOnly,
1205 }
1206 }
1207
1208 /// Retrieve a snapshot of the buffer's current state. This is computationally
1209 /// cheap, and allows reading from the buffer on a background thread.
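    ///
    /// A minimal sketch (names are illustrative):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // Read-only queries are safe here, off the main thread.
    ///     let _text = snapshot.text();
    /// });
    /// ```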
1210 pub fn snapshot(&self) -> BufferSnapshot {
1211 let text = self.text.snapshot();
1212 let mut syntax_map = self.syntax_map.lock();
1213 syntax_map.interpolate(&text);
1214 let syntax = syntax_map.snapshot();
1215
1216 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1217 Arc::new(TreeSitterData::new(text.clone()))
1218 } else {
1219 self.tree_sitter_data.clone()
1220 };
1221
1222 BufferSnapshot {
1223 text,
1224 syntax,
1225 tree_sitter_data,
1226 file: self.file.clone(),
1227 remote_selections: self.remote_selections.clone(),
1228 diagnostics: self.diagnostics.clone(),
1229 language: self.language.clone(),
1230 non_text_state_update_count: self.non_text_state_update_count,
1231 capability: self.capability,
1232 }
1233 }
1234
1235 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1236 let this = cx.entity();
1237 cx.new(|cx| {
1238 let mut branch = Self {
1239 branch_state: Some(BufferBranchState {
1240 base_buffer: this.clone(),
1241 merged_operations: Default::default(),
1242 }),
1243 language: self.language.clone(),
1244 has_conflict: self.has_conflict,
1245 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1246 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1247 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1248 };
1249 if let Some(language_registry) = self.language_registry() {
1250 branch.set_language_registry(language_registry);
1251 }
1252
1253 // Reparse the branch buffer so that we get syntax highlighting immediately.
1254 branch.reparse(cx, true);
1255
1256 branch
1257 })
1258 }
1259
1260 pub fn preview_edits(
1261 &self,
1262 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1263 cx: &App,
1264 ) -> Task<EditPreview> {
1265 let registry = self.language_registry();
1266 let language = self.language().cloned();
1267 let old_snapshot = self.text.snapshot();
1268 let mut branch_buffer = self.text.branch();
1269 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1270 cx.background_spawn(async move {
1271 if !edits.is_empty() {
1272 if let Some(language) = language.clone() {
1273 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1274 }
1275
1276 branch_buffer.edit(edits.iter().cloned());
1277 let snapshot = branch_buffer.snapshot();
1278 syntax_snapshot.interpolate(&snapshot);
1279
1280 if let Some(language) = language {
1281 syntax_snapshot.reparse(&snapshot, registry, language);
1282 }
1283 }
1284 EditPreview {
1285 old_snapshot,
1286 applied_edits_snapshot: branch_buffer.snapshot(),
1287 syntax_snapshot,
1288 }
1289 })
1290 }
1291
1292 /// Applies all of the changes in this buffer that intersect any of the
1293 /// given `ranges` to its base buffer.
1294 ///
1295 /// If `ranges` is empty, then all changes will be applied. This buffer must
1296 /// be a branch buffer to call this method.
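    ///
    /// A minimal usage sketch (assuming `branch` was created via [`Buffer::branch`]):
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     // An empty `ranges` vec applies every change back to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```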
1297 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1298 let Some(base_buffer) = self.base_buffer() else {
1299 debug_panic!("not a branch buffer");
1300 return;
1301 };
1302
1303 let mut ranges = if ranges.is_empty() {
1304 &[0..usize::MAX]
1305 } else {
1306 ranges.as_slice()
1307 }
1308 .iter()
1309 .peekable();
1310
1311 let mut edits = Vec::new();
1312 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1313 let mut is_included = false;
1314 while let Some(range) = ranges.peek() {
1315 if range.end < edit.new.start {
1316 ranges.next().unwrap();
1317 } else {
1318 if range.start <= edit.new.end {
1319 is_included = true;
1320 }
1321 break;
1322 }
1323 }
1324
1325 if is_included {
1326 edits.push((
1327 edit.old.clone(),
1328 self.text_for_range(edit.new.clone()).collect::<String>(),
1329 ));
1330 }
1331 }
1332
1333 let operation = base_buffer.update(cx, |base_buffer, cx| {
1334 // cx.emit(BufferEvent::DiffBaseChanged);
1335 base_buffer.edit(edits, None, cx)
1336 });
1337
1338 if let Some(operation) = operation
1339 && let Some(BufferBranchState {
1340 merged_operations, ..
1341 }) = &mut self.branch_state
1342 {
1343 merged_operations.push(operation);
1344 }
1345 }
1346
1347 fn on_base_buffer_event(
1348 &mut self,
1349 _: Entity<Buffer>,
1350 event: &BufferEvent,
1351 cx: &mut Context<Self>,
1352 ) {
1353 let BufferEvent::Operation { operation, .. } = event else {
1354 return;
1355 };
1356 let Some(BufferBranchState {
1357 merged_operations, ..
1358 }) = &mut self.branch_state
1359 else {
1360 return;
1361 };
1362
1363 let mut operation_to_undo = None;
1364 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1365 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1366 {
1367 merged_operations.remove(ix);
1368 operation_to_undo = Some(operation.timestamp);
1369 }
1370
1371 self.apply_ops([operation.clone()], cx);
1372
1373 if let Some(timestamp) = operation_to_undo {
1374 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1375 self.undo_operations(counts, cx);
1376 }
1377 }
1378
1379 #[cfg(test)]
1380 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1381 &self.text
1382 }
1383
1384 /// Retrieve a snapshot of the buffer's raw text, without any
1385 /// language-related state like the syntax tree or diagnostics.
1386 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1387 self.text.snapshot()
1388 }
1389
1390 /// The file associated with the buffer, if any.
1391 pub fn file(&self) -> Option<&Arc<dyn File>> {
1392 self.file.as_ref()
1393 }
1394
1395 /// The version of the buffer that was last saved or reloaded from disk.
1396 pub fn saved_version(&self) -> &clock::Global {
1397 &self.saved_version
1398 }
1399
1400 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1401 pub fn saved_mtime(&self) -> Option<MTime> {
1402 self.saved_mtime
1403 }
1404
1405 /// Returns the character encoding of the buffer's file.
1406 pub fn encoding(&self) -> &'static Encoding {
1407 self.encoding
1408 }
1409
1410 /// Sets the character encoding of the buffer.
1411 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1412 self.encoding = encoding;
1413 }
1414
1415 /// Returns whether the buffer has a Byte Order Mark.
1416 pub fn has_bom(&self) -> bool {
1417 self.has_bom
1418 }
1419
1420 /// Sets whether the buffer has a Byte Order Mark.
1421 pub fn set_has_bom(&mut self, has_bom: bool) {
1422 self.has_bom = has_bom;
1423 }
1424
1425 /// Assign a language to the buffer.
1426 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1427 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1428 }
1429
1430 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1431 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1432 self.set_language_(language, true, cx);
1433 }
1434
1435 fn set_language_(
1436 &mut self,
1437 language: Option<Arc<Language>>,
1438 may_block: bool,
1439 cx: &mut Context<Self>,
1440 ) {
1441 self.non_text_state_update_count += 1;
1442 self.syntax_map.lock().clear(&self.text);
1443 let old_language = std::mem::replace(&mut self.language, language);
1444 self.was_changed();
1445 self.reparse(cx, may_block);
1446 let has_fresh_language =
1447 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1448 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1449 }
1450
1451 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1452 /// other languages if parts of the buffer are written in different languages.
1453 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1454 self.syntax_map
1455 .lock()
1456 .set_language_registry(language_registry);
1457 }
1458
1459 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1460 self.syntax_map.lock().language_registry()
1461 }
1462
1463 /// Assign the line ending type to the buffer.
1464 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1465 self.text.set_line_ending(line_ending);
1466
1467 let lamport_timestamp = self.text.lamport_clock.tick();
1468 self.send_operation(
1469 Operation::UpdateLineEnding {
1470 line_ending,
1471 lamport_timestamp,
1472 },
1473 true,
1474 cx,
1475 );
1476 }
1477
1478 /// Assign the buffer a new [`Capability`].
1479 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1480 if self.capability != capability {
1481 self.capability = capability;
1482 cx.emit(BufferEvent::CapabilityChanged)
1483 }
1484 }
1485
1486 /// This method is called to signal that the buffer has been saved.
1487 pub fn did_save(
1488 &mut self,
1489 version: clock::Global,
1490 mtime: Option<MTime>,
1491 cx: &mut Context<Self>,
1492 ) {
1493 self.saved_version = version.clone();
1494 self.has_unsaved_edits.set((version, false));
1495 self.has_conflict = false;
1496 self.saved_mtime = mtime;
1497 self.was_changed();
1498 cx.emit(BufferEvent::Saved);
1499 cx.notify();
1500 }
1501
1502 /// Reloads the contents of the buffer from disk.
1503 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1504 let (tx, rx) = futures::channel::oneshot::channel();
1505 let prev_version = self.text.version();
1506 self.reload_task = Some(cx.spawn(async move |this, cx| {
1507 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1508 let file = this.file.as_ref()?.as_local()?;
1509
1510 Some((file.disk_state().mtime(), file.load(cx)))
1511 })?
1512 else {
1513 return Ok(());
1514 };
1515
1516 let new_text = new_text.await?;
1517 let diff = this
1518 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1519 .await;
1520 this.update(cx, |this, cx| {
1521 if this.version() == diff.base_version {
1522 this.finalize_last_transaction();
1523 this.apply_diff(diff, cx);
1524 tx.send(this.finalize_last_transaction().cloned()).ok();
1525 this.has_conflict = false;
1526 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1527 } else {
1528 if !diff.edits.is_empty()
1529 || this
1530 .edits_since::<usize>(&diff.base_version)
1531 .next()
1532 .is_some()
1533 {
1534 this.has_conflict = true;
1535 }
1536
1537 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1538 }
1539
1540 this.reload_task.take();
1541 })
1542 }));
1543 rx
1544 }
1545
1546 /// This method is called to signal that the buffer has been reloaded.
1547 pub fn did_reload(
1548 &mut self,
1549 version: clock::Global,
1550 line_ending: LineEnding,
1551 mtime: Option<MTime>,
1552 cx: &mut Context<Self>,
1553 ) {
1554 self.saved_version = version;
1555 self.has_unsaved_edits
1556 .set((self.saved_version.clone(), false));
1557 self.text.set_line_ending(line_ending);
1558 self.saved_mtime = mtime;
1559 cx.emit(BufferEvent::Reloaded);
1560 cx.notify();
1561 }
1562
1563 /// Updates the [`File`] backing this buffer. This should be called when
1564 /// the file has changed or has been deleted.
1565 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1566 let was_dirty = self.is_dirty();
1567 let mut file_changed = false;
1568
1569 if let Some(old_file) = self.file.as_ref() {
1570 if new_file.path() != old_file.path() {
1571 file_changed = true;
1572 }
1573
1574 let old_state = old_file.disk_state();
1575 let new_state = new_file.disk_state();
1576 if old_state != new_state {
1577 file_changed = true;
1578 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1579 cx.emit(BufferEvent::ReloadNeeded)
1580 }
1581 }
1582 } else {
1583 file_changed = true;
1584 };
1585
1586 self.file = Some(new_file);
1587 if file_changed {
1588 self.was_changed();
1589 self.non_text_state_update_count += 1;
1590 if was_dirty != self.is_dirty() {
1591 cx.emit(BufferEvent::DirtyChanged);
1592 }
1593 cx.emit(BufferEvent::FileHandleChanged);
1594 cx.notify();
1595 }
1596 }
1597
1598 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1599 Some(self.branch_state.as_ref()?.base_buffer.clone())
1600 }
1601
1602 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1603 pub fn language(&self) -> Option<&Arc<Language>> {
1604 self.language.as_ref()
1605 }
1606
1607 /// Returns the [`Language`] at the given location.
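    ///
    /// For example, in a Markdown buffer containing an injected Rust code block,
    /// calling this with an offset inside that block returns the Rust language,
    /// while offsets outside it fall back to the buffer's primary language.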
1608 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1609 let offset = position.to_offset(self);
1610 let mut is_first = true;
1611 let start_anchor = self.anchor_before(offset);
1612 let end_anchor = self.anchor_after(offset);
1613 self.syntax_map
1614 .lock()
1615 .layers_for_range(offset..offset, &self.text, false)
1616 .filter(|layer| {
1617 if is_first {
1618 is_first = false;
1619 return true;
1620 }
1621
1622 layer
1623 .included_sub_ranges
1624 .map(|sub_ranges| {
1625 sub_ranges.iter().any(|sub_range| {
1626 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1627 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1628 !is_before_start && !is_after_end
1629 })
1630 })
1631 .unwrap_or(true)
1632 })
1633 .last()
1634 .map(|info| info.language.clone())
1635 .or_else(|| self.language.clone())
1636 }
1637
1638 /// Returns each [`Language`] for the active syntax layers at the given location.
1639 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1640 let offset = position.to_offset(self);
1641 let mut languages: Vec<Arc<Language>> = self
1642 .syntax_map
1643 .lock()
1644 .layers_for_range(offset..offset, &self.text, false)
1645 .map(|info| info.language.clone())
1646 .collect();
1647
1648 if languages.is_empty()
1649 && let Some(buffer_language) = self.language()
1650 {
1651 languages.push(buffer_language.clone());
1652 }
1653
1654 languages
1655 }
1656
1657 /// An integer version number that accounts for all updates besides
1658 /// the buffer's text itself (which is versioned via a version vector).
1659 pub fn non_text_state_update_count(&self) -> usize {
1660 self.non_text_state_update_count
1661 }
1662
1663 /// Whether the buffer is being parsed in the background.
1664 #[cfg(any(test, feature = "test-support"))]
1665 pub fn is_parsing(&self) -> bool {
1666 self.reparse.is_some()
1667 }
1668
1669 /// Indicates whether the buffer contains any regions that may be
1670 /// written in a language that hasn't been loaded yet.
1671 pub fn contains_unknown_injections(&self) -> bool {
1672 self.syntax_map.lock().contains_unknown_injections()
1673 }
1674
1675 #[cfg(any(test, feature = "test-support"))]
1676 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1677 self.sync_parse_timeout = timeout;
1678 }
1679
1680 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1681 match Arc::get_mut(&mut self.tree_sitter_data) {
1682 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1683 None => {
1684 let tree_sitter_data = TreeSitterData::new(snapshot);
1685 self.tree_sitter_data = Arc::new(tree_sitter_data)
1686 }
1687 }
1688 }
1689
1690 /// Called after an edit to synchronize the buffer's main parse tree with
1691 /// the buffer's new underlying state.
1692 ///
1693 /// Locks the syntax map and interpolates the edits since the last reparse
1694 /// into the foreground syntax tree.
1695 ///
1696 /// Then takes a stable snapshot of the syntax map before unlocking it.
1697 /// The snapshot with the interpolated edits is sent to a background thread,
1698 /// where we ask Tree-sitter to perform an incremental parse.
1699 ///
    /// Meanwhile, if `may_block` is true, we block the foreground thread for up
    /// to `sync_parse_timeout` (1ms by default) waiting for the parse to
    /// complete, and proceed synchronously if it finishes in time.
    ///
    /// If the timeout elapses, we return with the interpolated tree still in
    /// the foreground and spawn a second task that waits for the background
    /// parse to complete. When it finishes, it calls back into the main thread
    /// and assigns the new parse state.
1708 ///
1709 /// If the buffer or grammar changed since the start of the background parse,
1710 /// initiate an additional reparse recursively. To avoid concurrent parses
1711 /// for the same buffer, we only initiate a new parse if we are not already
1712 /// parsing in the background.
1713 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1714 if self.text.version() != *self.tree_sitter_data.version() {
1715 self.invalidate_tree_sitter_data(self.text.snapshot());
1716 }
1717 if self.reparse.is_some() {
1718 return;
1719 }
1720 let language = if let Some(language) = self.language.clone() {
1721 language
1722 } else {
1723 return;
1724 };
1725
1726 let text = self.text_snapshot();
1727 let parsed_version = self.version();
1728
1729 let mut syntax_map = self.syntax_map.lock();
1730 syntax_map.interpolate(&text);
1731 let language_registry = syntax_map.language_registry();
1732 let mut syntax_snapshot = syntax_map.snapshot();
1733 drop(syntax_map);
1734
1735 let parse_task = cx.background_spawn({
1736 let language = language.clone();
1737 let language_registry = language_registry.clone();
1738 async move {
1739 syntax_snapshot.reparse(&text, language_registry, language);
1740 syntax_snapshot
1741 }
1742 });
1743
1744 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1745 if may_block {
1746 match cx
1747 .background_executor()
1748 .block_with_timeout(self.sync_parse_timeout, parse_task)
1749 {
1750 Ok(new_syntax_snapshot) => {
1751 self.did_finish_parsing(new_syntax_snapshot, cx);
1752 self.reparse = None;
1753 }
1754 Err(parse_task) => {
1755 self.reparse = Some(cx.spawn(async move |this, cx| {
1756 let new_syntax_map = cx.background_spawn(parse_task).await;
1757 this.update(cx, move |this, cx| {
1758 let grammar_changed = || {
1759 this.language.as_ref().is_none_or(|current_language| {
1760 !Arc::ptr_eq(&language, current_language)
1761 })
1762 };
1763 let language_registry_changed = || {
1764 new_syntax_map.contains_unknown_injections()
1765 && language_registry.is_some_and(|registry| {
1766 registry.version()
1767 != new_syntax_map.language_registry_version()
1768 })
1769 };
1770 let parse_again = this.version.changed_since(&parsed_version)
1771 || language_registry_changed()
1772 || grammar_changed();
1773 this.did_finish_parsing(new_syntax_map, cx);
1774 this.reparse = None;
1775 if parse_again {
1776 this.reparse(cx, false);
1777 }
1778 })
1779 .ok();
1780 }));
1781 }
1782 }
1783 } else {
1784 self.reparse = Some(cx.spawn(async move |this, cx| {
1785 let new_syntax_map = cx.background_spawn(parse_task).await;
1786 this.update(cx, move |this, cx| {
1787 let grammar_changed = || {
1788 this.language.as_ref().is_none_or(|current_language| {
1789 !Arc::ptr_eq(&language, current_language)
1790 })
1791 };
1792 let language_registry_changed = || {
1793 new_syntax_map.contains_unknown_injections()
1794 && language_registry.is_some_and(|registry| {
1795 registry.version() != new_syntax_map.language_registry_version()
1796 })
1797 };
1798 let parse_again = this.version.changed_since(&parsed_version)
1799 || language_registry_changed()
1800 || grammar_changed();
1801 this.did_finish_parsing(new_syntax_map, cx);
1802 this.reparse = None;
1803 if parse_again {
1804 this.reparse(cx, false);
1805 }
1806 })
1807 .ok();
1808 }));
1809 }
1810 }
1811
1812 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1813 self.was_changed();
1814 self.non_text_state_update_count += 1;
1815 self.syntax_map.lock().did_parse(syntax_snapshot);
1816 self.request_autoindent(cx);
1817 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1818 self.invalidate_tree_sitter_data(self.text.snapshot());
1819 cx.emit(BufferEvent::Reparsed);
1820 cx.notify();
1821 }
1822
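/// Returns a receiver for observing the buffer's current [`ParseStatus`].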
1823 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1824 self.parse_status.1.clone()
1825 }
1826
1827 /// Waits until the buffer is no longer parsing.
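///
/// A minimal usage sketch (not a doctest); the `buffer` entity and async `cx`
/// below are illustrative:
///
/// ```ignore
/// let idle = buffer.update(cx, |buffer, _| buffer.parsing_idle());
/// idle.await; // resolves once the buffer reports `ParseStatus::Idle`
/// ```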
1828 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1829 let mut parse_status = self.parse_status();
1830 async move {
1831 while *parse_status.borrow() != ParseStatus::Idle {
1832 if parse_status.changed().await.is_err() {
1833 break;
1834 }
1835 }
1836 }
1837 }
1838
1839 /// Assign to the buffer a set of diagnostics created by a given language server.
1840 pub fn update_diagnostics(
1841 &mut self,
1842 server_id: LanguageServerId,
1843 diagnostics: DiagnosticSet,
1844 cx: &mut Context<Self>,
1845 ) {
1846 let lamport_timestamp = self.text.lamport_clock.tick();
1847 let op = Operation::UpdateDiagnostics {
1848 server_id,
1849 diagnostics: diagnostics.iter().cloned().collect(),
1850 lamport_timestamp,
1851 };
1852
1853 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1854 self.send_operation(op, true, cx);
1855 }
1856
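/// Returns this buffer's diagnostics for the given language server, or for all
/// servers when `for_server` is `None`.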
1857 pub fn buffer_diagnostics(
1858 &self,
1859 for_server: Option<LanguageServerId>,
1860 ) -> Vec<&DiagnosticEntry<Anchor>> {
1861 match for_server {
1862 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1863 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1864 Err(_) => Vec::new(),
1865 },
1866 None => self
1867 .diagnostics
1868 .iter()
1869 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1870 .collect(),
1871 }
1872 }
1873
1874 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1875 if let Some(indent_sizes) = self.compute_autoindents() {
1876 let indent_sizes = cx.background_spawn(indent_sizes);
1877 match cx
1878 .background_executor()
1879 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1880 {
1881 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1882 Err(indent_sizes) => {
1883 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1884 let indent_sizes = indent_sizes.await;
1885 this.update(cx, |this, cx| {
1886 this.apply_autoindents(indent_sizes, cx);
1887 })
1888 .ok();
1889 }));
1890 }
1891 }
1892 } else {
1893 self.autoindent_requests.clear();
1894 for tx in self.wait_for_autoindent_txs.drain(..) {
1895 tx.send(()).ok();
1896 }
1897 }
1898 }
1899
1900 fn compute_autoindents(
1901 &self,
1902 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1903 let max_rows_between_yields = 100;
1904 let snapshot = self.snapshot();
1905 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1906 return None;
1907 }
1908
1909 let autoindent_requests = self.autoindent_requests.clone();
1910 Some(async move {
1911 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1912 for request in autoindent_requests {
1913 // Resolve each edited range to its row in the current buffer and in the
1914 // buffer before this batch of edits.
1915 let mut row_ranges = Vec::new();
1916 let mut old_to_new_rows = BTreeMap::new();
1917 let mut language_indent_sizes_by_new_row = Vec::new();
1918 for entry in &request.entries {
1919 let position = entry.range.start;
1920 let new_row = position.to_point(&snapshot).row;
1921 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1922 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1923
1924 if !entry.first_line_is_new {
1925 let old_row = position.to_point(&request.before_edit).row;
1926 old_to_new_rows.insert(old_row, new_row);
1927 }
1928 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1929 }
1930
1931 // Build a map containing the suggested indentation for each of the edited lines
1932 // with respect to the state of the buffer before these edits. This map is keyed
1933 // by the rows for these lines in the current state of the buffer.
1934 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1935 let old_edited_ranges =
1936 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1937 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1938 let mut language_indent_size = IndentSize::default();
1939 for old_edited_range in old_edited_ranges {
1940 let suggestions = request
1941 .before_edit
1942 .suggest_autoindents(old_edited_range.clone())
1943 .into_iter()
1944 .flatten();
1945 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1946 if let Some(suggestion) = suggestion {
1947 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1948
1949 // Find the indent size based on the language for this row.
1950 while let Some((row, size)) = language_indent_sizes.peek() {
1951 if *row > new_row {
1952 break;
1953 }
1954 language_indent_size = *size;
1955 language_indent_sizes.next();
1956 }
1957
1958 let suggested_indent = old_to_new_rows
1959 .get(&suggestion.basis_row)
1960 .and_then(|from_row| {
1961 Some(old_suggestions.get(from_row).copied()?.0)
1962 })
1963 .unwrap_or_else(|| {
1964 request
1965 .before_edit
1966 .indent_size_for_line(suggestion.basis_row)
1967 })
1968 .with_delta(suggestion.delta, language_indent_size);
1969 old_suggestions
1970 .insert(new_row, (suggested_indent, suggestion.within_error));
1971 }
1972 }
1973 yield_now().await;
1974 }
1975
1976 // Compute new suggestions for each line, but only include them in the result
1977 // if they differ from the old suggestion for that line.
1978 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1979 let mut language_indent_size = IndentSize::default();
1980 for (row_range, original_indent_column) in row_ranges {
1981 let new_edited_row_range = if request.is_block_mode {
1982 row_range.start..row_range.start + 1
1983 } else {
1984 row_range.clone()
1985 };
1986
1987 let suggestions = snapshot
1988 .suggest_autoindents(new_edited_row_range.clone())
1989 .into_iter()
1990 .flatten();
1991 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1992 if let Some(suggestion) = suggestion {
1993 // Find the indent size based on the language for this row.
1994 while let Some((row, size)) = language_indent_sizes.peek() {
1995 if *row > new_row {
1996 break;
1997 }
1998 language_indent_size = *size;
1999 language_indent_sizes.next();
2000 }
2001
2002 let suggested_indent = indent_sizes
2003 .get(&suggestion.basis_row)
2004 .copied()
2005 .map(|e| e.0)
2006 .unwrap_or_else(|| {
2007 snapshot.indent_size_for_line(suggestion.basis_row)
2008 })
2009 .with_delta(suggestion.delta, language_indent_size);
2010
2011 if old_suggestions.get(&new_row).is_none_or(
2012 |(old_indentation, was_within_error)| {
2013 suggested_indent != *old_indentation
2014 && (!suggestion.within_error || *was_within_error)
2015 },
2016 ) {
2017 indent_sizes.insert(
2018 new_row,
2019 (suggested_indent, request.ignore_empty_lines),
2020 );
2021 }
2022 }
2023 }
2024
2025 if let (true, Some(original_indent_column)) =
2026 (request.is_block_mode, original_indent_column)
2027 {
2028 let new_indent =
2029 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2030 *indent
2031 } else {
2032 snapshot.indent_size_for_line(row_range.start)
2033 };
2034 let delta = new_indent.len as i64 - original_indent_column as i64;
2035 if delta != 0 {
2036 for row in row_range.skip(1) {
2037 indent_sizes.entry(row).or_insert_with(|| {
2038 let mut size = snapshot.indent_size_for_line(row);
2039 if size.kind == new_indent.kind {
2040 match delta.cmp(&0) {
2041 Ordering::Greater => size.len += delta as u32,
2042 Ordering::Less => {
2043 size.len = size.len.saturating_sub(-delta as u32)
2044 }
2045 Ordering::Equal => {}
2046 }
2047 }
2048 (size, request.ignore_empty_lines)
2049 });
2050 }
2051 }
2052 }
2053
2054 yield_now().await;
2055 }
2056 }
2057
2058 indent_sizes
2059 .into_iter()
2060 .filter_map(|(row, (indent, ignore_empty_lines))| {
2061 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2062 None
2063 } else {
2064 Some((row, indent))
2065 }
2066 })
2067 .collect()
2068 })
2069 }
2070
2071 fn apply_autoindents(
2072 &mut self,
2073 indent_sizes: BTreeMap<u32, IndentSize>,
2074 cx: &mut Context<Self>,
2075 ) {
2076 self.autoindent_requests.clear();
2077 for tx in self.wait_for_autoindent_txs.drain(..) {
2078 tx.send(()).ok();
2079 }
2080
2081 let edits: Vec<_> = indent_sizes
2082 .into_iter()
2083 .filter_map(|(row, indent_size)| {
2084 let current_size = indent_size_for_line(self, row);
2085 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2086 })
2087 .collect();
2088
2089 let preserve_preview = self.preserve_preview();
2090 self.edit(edits, None, cx);
2091 if preserve_preview {
2092 self.refresh_preview();
2093 }
2094 }
2095
2096 /// Create a minimal edit that will cause the given row to be indented
2097 /// with the given size. After applying this edit, the length of the line
2098 /// will always be at least `new_size.len`.
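///
/// An illustrative sketch (not a doctest) of the two cases, using the
/// `IndentSize::spaces` and `IndentSize::tab` constructors used elsewhere in this crate:
///
/// ```ignore
/// // Same indent kind: grow from 2 to 4 spaces by inserting two spaces at column 0.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
///
/// // Different indent kind: replace the existing 4 spaces with a single tab.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(4),
///     IndentSize::tab(),
/// );
/// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 4), "\t".to_string())));
/// ```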
2099 pub fn edit_for_indent_size_adjustment(
2100 row: u32,
2101 current_size: IndentSize,
2102 new_size: IndentSize,
2103 ) -> Option<(Range<Point>, String)> {
2104 if new_size.kind == current_size.kind {
2105 match new_size.len.cmp(&current_size.len) {
2106 Ordering::Greater => {
2107 let point = Point::new(row, 0);
2108 Some((
2109 point..point,
2110 iter::repeat(new_size.char())
2111 .take((new_size.len - current_size.len) as usize)
2112 .collect::<String>(),
2113 ))
2114 }
2115
2116 Ordering::Less => Some((
2117 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2118 String::new(),
2119 )),
2120
2121 Ordering::Equal => None,
2122 }
2123 } else {
2124 Some((
2125 Point::new(row, 0)..Point::new(row, current_size.len),
2126 iter::repeat(new_size.char())
2127 .take(new_size.len as usize)
2128 .collect::<String>(),
2129 ))
2130 }
2131 }
2132
2133 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2134 /// and the given new text.
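///
/// A usage sketch pairing this with [`Buffer::apply_diff`]; the `buffer` entity,
/// `new_text`, and async `cx` below are illustrative:
///
/// ```ignore
/// let diff_task = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx));
/// let diff = diff_task.await;
/// buffer.update(cx, |buffer, cx| {
///     // Hunks that conflict with edits made since the diff was computed are discarded.
///     buffer.apply_diff(diff, cx);
/// });
/// ```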
2135 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2136 let old_text = self.as_rope().clone();
2137 let base_version = self.version();
2138 cx.background_executor()
2139 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2140 let old_text = old_text.to_string();
2141 let line_ending = LineEnding::detect(&new_text);
2142 LineEnding::normalize(&mut new_text);
2143 let edits = text_diff(&old_text, &new_text);
2144 Diff {
2145 base_version,
2146 line_ending,
2147 edits,
2148 }
2149 })
2150 }
2151
2152 /// Spawns a background task that searches the buffer for any whitespace
2153 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2154 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2155 let old_text = self.as_rope().clone();
2156 let line_ending = self.line_ending();
2157 let base_version = self.version();
2158 cx.background_spawn(async move {
2159 let ranges = trailing_whitespace_ranges(&old_text);
2160 let empty = Arc::<str>::from("");
2161 Diff {
2162 base_version,
2163 line_ending,
2164 edits: ranges
2165 .into_iter()
2166 .map(|range| (range, empty.clone()))
2167 .collect(),
2168 }
2169 })
2170 }
2171
2172 /// Ensures that the buffer ends with a single newline character, and
2173 /// no other whitespace. Skips if the buffer is empty.
2174 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2175 let len = self.len();
2176 if len == 0 {
2177 return;
2178 }
2179 let mut offset = len;
2180 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2181 let non_whitespace_len = chunk
2182 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2183 .len();
2184 offset -= chunk.len();
2185 offset += non_whitespace_len;
2186 if non_whitespace_len != 0 {
2187 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2188 return;
2189 }
2190 break;
2191 }
2192 }
2193 self.edit([(offset..len, "\n")], None, cx);
2194 }
2195
2196 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2197 /// calculated, then adjust the diff to account for those changes, and discard any
2198 /// parts of the diff that conflict with those changes.
2199 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2200 let snapshot = self.snapshot();
2201 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2202 let mut delta = 0;
2203 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2204 while let Some(edit_since) = edits_since.peek() {
2205 // If the edit occurs after a diff hunk, then it does not
2206 // affect that hunk.
2207 if edit_since.old.start > range.end {
2208 break;
2209 }
2210 // If the edit precedes the diff hunk, then adjust the hunk
2211 // to reflect the edit.
2212 else if edit_since.old.end < range.start {
2213 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2214 edits_since.next();
2215 }
2216 // If the edit intersects a diff hunk, then discard that hunk.
2217 else {
2218 return None;
2219 }
2220 }
2221
2222 let start = (range.start as i64 + delta) as usize;
2223 let end = (range.end as i64 + delta) as usize;
2224 Some((start..end, new_text))
2225 });
2226
2227 self.start_transaction();
2228 self.text.set_line_ending(diff.line_ending);
2229 self.edit(adjusted_edits, None, cx);
2230 self.end_transaction(cx)
2231 }
2232
2233 pub fn has_unsaved_edits(&self) -> bool {
2234 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2235
2236 if last_version == self.version {
2237 self.has_unsaved_edits
2238 .set((last_version, has_unsaved_edits));
2239 return has_unsaved_edits;
2240 }
2241
2242 let has_edits = self.has_edits_since(&self.saved_version);
2243 self.has_unsaved_edits
2244 .set((self.version.clone(), has_edits));
2245 has_edits
2246 }
2247
2248 /// Checks if the buffer has unsaved changes.
2249 pub fn is_dirty(&self) -> bool {
2250 if self.capability == Capability::ReadOnly {
2251 return false;
2252 }
2253 if self.has_conflict {
2254 return true;
2255 }
2256 match self.file.as_ref().map(|f| f.disk_state()) {
2257 Some(DiskState::New) | Some(DiskState::Deleted) => {
2258 !self.is_empty() && self.has_unsaved_edits()
2259 }
2260 _ => self.has_unsaved_edits(),
2261 }
2262 }
2263
2264 /// Marks the buffer as having a conflict regardless of current buffer state.
2265 pub fn set_conflict(&mut self) {
2266 self.has_conflict = true;
2267 }
2268
2269 /// Checks if the buffer and its file have both changed since the buffer
2270 /// was last saved or reloaded.
2271 pub fn has_conflict(&self) -> bool {
2272 if self.has_conflict {
2273 return true;
2274 }
2275 let Some(file) = self.file.as_ref() else {
2276 return false;
2277 };
2278 match file.disk_state() {
2279 DiskState::New => false,
2280 DiskState::Present { mtime } => match self.saved_mtime {
2281 Some(saved_mtime) => {
2282 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2283 }
2284 None => true,
2285 },
2286 DiskState::Deleted => false,
2287 }
2288 }
2289
2290 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2291 pub fn subscribe(&mut self) -> Subscription<usize> {
2292 self.text.subscribe()
2293 }
2294
2295 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2296 ///
2297 /// This allows downstream code to check if the buffer's text has changed without
2298 /// waiting for an effect cycle, which would be required if using events.
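///
/// A minimal sketch of the intended pattern; the `buffer` entity and `cx` below
/// are illustrative:
///
/// ```ignore
/// use std::{cell::Cell, rc::Rc};
///
/// let changed = Rc::new(Cell::new(false));
/// buffer.update(cx, |buffer, _| buffer.record_changes(Rc::downgrade(&changed)));
/// // ...after any subsequent change to the buffer's text...
/// assert!(changed.get());
/// ```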
2299 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2300 if let Err(ix) = self
2301 .change_bits
2302 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2303 {
2304 self.change_bits.insert(ix, bit);
2305 }
2306 }
2307
2308 /// Set the change bit for all "listeners".
2309 fn was_changed(&mut self) {
2310 self.change_bits.retain(|change_bit| {
2311 change_bit
2312 .upgrade()
2313 .inspect(|bit| {
2314 _ = bit.replace(true);
2315 })
2316 .is_some()
2317 });
2318 }
2319
2320 /// Starts a transaction, if one is not already in-progress. When undoing or
2321 /// redoing edits, all of the edits performed within a transaction are undone
2322 /// or redone together.
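///
/// A sketch of grouping two edits so they undo together; the `buffer`, `cx`,
/// offsets, and text are illustrative:
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "fn main() {\n")], None, cx);
/// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
/// buffer.end_transaction(cx);
/// buffer.undo(cx); // reverts both edits at once
/// ```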
2323 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2324 self.start_transaction_at(Instant::now())
2325 }
2326
2327 /// Starts a transaction, providing the current time. Subsequent transactions
2328 /// that occur within a short period of time will be grouped together. This
2329 /// is controlled by the buffer's undo grouping duration.
2330 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2331 self.transaction_depth += 1;
2332 if self.was_dirty_before_starting_transaction.is_none() {
2333 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2334 }
2335 self.text.start_transaction_at(now)
2336 }
2337
2338 /// Terminates the current transaction, if this is the outermost transaction.
2339 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2340 self.end_transaction_at(Instant::now(), cx)
2341 }
2342
2343 /// Terminates the current transaction, providing the current time. Subsequent transactions
2344 /// that occur within a short period of time will be grouped together. This
2345 /// is controlled by the buffer's undo grouping duration.
2346 pub fn end_transaction_at(
2347 &mut self,
2348 now: Instant,
2349 cx: &mut Context<Self>,
2350 ) -> Option<TransactionId> {
2351 assert!(self.transaction_depth > 0);
2352 self.transaction_depth -= 1;
2353 let was_dirty = if self.transaction_depth == 0 {
2354 self.was_dirty_before_starting_transaction.take().unwrap()
2355 } else {
2356 false
2357 };
2358 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2359 self.did_edit(&start_version, was_dirty, cx);
2360 Some(transaction_id)
2361 } else {
2362 None
2363 }
2364 }
2365
2366 /// Manually add a transaction to the buffer's undo history.
2367 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2368 self.text.push_transaction(transaction, now);
2369 }
2370
2371 /// Differs from `push_transaction` in that it does not clear the redo
2372 /// stack. Intended to be used to create a parent transaction to merge
2373 /// potential child transactions into.
2374 ///
2375 /// The caller is responsible for removing it from the undo history using
2376 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2377 /// are merged into this transaction, the caller is responsible for ensuring
2378 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2379 /// cleared is to create transactions with the usual `start_transaction` and
2380 /// `end_transaction` methods and merging the resulting transactions into
2381 /// the transaction created by this method.
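///
/// A sketch of that pattern; the offsets, text, and the `parent`/`child` names
/// are illustrative:
///
/// ```ignore
/// let parent = buffer.push_empty_transaction(Instant::now());
/// buffer.start_transaction();
/// buffer.edit([(0..0, "x")], None, cx);
/// match buffer.end_transaction(cx) {
///     Some(child) => buffer.merge_transactions(child, parent),
///     // No edits were made, so drop the empty parent transaction.
///     None => {
///         buffer.forget_transaction(parent);
///     }
/// }
/// ```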
2382 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2383 self.text.push_empty_transaction(now)
2384 }
2385
2386 /// Prevent the last transaction from being grouped with any subsequent transactions,
2387 /// even if they occur within the buffer's undo grouping duration.
2388 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2389 self.text.finalize_last_transaction()
2390 }
2391
2392 /// Manually group all changes since a given transaction.
2393 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2394 self.text.group_until_transaction(transaction_id);
2395 }
2396
2397 /// Manually remove a transaction from the buffer's undo history
2398 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2399 self.text.forget_transaction(transaction_id)
2400 }
2401
2402 /// Retrieve a transaction from the buffer's undo history
2403 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2404 self.text.get_transaction(transaction_id)
2405 }
2406
2407 /// Manually merge two transactions in the buffer's undo history.
2408 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2409 self.text.merge_transactions(transaction, destination);
2410 }
2411
2412 /// Waits for the buffer to receive operations with the given timestamps.
2413 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2414 &mut self,
2415 edit_ids: It,
2416 ) -> impl Future<Output = Result<()>> + use<It> {
2417 self.text.wait_for_edits(edit_ids)
2418 }
2419
2420 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2421 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2422 &mut self,
2423 anchors: It,
2424 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2425 self.text.wait_for_anchors(anchors)
2426 }
2427
2428 /// Waits for the buffer to receive operations up to the given version.
2429 pub fn wait_for_version(
2430 &mut self,
2431 version: clock::Global,
2432 ) -> impl Future<Output = Result<()>> + use<> {
2433 self.text.wait_for_version(version)
2434 }
2435
2436 /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`], or
2437 /// [`Buffer::wait_for_version`] to resolve with an error.
2438 pub fn give_up_waiting(&mut self) {
2439 self.text.give_up_waiting();
2440 }
2441
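/// Returns a receiver that resolves once all pending autoindent requests have
/// been applied, or `None` if no autoindent requests are pending.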
2442 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2443 let mut rx = None;
2444 if !self.autoindent_requests.is_empty() {
2445 let channel = oneshot::channel();
2446 self.wait_for_autoindent_txs.push(channel.0);
2447 rx = Some(channel.1);
2448 }
2449 rx
2450 }
2451
2452 /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2453 pub fn set_active_selections(
2454 &mut self,
2455 selections: Arc<[Selection<Anchor>]>,
2456 line_mode: bool,
2457 cursor_shape: CursorShape,
2458 cx: &mut Context<Self>,
2459 ) {
2460 let lamport_timestamp = self.text.lamport_clock.tick();
2461 self.remote_selections.insert(
2462 self.text.replica_id(),
2463 SelectionSet {
2464 selections: selections.clone(),
2465 lamport_timestamp,
2466 line_mode,
2467 cursor_shape,
2468 },
2469 );
2470 self.send_operation(
2471 Operation::UpdateSelections {
2472 selections,
2473 line_mode,
2474 lamport_timestamp,
2475 cursor_shape,
2476 },
2477 true,
2478 cx,
2479 );
2480 self.non_text_state_update_count += 1;
2481 cx.notify();
2482 }
2483
2484 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2485 /// this replica.
2486 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2487 if self
2488 .remote_selections
2489 .get(&self.text.replica_id())
2490 .is_none_or(|set| !set.selections.is_empty())
2491 {
2492 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2493 }
2494 }
2495
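/// Stores the agent's selections under the dedicated agent replica, like
/// [`Buffer::set_active_selections`], but without broadcasting an operation to
/// other replicas.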
2496 pub fn set_agent_selections(
2497 &mut self,
2498 selections: Arc<[Selection<Anchor>]>,
2499 line_mode: bool,
2500 cursor_shape: CursorShape,
2501 cx: &mut Context<Self>,
2502 ) {
2503 let lamport_timestamp = self.text.lamport_clock.tick();
2504 self.remote_selections.insert(
2505 ReplicaId::AGENT,
2506 SelectionSet {
2507 selections,
2508 lamport_timestamp,
2509 line_mode,
2510 cursor_shape,
2511 },
2512 );
2513 self.non_text_state_update_count += 1;
2514 cx.notify();
2515 }
2516
2517 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2518 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2519 }
2520
2521 /// Replaces the buffer's entire text.
2522 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2523 where
2524 T: Into<Arc<str>>,
2525 {
2526 self.autoindent_requests.clear();
2527 self.edit([(0..self.len(), text)], None, cx)
2528 }
2529
2530 /// Appends the given text to the end of the buffer.
2531 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2532 where
2533 T: Into<Arc<str>>,
2534 {
2535 self.edit([(self.len()..self.len(), text)], None, cx)
2536 }
2537
2538 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2539 /// delete, and a string of text to insert at that location.
2540 ///
2541 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2542 /// request for the edited ranges, which will be processed when the buffer finishes
2543 /// parsing.
2544 ///
2545 /// Parsing takes place at the end of a transaction, and may run synchronously
2546 /// or asynchronously, depending on the changes.
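///
/// An illustrative call (the offsets and text are arbitrary), requesting
/// per-line auto-indentation for the inserted text:
///
/// ```ignore
/// buffer.edit(
///     [(4..10, "if ready {\n    start();\n}")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```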
2547 pub fn edit<I, S, T>(
2548 &mut self,
2549 edits_iter: I,
2550 autoindent_mode: Option<AutoindentMode>,
2551 cx: &mut Context<Self>,
2552 ) -> Option<clock::Lamport>
2553 where
2554 I: IntoIterator<Item = (Range<S>, T)>,
2555 S: ToOffset,
2556 T: Into<Arc<str>>,
2557 {
2558 // Skip invalid edits and coalesce contiguous ones.
2559 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2560
2561 for (range, new_text) in edits_iter {
2562 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2563
2564 if range.start > range.end {
2565 mem::swap(&mut range.start, &mut range.end);
2566 }
2567 let new_text = new_text.into();
2568 if !new_text.is_empty() || !range.is_empty() {
2569 if let Some((prev_range, prev_text)) = edits.last_mut()
2570 && prev_range.end >= range.start
2571 {
2572 prev_range.end = cmp::max(prev_range.end, range.end);
2573 *prev_text = format!("{prev_text}{new_text}").into();
2574 } else {
2575 edits.push((range, new_text));
2576 }
2577 }
2578 }
2579 if edits.is_empty() {
2580 return None;
2581 }
2582
2583 self.start_transaction();
2584 self.pending_autoindent.take();
2585 let autoindent_request = autoindent_mode
2586 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2587
2588 let edit_operation = self.text.edit(edits.iter().cloned());
2589 let edit_id = edit_operation.timestamp();
2590
2591 if let Some((before_edit, mode)) = autoindent_request {
2592 let mut delta = 0isize;
2593 let mut previous_setting = None;
2594 let entries: Vec<_> = edits
2595 .into_iter()
2596 .enumerate()
2597 .zip(&edit_operation.as_edit().unwrap().new_text)
2598 .filter(|((_, (range, _)), _)| {
2599 let language = before_edit.language_at(range.start);
2600 let language_id = language.map(|l| l.id());
2601 if let Some((cached_language_id, auto_indent)) = previous_setting
2602 && cached_language_id == language_id
2603 {
2604 auto_indent
2605 } else {
2606 // The auto-indent setting is not present in editorconfigs, hence
2607 // we can avoid passing the file here.
2608 let auto_indent =
2609 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2610 previous_setting = Some((language_id, auto_indent));
2611 auto_indent
2612 }
2613 })
2614 .map(|((ix, (range, _)), new_text)| {
2615 let new_text_length = new_text.len();
2616 let old_start = range.start.to_point(&before_edit);
2617 let new_start = (delta + range.start as isize) as usize;
2618 let range_len = range.end - range.start;
2619 delta += new_text_length as isize - range_len as isize;
2620
2621 // Decide what range of the insertion to auto-indent, and whether
2622 // the first line of the insertion should be considered a newly-inserted line
2623 // or an edit to an existing line.
2624 let mut range_of_insertion_to_indent = 0..new_text_length;
2625 let mut first_line_is_new = true;
2626
2627 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2628 let old_line_end = before_edit.line_len(old_start.row);
2629
2630 if old_start.column > old_line_start {
2631 first_line_is_new = false;
2632 }
2633
2634 if !new_text.contains('\n')
2635 && (old_start.column + (range_len as u32) < old_line_end
2636 || old_line_end == old_line_start)
2637 {
2638 first_line_is_new = false;
2639 }
2640
2641 // When inserting text starting with a newline, avoid auto-indenting the
2642 // previous line.
2643 if new_text.starts_with('\n') {
2644 range_of_insertion_to_indent.start += 1;
2645 first_line_is_new = true;
2646 }
2647
2648 let mut original_indent_column = None;
2649 if let AutoindentMode::Block {
2650 original_indent_columns,
2651 } = &mode
2652 {
2653 original_indent_column = Some(if new_text.starts_with('\n') {
2654 indent_size_for_text(
2655 new_text[range_of_insertion_to_indent.clone()].chars(),
2656 )
2657 .len
2658 } else {
2659 original_indent_columns
2660 .get(ix)
2661 .copied()
2662 .flatten()
2663 .unwrap_or_else(|| {
2664 indent_size_for_text(
2665 new_text[range_of_insertion_to_indent.clone()].chars(),
2666 )
2667 .len
2668 })
2669 });
2670
2671 // Avoid auto-indenting the line after the edit.
2672 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2673 range_of_insertion_to_indent.end -= 1;
2674 }
2675 }
2676
2677 AutoindentRequestEntry {
2678 first_line_is_new,
2679 original_indent_column,
2680 indent_size: before_edit.language_indent_size_at(range.start, cx),
2681 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2682 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2683 }
2684 })
2685 .collect();
2686
2687 if !entries.is_empty() {
2688 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2689 before_edit,
2690 entries,
2691 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2692 ignore_empty_lines: false,
2693 }));
2694 }
2695 }
2696
2697 self.end_transaction(cx);
2698 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2699 Some(edit_id)
2700 }
2701
2702 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2703 self.was_changed();
2704
2705 if self.edits_since::<usize>(old_version).next().is_none() {
2706 return;
2707 }
2708
2709 self.reparse(cx, true);
2710 cx.emit(BufferEvent::Edited);
2711 if was_dirty != self.is_dirty() {
2712 cx.emit(BufferEvent::DirtyChanged);
2713 }
2714 cx.notify();
2715 }
2716
2717 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2718 where
2719 I: IntoIterator<Item = Range<T>>,
2720 T: ToOffset + Copy,
2721 {
2722 let before_edit = self.snapshot();
2723 let entries = ranges
2724 .into_iter()
2725 .map(|range| AutoindentRequestEntry {
2726 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2727 first_line_is_new: true,
2728 indent_size: before_edit.language_indent_size_at(range.start, cx),
2729 original_indent_column: None,
2730 })
2731 .collect();
2732 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2733 before_edit,
2734 entries,
2735 is_block_mode: false,
2736 ignore_empty_lines: true,
2737 }));
2738 self.request_autoindent(cx);
2739 }
2740
2741 /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2742 /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2743 pub fn insert_empty_line(
2744 &mut self,
2745 position: impl ToPoint,
2746 space_above: bool,
2747 space_below: bool,
2748 cx: &mut Context<Self>,
2749 ) -> Point {
2750 let mut position = position.to_point(self);
2751
2752 self.start_transaction();
2753
2754 self.edit(
2755 [(position..position, "\n")],
2756 Some(AutoindentMode::EachLine),
2757 cx,
2758 );
2759
2760 if position.column > 0 {
2761 position += Point::new(1, 0);
2762 }
2763
2764 if !self.is_line_blank(position.row) {
2765 self.edit(
2766 [(position..position, "\n")],
2767 Some(AutoindentMode::EachLine),
2768 cx,
2769 );
2770 }
2771
2772 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2773 self.edit(
2774 [(position..position, "\n")],
2775 Some(AutoindentMode::EachLine),
2776 cx,
2777 );
2778 position.row += 1;
2779 }
2780
2781 if space_below
2782 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2783 {
2784 self.edit(
2785 [(position..position, "\n")],
2786 Some(AutoindentMode::EachLine),
2787 cx,
2788 );
2789 }
2790
2791 self.end_transaction(cx);
2792
2793 position
2794 }
2795
2796 /// Applies the given remote operations to the buffer.
2797 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2798 self.pending_autoindent.take();
2799 let was_dirty = self.is_dirty();
2800 let old_version = self.version.clone();
2801 let mut deferred_ops = Vec::new();
2802 let buffer_ops = ops
2803 .into_iter()
2804 .filter_map(|op| match op {
2805 Operation::Buffer(op) => Some(op),
2806 _ => {
2807 if self.can_apply_op(&op) {
2808 self.apply_op(op, cx);
2809 } else {
2810 deferred_ops.push(op);
2811 }
2812 None
2813 }
2814 })
2815 .collect::<Vec<_>>();
2816 for operation in buffer_ops.iter() {
2817 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2818 }
2819 self.text.apply_ops(buffer_ops);
2820 self.deferred_ops.insert(deferred_ops);
2821 self.flush_deferred_ops(cx);
2822 self.did_edit(&old_version, was_dirty, cx);
2823 // Notify independently of whether the buffer was edited as the operations could include a
2824 // selection update.
2825 cx.notify();
2826 }
2827
2828 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2829 let mut deferred_ops = Vec::new();
2830 for op in self.deferred_ops.drain().iter().cloned() {
2831 if self.can_apply_op(&op) {
2832 self.apply_op(op, cx);
2833 } else {
2834 deferred_ops.push(op);
2835 }
2836 }
2837 self.deferred_ops.insert(deferred_ops);
2838 }
2839
2840 pub fn has_deferred_ops(&self) -> bool {
2841 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2842 }
2843
2844 fn can_apply_op(&self, operation: &Operation) -> bool {
2845 match operation {
2846 Operation::Buffer(_) => {
2847 unreachable!("buffer operations should never be applied at this layer")
2848 }
2849 Operation::UpdateDiagnostics {
2850 diagnostics: diagnostic_set,
2851 ..
2852 } => diagnostic_set.iter().all(|diagnostic| {
2853 self.text.can_resolve(&diagnostic.range.start)
2854 && self.text.can_resolve(&diagnostic.range.end)
2855 }),
2856 Operation::UpdateSelections { selections, .. } => selections
2857 .iter()
2858 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2859 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2860 }
2861 }
2862
2863 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2864 match operation {
2865 Operation::Buffer(_) => {
2866 unreachable!("buffer operations should never be applied at this layer")
2867 }
2868 Operation::UpdateDiagnostics {
2869 server_id,
2870 diagnostics: diagnostic_set,
2871 lamport_timestamp,
2872 } => {
2873 let snapshot = self.snapshot();
2874 self.apply_diagnostic_update(
2875 server_id,
2876 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2877 lamport_timestamp,
2878 cx,
2879 );
2880 }
2881 Operation::UpdateSelections {
2882 selections,
2883 lamport_timestamp,
2884 line_mode,
2885 cursor_shape,
2886 } => {
2887 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2888 && set.lamport_timestamp > lamport_timestamp
2889 {
2890 return;
2891 }
2892
2893 self.remote_selections.insert(
2894 lamport_timestamp.replica_id,
2895 SelectionSet {
2896 selections,
2897 lamport_timestamp,
2898 line_mode,
2899 cursor_shape,
2900 },
2901 );
2902 self.text.lamport_clock.observe(lamport_timestamp);
2903 self.non_text_state_update_count += 1;
2904 }
2905 Operation::UpdateCompletionTriggers {
2906 triggers,
2907 lamport_timestamp,
2908 server_id,
2909 } => {
2910 if triggers.is_empty() {
2911 self.completion_triggers_per_language_server
2912 .remove(&server_id);
2913 self.completion_triggers = self
2914 .completion_triggers_per_language_server
2915 .values()
2916 .flat_map(|triggers| triggers.iter().cloned())
2917 .collect();
2918 } else {
2919 self.completion_triggers_per_language_server
2920 .insert(server_id, triggers.iter().cloned().collect());
2921 self.completion_triggers.extend(triggers);
2922 }
2923 self.text.lamport_clock.observe(lamport_timestamp);
2924 }
2925 Operation::UpdateLineEnding {
2926 line_ending,
2927 lamport_timestamp,
2928 } => {
2929 self.text.set_line_ending(line_ending);
2930 self.text.lamport_clock.observe(lamport_timestamp);
2931 }
2932 }
2933 }
2934
2935 fn apply_diagnostic_update(
2936 &mut self,
2937 server_id: LanguageServerId,
2938 diagnostics: DiagnosticSet,
2939 lamport_timestamp: clock::Lamport,
2940 cx: &mut Context<Self>,
2941 ) {
2942 if lamport_timestamp > self.diagnostics_timestamp {
2943 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2944 if diagnostics.is_empty() {
2945 if let Ok(ix) = ix {
2946 self.diagnostics.remove(ix);
2947 }
2948 } else {
2949 match ix {
2950 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2951 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2952 };
2953 }
2954 self.diagnostics_timestamp = lamport_timestamp;
2955 self.non_text_state_update_count += 1;
2956 self.text.lamport_clock.observe(lamport_timestamp);
2957 cx.notify();
2958 cx.emit(BufferEvent::DiagnosticsUpdated);
2959 }
2960 }
2961
2962 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2963 self.was_changed();
2964 cx.emit(BufferEvent::Operation {
2965 operation,
2966 is_local,
2967 });
2968 }
2969
2970 /// Removes the selections for a given peer.
2971 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2972 self.remote_selections.remove(&replica_id);
2973 cx.notify();
2974 }
2975
2976 /// Undoes the most recent transaction.
2977 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2978 let was_dirty = self.is_dirty();
2979 let old_version = self.version.clone();
2980
2981 if let Some((transaction_id, operation)) = self.text.undo() {
2982 self.send_operation(Operation::Buffer(operation), true, cx);
2983 self.did_edit(&old_version, was_dirty, cx);
2984 Some(transaction_id)
2985 } else {
2986 None
2987 }
2988 }
2989
2990 /// Manually undoes a specific transaction in the buffer's undo history.
2991 pub fn undo_transaction(
2992 &mut self,
2993 transaction_id: TransactionId,
2994 cx: &mut Context<Self>,
2995 ) -> bool {
2996 let was_dirty = self.is_dirty();
2997 let old_version = self.version.clone();
2998 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2999 self.send_operation(Operation::Buffer(operation), true, cx);
3000 self.did_edit(&old_version, was_dirty, cx);
3001 true
3002 } else {
3003 false
3004 }
3005 }
3006
3007 /// Manually undoes all changes after a given transaction in the buffer's undo history.
3008 pub fn undo_to_transaction(
3009 &mut self,
3010 transaction_id: TransactionId,
3011 cx: &mut Context<Self>,
3012 ) -> bool {
3013 let was_dirty = self.is_dirty();
3014 let old_version = self.version.clone();
3015
3016 let operations = self.text.undo_to_transaction(transaction_id);
3017 let undone = !operations.is_empty();
3018 for operation in operations {
3019 self.send_operation(Operation::Buffer(operation), true, cx);
3020 }
3021 if undone {
3022 self.did_edit(&old_version, was_dirty, cx)
3023 }
3024 undone
3025 }
3026
3027 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3028 let was_dirty = self.is_dirty();
3029 let operation = self.text.undo_operations(counts);
3030 let old_version = self.version.clone();
3031 self.send_operation(Operation::Buffer(operation), true, cx);
3032 self.did_edit(&old_version, was_dirty, cx);
3033 }
3034
3035 /// Redoes the most recently undone transaction.
3036 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3037 let was_dirty = self.is_dirty();
3038 let old_version = self.version.clone();
3039
3040 if let Some((transaction_id, operation)) = self.text.redo() {
3041 self.send_operation(Operation::Buffer(operation), true, cx);
3042 self.did_edit(&old_version, was_dirty, cx);
3043 Some(transaction_id)
3044 } else {
3045 None
3046 }
3047 }
3048
3049 /// Manually redoes all changes until a given transaction in the buffer's redo history.
3050 pub fn redo_to_transaction(
3051 &mut self,
3052 transaction_id: TransactionId,
3053 cx: &mut Context<Self>,
3054 ) -> bool {
3055 let was_dirty = self.is_dirty();
3056 let old_version = self.version.clone();
3057
3058 let operations = self.text.redo_to_transaction(transaction_id);
3059 let redone = !operations.is_empty();
3060 for operation in operations {
3061 self.send_operation(Operation::Buffer(operation), true, cx);
3062 }
3063 if redone {
3064 self.did_edit(&old_version, was_dirty, cx)
3065 }
3066 redone
3067 }
3068
3069 /// Override current completion triggers with the user-provided completion triggers.
3070 pub fn set_completion_triggers(
3071 &mut self,
3072 server_id: LanguageServerId,
3073 triggers: BTreeSet<String>,
3074 cx: &mut Context<Self>,
3075 ) {
3076 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3077 if triggers.is_empty() {
3078 self.completion_triggers_per_language_server
3079 .remove(&server_id);
3080 self.completion_triggers = self
3081 .completion_triggers_per_language_server
3082 .values()
3083 .flat_map(|triggers| triggers.iter().cloned())
3084 .collect();
3085 } else {
3086 self.completion_triggers_per_language_server
3087 .insert(server_id, triggers.clone());
3088 self.completion_triggers.extend(triggers.iter().cloned());
3089 }
3090 self.send_operation(
3091 Operation::UpdateCompletionTriggers {
3092 triggers: triggers.into_iter().collect(),
3093 lamport_timestamp: self.completion_triggers_timestamp,
3094 server_id,
3095 },
3096 true,
3097 cx,
3098 );
3099 cx.notify();
3100 }
3101
3102 /// Returns a list of strings which trigger a completion menu for this language.
3103 /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
3104 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3105 &self.completion_triggers
3106 }
3107
3108 /// Call this directly after performing edits to prevent the preview tab
3109 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3110 /// to return false until there are additional edits.
3111 pub fn refresh_preview(&mut self) {
3112 self.preview_version = self.version.clone();
3113 }
3114
3115 /// Whether we should preserve the preview status of a tab containing this buffer.
3116 pub fn preserve_preview(&self) -> bool {
3117 !self.has_edits_since(&self.preview_version)
3118 }
3119}
3120
3121#[doc(hidden)]
3122#[cfg(any(test, feature = "test-support"))]
3123impl Buffer {
3124 pub fn edit_via_marked_text(
3125 &mut self,
3126 marked_string: &str,
3127 autoindent_mode: Option<AutoindentMode>,
3128 cx: &mut Context<Self>,
3129 ) {
3130 let edits = self.edits_for_marked_text(marked_string);
3131 self.edit(edits, autoindent_mode, cx);
3132 }
3133
3134 pub fn set_group_interval(&mut self, group_interval: Duration) {
3135 self.text.set_group_interval(group_interval);
3136 }
3137
3138 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3139 where
3140 T: rand::Rng,
3141 {
3142 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3143 let mut last_end = None;
3144 for _ in 0..old_range_count {
3145 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3146 break;
3147 }
3148
3149 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3150 let mut range = self.random_byte_range(new_start, rng);
3151 if rng.random_bool(0.2) {
3152 mem::swap(&mut range.start, &mut range.end);
3153 }
3154 last_end = Some(range.end);
3155
3156 let new_text_len = rng.random_range(0..10);
3157 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3158 new_text = new_text.to_uppercase();
3159
3160 edits.push((range, new_text));
3161 }
3162 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3163 self.edit(edits, None, cx);
3164 }
3165
3166 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3167 let was_dirty = self.is_dirty();
3168 let old_version = self.version.clone();
3169
3170 let ops = self.text.randomly_undo_redo(rng);
3171 if !ops.is_empty() {
3172 for op in ops {
3173 self.send_operation(Operation::Buffer(op), true, cx);
3174 self.did_edit(&old_version, was_dirty, cx);
3175 }
3176 }
3177 }
3178}
3179
3180impl EventEmitter<BufferEvent> for Buffer {}
3181
3182impl Deref for Buffer {
3183 type Target = TextBuffer;
3184
3185 fn deref(&self) -> &Self::Target {
3186 &self.text
3187 }
3188}
3189
3190impl BufferSnapshot {
3191 /// Returns the [`IndentSize`] of the given line, measured from the line's
3192 /// current leading whitespace.
3193 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3194 indent_size_for_line(self, row)
3195 }
3196
3197 /// Returns [`IndentSize`] for a given position that respects user settings
3198 /// and language preferences.
3199 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3200 let settings = language_settings(
3201 self.language_at(position).map(|l| l.name()),
3202 self.file(),
3203 cx,
3204 );
3205 if settings.hard_tabs {
3206 IndentSize::tab()
3207 } else {
3208 IndentSize::spaces(settings.tab_size.get())
3209 }
3210 }
3211
3212 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3213 /// is passed in as `single_indent_size`.
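///
/// A sketch, assuming `snapshot` is a snapshot of this buffer; the row range and
/// indent unit below are arbitrary:
///
/// ```ignore
/// let indents = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
/// for (row, indent) in indents {
///     println!("row {row}: suggested indent of {} columns", indent.len);
/// }
/// ```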
3214 pub fn suggested_indents(
3215 &self,
3216 rows: impl Iterator<Item = u32>,
3217 single_indent_size: IndentSize,
3218 ) -> BTreeMap<u32, IndentSize> {
3219 let mut result = BTreeMap::new();
3220
3221 for row_range in contiguous_ranges(rows, 10) {
3222 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3223 Some(suggestions) => suggestions,
3224 _ => break,
3225 };
3226
3227 for (row, suggestion) in row_range.zip(suggestions) {
3228 let indent_size = if let Some(suggestion) = suggestion {
3229 result
3230 .get(&suggestion.basis_row)
3231 .copied()
3232 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3233 .with_delta(suggestion.delta, single_indent_size)
3234 } else {
3235 self.indent_size_for_line(row)
3236 };
3237
3238 result.insert(row, indent_size);
3239 }
3240 }
3241
3242 result
3243 }
3244
3245 fn suggest_autoindents(
3246 &self,
3247 row_range: Range<u32>,
3248 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3249 let config = &self.language.as_ref()?.config;
3250 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3251
3252 #[derive(Debug, Clone)]
3253 struct StartPosition {
3254 start: Point,
3255 suffix: SharedString,
3256 language: Arc<Language>,
3257 }
3258
3259 // Find the suggested indentation ranges based on the syntax tree.
3260 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3261 let end = Point::new(row_range.end, 0);
3262 let range = (start..end).to_offset(&self.text);
3263 let mut matches = self.syntax.matches_with_options(
3264 range.clone(),
3265 &self.text,
3266 TreeSitterOptions {
3267 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3268 max_start_depth: None,
3269 },
3270 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3271 );
3272 let indent_configs = matches
3273 .grammars()
3274 .iter()
3275 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3276 .collect::<Vec<_>>();
3277
3278 let mut indent_ranges = Vec::<Range<Point>>::new();
3279 let mut start_positions = Vec::<StartPosition>::new();
3280 let mut outdent_positions = Vec::<Point>::new();
3281 while let Some(mat) = matches.peek() {
3282 let mut start: Option<Point> = None;
3283 let mut end: Option<Point> = None;
3284
3285 let config = indent_configs[mat.grammar_index];
3286 for capture in mat.captures {
3287 if capture.index == config.indent_capture_ix {
3288 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3289 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3290 } else if Some(capture.index) == config.start_capture_ix {
3291 start = Some(Point::from_ts_point(capture.node.end_position()));
3292 } else if Some(capture.index) == config.end_capture_ix {
3293 end = Some(Point::from_ts_point(capture.node.start_position()));
3294 } else if Some(capture.index) == config.outdent_capture_ix {
3295 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3296 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3297 start_positions.push(StartPosition {
3298 start: Point::from_ts_point(capture.node.start_position()),
3299 suffix: suffix.clone(),
3300 language: mat.language.clone(),
3301 });
3302 }
3303 }
3304
3305 matches.advance();
3306 if let Some((start, end)) = start.zip(end) {
3307 if start.row == end.row {
3308 continue;
3309 }
3310 let range = start..end;
3311 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3312 Err(ix) => indent_ranges.insert(ix, range),
3313 Ok(ix) => {
3314 let prev_range = &mut indent_ranges[ix];
3315 prev_range.end = prev_range.end.max(range.end);
3316 }
3317 }
3318 }
3319 }
3320
3321 let mut error_ranges = Vec::<Range<Point>>::new();
3322 let mut matches = self
3323 .syntax
3324 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3325 while let Some(mat) = matches.peek() {
3326 let node = mat.captures[0].node;
3327 let start = Point::from_ts_point(node.start_position());
3328 let end = Point::from_ts_point(node.end_position());
3329 let range = start..end;
3330 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3331 Ok(ix) | Err(ix) => ix,
3332 };
3333 let mut end_ix = ix;
3334 while let Some(existing_range) = error_ranges.get(end_ix) {
3335 if existing_range.end < end {
3336 end_ix += 1;
3337 } else {
3338 break;
3339 }
3340 }
3341 error_ranges.splice(ix..end_ix, [range]);
3342 matches.advance();
3343 }
3344
3345 outdent_positions.sort();
3346 for outdent_position in outdent_positions {
3347 // Find the innermost indent range containing this outdent_position,
3348 // and set its end to the outdent position.
3349 if let Some(range_to_truncate) = indent_ranges
3350 .iter_mut()
3351 .rfind(|indent_range| indent_range.contains(&outdent_position))
3352 {
3353 range_to_truncate.end = outdent_position;
3354 }
3355 }
3356
3357 start_positions.sort_by_key(|b| b.start);
3358
3359 // Find the suggested indentation increases and decreases based on regexes.
3360 let mut regex_outdent_map = HashMap::default();
3361 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3362 let mut start_positions_iter = start_positions.iter().peekable();
3363
3364 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3365 self.for_each_line(
3366 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3367 ..Point::new(row_range.end, 0),
3368 |row, line| {
3369 let indent_len = self.indent_size_for_line(row).len;
3370 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3371 let row_language_config = row_language
3372 .as_ref()
3373 .map(|lang| lang.config())
3374 .unwrap_or(config);
3375
3376 if row_language_config
3377 .decrease_indent_pattern
3378 .as_ref()
3379 .is_some_and(|regex| regex.is_match(line))
3380 {
3381 indent_change_rows.push((row, Ordering::Less));
3382 }
3383 if row_language_config
3384 .increase_indent_pattern
3385 .as_ref()
3386 .is_some_and(|regex| regex.is_match(line))
3387 {
3388 indent_change_rows.push((row + 1, Ordering::Greater));
3389 }
3390 while let Some(pos) = start_positions_iter.peek() {
3391 if pos.start.row < row {
3392 let pos = start_positions_iter.next().unwrap().clone();
3393 last_seen_suffix
3394 .entry(pos.suffix.to_string())
3395 .or_default()
3396 .push(pos);
3397 } else {
3398 break;
3399 }
3400 }
3401 for rule in &row_language_config.decrease_indent_patterns {
3402 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3403 let row_start_column = self.indent_size_for_line(row).len;
3404 let basis_row = rule
3405 .valid_after
3406 .iter()
3407 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3408 .flatten()
3409 .filter(|pos| {
3410 row_language
3411 .as_ref()
3412 .or(self.language.as_ref())
3413 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3414 })
3415 .filter(|pos| pos.start.column <= row_start_column)
3416 .max_by_key(|pos| pos.start.row);
3417 if let Some(outdent_to) = basis_row {
3418 regex_outdent_map.insert(row, outdent_to.start.row);
3419 }
3420 break;
3421 }
3422 }
3423 },
3424 );
3425
3426 let mut indent_changes = indent_change_rows.into_iter().peekable();
3427 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3428 prev_non_blank_row.unwrap_or(0)
3429 } else {
3430 row_range.start.saturating_sub(1)
3431 };
3432
3433 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3434 Some(row_range.map(move |row| {
3435 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3436
3437 let mut indent_from_prev_row = false;
3438 let mut outdent_from_prev_row = false;
3439 let mut outdent_to_row = u32::MAX;
3440 let mut from_regex = false;
3441
3442 while let Some((indent_row, delta)) = indent_changes.peek() {
3443 match indent_row.cmp(&row) {
3444 Ordering::Equal => match delta {
3445 Ordering::Less => {
3446 from_regex = true;
3447 outdent_from_prev_row = true
3448 }
3449 Ordering::Greater => {
3450 indent_from_prev_row = true;
3451 from_regex = true
3452 }
3453 _ => {}
3454 },
3455
3456 Ordering::Greater => break,
3457 Ordering::Less => {}
3458 }
3459
3460 indent_changes.next();
3461 }
3462
3463 for range in &indent_ranges {
3464 if range.start.row >= row {
3465 break;
3466 }
3467 if range.start.row == prev_row && range.end > row_start {
3468 indent_from_prev_row = true;
3469 }
3470 if range.end > prev_row_start && range.end <= row_start {
3471 outdent_to_row = outdent_to_row.min(range.start.row);
3472 }
3473 }
3474
3475 if let Some(basis_row) = regex_outdent_map.get(&row) {
3476 indent_from_prev_row = false;
3477 outdent_to_row = *basis_row;
3478 from_regex = true;
3479 }
3480
3481 let within_error = error_ranges
3482 .iter()
3483 .any(|e| e.start.row < row && e.end > row_start);
3484
3485 let suggestion = if outdent_to_row == prev_row
3486 || (outdent_from_prev_row && indent_from_prev_row)
3487 {
3488 Some(IndentSuggestion {
3489 basis_row: prev_row,
3490 delta: Ordering::Equal,
3491 within_error: within_error && !from_regex,
3492 })
3493 } else if indent_from_prev_row {
3494 Some(IndentSuggestion {
3495 basis_row: prev_row,
3496 delta: Ordering::Greater,
3497 within_error: within_error && !from_regex,
3498 })
3499 } else if outdent_to_row < prev_row {
3500 Some(IndentSuggestion {
3501 basis_row: outdent_to_row,
3502 delta: Ordering::Equal,
3503 within_error: within_error && !from_regex,
3504 })
3505 } else if outdent_from_prev_row {
3506 Some(IndentSuggestion {
3507 basis_row: prev_row,
3508 delta: Ordering::Less,
3509 within_error: within_error && !from_regex,
3510 })
3511 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3512 {
3513 Some(IndentSuggestion {
3514 basis_row: prev_row,
3515 delta: Ordering::Equal,
3516 within_error: within_error && !from_regex,
3517 })
3518 } else {
3519 None
3520 };
3521
3522 prev_row = row;
3523 prev_row_start = row_start;
3524 suggestion
3525 }))
3526 }
3527
3528 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3529 while row > 0 {
3530 row -= 1;
3531 if !self.is_line_blank(row) {
3532 return Some(row);
3533 }
3534 }
3535 None
3536 }
3537
3538 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3539 let captures = self.syntax.captures(range, &self.text, |grammar| {
3540 grammar
3541 .highlights_config
3542 .as_ref()
3543 .map(|config| &config.query)
3544 });
3545 let highlight_maps = captures
3546 .grammars()
3547 .iter()
3548 .map(|grammar| grammar.highlight_map())
3549 .collect();
3550 (captures, highlight_maps)
3551 }
3552
3553 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3554 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3555 /// returned in chunks where each chunk has a single syntax highlighting style and
3556 /// diagnostic status.
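///
/// A minimal usage sketch (illustrative; assumes a `BufferSnapshot` named `snapshot` is in scope):
///
/// ```ignore
/// let mut text = String::new();
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // Each chunk carries an optional syntax highlight id and diagnostic severity.
///     text.push_str(chunk.text);
/// }
/// ```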
3557 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3558 let range = range.start.to_offset(self)..range.end.to_offset(self);
3559
3560 let mut syntax = None;
3561 if language_aware {
3562 syntax = Some(self.get_highlights(range.clone()));
3563 }
3564 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3565 let diagnostics = language_aware;
3566 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3567 }
3568
3569 pub fn highlighted_text_for_range<T: ToOffset>(
3570 &self,
3571 range: Range<T>,
3572 override_style: Option<HighlightStyle>,
3573 syntax_theme: &SyntaxTheme,
3574 ) -> HighlightedText {
3575 HighlightedText::from_buffer_range(
3576 range,
3577 &self.text,
3578 &self.syntax,
3579 override_style,
3580 syntax_theme,
3581 )
3582 }
3583
3584 /// Invokes the given callback for each line of text in the given range of the buffer.
3585 /// A callback is used to avoid allocating a new string for each line.
3586 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3587 let mut line = String::new();
3588 let mut row = range.start.row;
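// A trailing "\n" is chained onto the chunk stream so that the final line is flushed
// through the callback even when the range does not end in a newline.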
3589 for chunk in self
3590 .as_rope()
3591 .chunks_in_range(range.to_offset(self))
3592 .chain(["\n"])
3593 {
3594 for (newline_ix, text) in chunk.split('\n').enumerate() {
3595 if newline_ix > 0 {
3596 callback(row, &line);
3597 row += 1;
3598 line.clear();
3599 }
3600 line.push_str(text);
3601 }
3602 }
3603 }
3604
3605 /// Iterates over every [`SyntaxLayer`] in the buffer.
3606 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3607 self.syntax_layers_for_range(0..self.len(), true)
3608 }
3609
3610 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3611 let offset = position.to_offset(self);
3612 self.syntax_layers_for_range(offset..offset, false)
3613 .filter(|l| {
3614 if let Some(ranges) = l.included_sub_ranges {
3615 ranges.iter().any(|range| {
3616 let start = range.start.to_offset(self);
3617 start <= offset && {
3618 let end = range.end.to_offset(self);
3619 offset < end
3620 }
3621 })
3622 } else {
3623 l.node().start_byte() <= offset && l.node().end_byte() > offset
3624 }
3625 })
3626 .last()
3627 }
3628
3629 pub fn syntax_layers_for_range<D: ToOffset>(
3630 &self,
3631 range: Range<D>,
3632 include_hidden: bool,
3633 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3634 self.syntax
3635 .layers_for_range(range, &self.text, include_hidden)
3636 }
3637
3638 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3639 &self,
3640 range: Range<D>,
3641 ) -> Option<SyntaxLayer<'_>> {
3642 let range = range.to_offset(self);
3643 self.syntax
3644 .layers_for_range(range, &self.text, false)
3645 .max_by(|a, b| {
3646 if a.depth != b.depth {
3647 a.depth.cmp(&b.depth)
3648 } else if a.offset.0 != b.offset.0 {
3649 a.offset.0.cmp(&b.offset.0)
3650 } else {
3651 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3652 }
3653 })
3654 }
3655
3656 /// Returns the main [`Language`].
3657 pub fn language(&self) -> Option<&Arc<Language>> {
3658 self.language.as_ref()
3659 }
3660
3661 /// Returns the [`Language`] at the given location.
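///
/// A minimal usage sketch (illustrative; assumes a `BufferSnapshot` named `snapshot` and a byte offset `offset`):
///
/// ```ignore
/// let language_name = snapshot.language_at(offset).map(|language| language.name());
/// ```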
3662 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3663 self.syntax_layer_at(position)
3664 .map(|info| info.language)
3665 .or(self.language.as_ref())
3666 }
3667
3668 /// Returns the settings for the language at the given location.
3669 pub fn settings_at<'a, D: ToOffset>(
3670 &'a self,
3671 position: D,
3672 cx: &'a App,
3673 ) -> Cow<'a, LanguageSettings> {
3674 language_settings(
3675 self.language_at(position).map(|l| l.name()),
3676 self.file.as_ref(),
3677 cx,
3678 )
3679 }
3680
3681 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3682 CharClassifier::new(self.language_scope_at(point))
3683 }
3684
3685 /// Returns the [`LanguageScope`] at the given location.
3686 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3687 let offset = position.to_offset(self);
3688 let mut scope = None;
3689 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3690
3691 // Use the layer that has the smallest node intersecting the given point.
3692 for layer in self
3693 .syntax
3694 .layers_for_range(offset..offset, &self.text, false)
3695 {
3696 let mut cursor = layer.node().walk();
3697
3698 let mut range = None;
3699 loop {
3700 let child_range = cursor.node().byte_range();
3701 if !child_range.contains(&offset) {
3702 break;
3703 }
3704
3705 range = Some(child_range);
3706 if cursor.goto_first_child_for_byte(offset).is_none() {
3707 break;
3708 }
3709 }
3710
3711 if let Some(range) = range
3712 && smallest_range_and_depth.as_ref().is_none_or(
3713 |(smallest_range, smallest_range_depth)| {
3714 if layer.depth > *smallest_range_depth {
3715 true
3716 } else if layer.depth == *smallest_range_depth {
3717 range.len() < smallest_range.len()
3718 } else {
3719 false
3720 }
3721 },
3722 )
3723 {
3724 smallest_range_and_depth = Some((range, layer.depth));
3725 scope = Some(LanguageScope {
3726 language: layer.language.clone(),
3727 override_id: layer.override_id(offset, &self.text),
3728 });
3729 }
3730 }
3731
3732 scope.or_else(|| {
3733 self.language.clone().map(|language| LanguageScope {
3734 language,
3735 override_id: None,
3736 })
3737 })
3738 }
3739
3740 /// Returns a tuple of the range and character kind of the word
3741 /// surrounding the given position.
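///
/// A minimal usage sketch (illustrative; assumes a `BufferSnapshot` named `snapshot` and a byte offset `offset`):
///
/// ```ignore
/// let (word_range, _kind) = snapshot.surrounding_word(offset, None);
/// let word: String = snapshot.text_for_range(word_range).collect();
/// ```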
3742 pub fn surrounding_word<T: ToOffset>(
3743 &self,
3744 start: T,
3745 scope_context: Option<CharScopeContext>,
3746 ) -> (Range<usize>, Option<CharKind>) {
3747 let mut start = start.to_offset(self);
3748 let mut end = start;
3749 let mut next_chars = self.chars_at(start).take(128).peekable();
3750 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3751
3752 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3753 let word_kind = cmp::max(
3754 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3755 next_chars.peek().copied().map(|c| classifier.kind(c)),
3756 );
3757
3758 for ch in prev_chars {
3759 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3760 start -= ch.len_utf8();
3761 } else {
3762 break;
3763 }
3764 }
3765
3766 for ch in next_chars {
3767 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3768 end += ch.len_utf8();
3769 } else {
3770 break;
3771 }
3772 }
3773
3774 (start..end, word_kind)
3775 }
3776
3777 /// Moves the `TreeCursor` to the smallest syntax node (whether a descendant or an ancestor of the
3778 /// current node) that encloses the given range. When `require_larger` is true, the node found must be strictly larger than the query range.
3779 ///
3780 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3781 /// be moved to the root of the tree.
3782 fn goto_node_enclosing_range(
3783 cursor: &mut tree_sitter::TreeCursor,
3784 query_range: &Range<usize>,
3785 require_larger: bool,
3786 ) -> bool {
3787 let mut ascending = false;
3788 loop {
3789 let mut range = cursor.node().byte_range();
3790 if query_range.is_empty() {
3791 // When the query range is empty and the current node starts after it, move to the
3792 // previous sibling to find the containing node.
3793 if range.start > query_range.start {
3794 cursor.goto_previous_sibling();
3795 range = cursor.node().byte_range();
3796 }
3797 } else {
3798 // When the query range is non-empty and the current node ends exactly at the start,
3799 // move to the next sibling to find a node that extends beyond the start.
3800 if range.end == query_range.start {
3801 cursor.goto_next_sibling();
3802 range = cursor.node().byte_range();
3803 }
3804 }
3805
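// If the current node does not enclose the query range (strictly, when `require_larger` is
// set), walk up to the parent. Once we have started ascending, the first node that does
// enclose the range is the answer; otherwise keep descending toward the range start.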
3806 let encloses = range.contains_inclusive(query_range)
3807 && (!require_larger || range.len() > query_range.len());
3808 if !encloses {
3809 ascending = true;
3810 if !cursor.goto_parent() {
3811 return false;
3812 }
3813 continue;
3814 } else if ascending {
3815 return true;
3816 }
3817
3818 // Descend into the current node.
3819 if cursor
3820 .goto_first_child_for_byte(query_range.start)
3821 .is_none()
3822 {
3823 return true;
3824 }
3825 }
3826 }
3827
3828 pub fn syntax_ancestor<'a, T: ToOffset>(
3829 &'a self,
3830 range: Range<T>,
3831 ) -> Option<tree_sitter::Node<'a>> {
3832 let range = range.start.to_offset(self)..range.end.to_offset(self);
3833 let mut result: Option<tree_sitter::Node<'a>> = None;
3834 for layer in self
3835 .syntax
3836 .layers_for_range(range.clone(), &self.text, true)
3837 {
3838 let mut cursor = layer.node().walk();
3839
3840 // Find the node that both contains the range and is larger than it.
3841 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3842 continue;
3843 }
3844
3845 let left_node = cursor.node();
3846 let mut layer_result = left_node;
3847
3848 // For an empty range, try to find another node immediately to the right of the range.
3849 if left_node.end_byte() == range.start {
3850 let mut right_node = None;
3851 while !cursor.goto_next_sibling() {
3852 if !cursor.goto_parent() {
3853 break;
3854 }
3855 }
3856
3857 while cursor.node().start_byte() == range.start {
3858 right_node = Some(cursor.node());
3859 if !cursor.goto_first_child() {
3860 break;
3861 }
3862 }
3863
3864 // If there is a candidate node on both sides of the (empty) range, then
3865 // decide between the two by favoring a named node over an anonymous token.
3866 // If both nodes are the same in that regard, favor the right one.
3867 if let Some(right_node) = right_node
3868 && (right_node.is_named() || !left_node.is_named())
3869 {
3870 layer_result = right_node;
3871 }
3872 }
3873
3874 if let Some(previous_result) = &result
3875 && previous_result.byte_range().len() < layer_result.byte_range().len()
3876 {
3877 continue;
3878 }
3879 result = Some(layer_result);
3880 }
3881
3882 result
3883 }
3884
3885 /// Find the previous sibling syntax node at the given range.
3886 ///
3887 /// This function locates the syntax node that precedes the node containing
3888 /// the given range. It searches hierarchically by:
3889 /// 1. Finding the node that contains the given range
3890 /// 2. Looking for the previous sibling at the same tree level
3891 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3892 ///
3893 /// Returns `None` if there is no previous sibling at any ancestor level.
3894 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3895 &'a self,
3896 range: Range<T>,
3897 ) -> Option<tree_sitter::Node<'a>> {
3898 let range = range.start.to_offset(self)..range.end.to_offset(self);
3899 let mut result: Option<tree_sitter::Node<'a>> = None;
3900
3901 for layer in self
3902 .syntax
3903 .layers_for_range(range.clone(), &self.text, true)
3904 {
3905 let mut cursor = layer.node().walk();
3906
3907 // Find the node that contains the range
3908 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3909 continue;
3910 }
3911
3912 // Look for the previous sibling, moving up ancestor levels if needed
3913 loop {
3914 if cursor.goto_previous_sibling() {
3915 let layer_result = cursor.node();
3916
3917 if let Some(previous_result) = &result {
3918 if previous_result.byte_range().end < layer_result.byte_range().end {
3919 continue;
3920 }
3921 }
3922 result = Some(layer_result);
3923 break;
3924 }
3925
3926 // No sibling found at this level, try moving up to parent
3927 if !cursor.goto_parent() {
3928 break;
3929 }
3930 }
3931 }
3932
3933 result
3934 }
3935
3936 /// Find the next sibling syntax node at the given range.
3937 ///
3938 /// This function locates the syntax node that follows the node containing
3939 /// the given range. It searches hierarchically by:
3940 /// 1. Finding the node that contains the given range
3941 /// 2. Looking for the next sibling at the same tree level
3942 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3943 ///
3944 /// Returns `None` if there is no next sibling at any ancestor level.
3945 pub fn syntax_next_sibling<'a, T: ToOffset>(
3946 &'a self,
3947 range: Range<T>,
3948 ) -> Option<tree_sitter::Node<'a>> {
3949 let range = range.start.to_offset(self)..range.end.to_offset(self);
3950 let mut result: Option<tree_sitter::Node<'a>> = None;
3951
3952 for layer in self
3953 .syntax
3954 .layers_for_range(range.clone(), &self.text, true)
3955 {
3956 let mut cursor = layer.node().walk();
3957
3958 // Find the node that contains the range
3959 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3960 continue;
3961 }
3962
3963 // Look for the next sibling, moving up ancestor levels if needed
3964 loop {
3965 if cursor.goto_next_sibling() {
3966 let layer_result = cursor.node();
3967
3968 if let Some(previous_result) = &result {
3969 if previous_result.byte_range().start > layer_result.byte_range().start {
3970 continue;
3971 }
3972 }
3973 result = Some(layer_result);
3974 break;
3975 }
3976
3977 // No sibling found at this level, try moving up to parent
3978 if !cursor.goto_parent() {
3979 break;
3980 }
3981 }
3982 }
3983
3984 result
3985 }
3986
3987 /// Returns the root syntax node within the given row.
3988 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3989 let start_offset = position.to_offset(self);
3990
3991 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3992
3993 let layer = self
3994 .syntax
3995 .layers_for_range(start_offset..start_offset, &self.text, true)
3996 .next()?;
3997
3998 let mut cursor = layer.node().walk();
3999
4000 // Descend to the first leaf that touches the position's offset.
4001 while cursor.goto_first_child_for_byte(start_offset).is_some() {
4002 if cursor.node().end_byte() == start_offset {
4003 cursor.goto_next_sibling();
4004 }
4005 }
4006
4007 // Ascend to the root node within the same row.
4008 while cursor.goto_parent() {
4009 if cursor.node().start_position().row != row {
4010 break;
4011 }
4012 }
4013
4014 Some(cursor.node())
4015 }
4016
4017 /// Returns the outline for the buffer.
4018 ///
4019 /// This method allows passing an optional [`SyntaxTheme`] to
4020 /// syntax-highlight the returned symbols.
4021 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4022 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4023 }
4024
4025 /// Returns all the symbols that contain the given position.
4026 ///
4027 /// This method allows passing an optional [`SyntaxTheme`] to
4028 /// syntax-highlight the returned symbols.
4029 pub fn symbols_containing<T: ToOffset>(
4030 &self,
4031 position: T,
4032 theme: Option<&SyntaxTheme>,
4033 ) -> Vec<OutlineItem<Anchor>> {
4034 let position = position.to_offset(self);
4035 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4036 let end = self.clip_offset(position + 1, Bias::Right);
4037 let mut items = self.outline_items_containing(start..end, false, theme);
4038 let mut prev_depth = None;
4039 items.retain(|item| {
4040 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4041 prev_depth = Some(item.depth);
4042 result
4043 });
4044 items
4045 }
4046
4047 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4048 let range = range.to_offset(self);
4049 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4050 grammar.outline_config.as_ref().map(|c| &c.query)
4051 });
4052 let configs = matches
4053 .grammars()
4054 .iter()
4055 .map(|g| g.outline_config.as_ref().unwrap())
4056 .collect::<Vec<_>>();
4057
4058 while let Some(mat) = matches.peek() {
4059 let config = &configs[mat.grammar_index];
4060 let containing_item_node = maybe!({
4061 let item_node = mat.captures.iter().find_map(|cap| {
4062 if cap.index == config.item_capture_ix {
4063 Some(cap.node)
4064 } else {
4065 None
4066 }
4067 })?;
4068
4069 let item_byte_range = item_node.byte_range();
4070 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4071 None
4072 } else {
4073 Some(item_node)
4074 }
4075 });
4076
4077 if let Some(item_node) = containing_item_node {
4078 return Some(
4079 Point::from_ts_point(item_node.start_position())
4080 ..Point::from_ts_point(item_node.end_position()),
4081 );
4082 }
4083
4084 matches.advance();
4085 }
4086 None
4087 }
4088
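/// Returns the outline items whose ranges intersect the given range.
///
/// A minimal usage sketch (illustrative; assumes a `BufferSnapshot` named `snapshot` is in scope):
///
/// ```ignore
/// let names: Vec<String> = snapshot
///     .outline_items_containing(0..snapshot.len(), true, None)
///     .into_iter()
///     .map(|item| item.text)
///     .collect();
/// ```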
4089 pub fn outline_items_containing<T: ToOffset>(
4090 &self,
4091 range: Range<T>,
4092 include_extra_context: bool,
4093 theme: Option<&SyntaxTheme>,
4094 ) -> Vec<OutlineItem<Anchor>> {
4095 self.outline_items_containing_internal(
4096 range,
4097 include_extra_context,
4098 theme,
4099 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4100 )
4101 }
4102
4103 pub fn outline_items_as_points_containing<T: ToOffset>(
4104 &self,
4105 range: Range<T>,
4106 include_extra_context: bool,
4107 theme: Option<&SyntaxTheme>,
4108 ) -> Vec<OutlineItem<Point>> {
4109 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4110 range
4111 })
4112 }
4113
4114 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4115 &self,
4116 range: Range<T>,
4117 include_extra_context: bool,
4118 theme: Option<&SyntaxTheme>,
4119 ) -> Vec<OutlineItem<usize>> {
4120 self.outline_items_containing_internal(
4121 range,
4122 include_extra_context,
4123 theme,
4124 |buffer, range| range.to_offset(buffer),
4125 )
4126 }
4127
4128 fn outline_items_containing_internal<T: ToOffset, U>(
4129 &self,
4130 range: Range<T>,
4131 include_extra_context: bool,
4132 theme: Option<&SyntaxTheme>,
4133 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4134 ) -> Vec<OutlineItem<U>> {
4135 let range = range.to_offset(self);
4136 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4137 grammar.outline_config.as_ref().map(|c| &c.query)
4138 });
4139
4140 let mut items = Vec::new();
4141 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4142 while let Some(mat) = matches.peek() {
4143 let config = matches.grammars()[mat.grammar_index]
4144 .outline_config
4145 .as_ref()
4146 .unwrap();
4147 if let Some(item) =
4148 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4149 {
4150 items.push(item);
4151 } else if let Some(capture) = mat
4152 .captures
4153 .iter()
4154 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4155 {
4156 let capture_range = capture.node.start_position()..capture.node.end_position();
4157 let mut capture_row_range =
4158 capture_range.start.row as u32..capture_range.end.row as u32;
4159 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4160 {
4161 capture_row_range.end -= 1;
4162 }
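// Merge annotation ranges that are adjacent or overlapping into a single row range.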
4163 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4164 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4165 last_row_range.end = capture_row_range.end;
4166 } else {
4167 annotation_row_ranges.push(capture_row_range);
4168 }
4169 } else {
4170 annotation_row_ranges.push(capture_row_range);
4171 }
4172 }
4173 matches.advance();
4174 }
4175
4176 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4177
4178 // Assign depths based on containment relationships and convert ranges via the provided callback.
4179 let mut item_ends_stack = Vec::<Point>::new();
4180 let mut anchor_items = Vec::new();
4181 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4182 for item in items {
4183 while let Some(last_end) = item_ends_stack.last().copied() {
4184 if last_end < item.range.end {
4185 item_ends_stack.pop();
4186 } else {
4187 break;
4188 }
4189 }
4190
4191 let mut annotation_row_range = None;
4192 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4193 let row_preceding_item = item.range.start.row.saturating_sub(1);
4194 if next_annotation_row_range.end < row_preceding_item {
4195 annotation_row_ranges.next();
4196 } else {
4197 if next_annotation_row_range.end == row_preceding_item {
4198 annotation_row_range = Some(next_annotation_row_range.clone());
4199 annotation_row_ranges.next();
4200 }
4201 break;
4202 }
4203 }
4204
4205 anchor_items.push(OutlineItem {
4206 depth: item_ends_stack.len(),
4207 range: range_callback(self, item.range.clone()),
4208 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4209 text: item.text,
4210 highlight_ranges: item.highlight_ranges,
4211 name_ranges: item.name_ranges,
4212 body_range: item.body_range.map(|r| range_callback(self, r)),
4213 annotation_range: annotation_row_range.map(|annotation_range| {
4214 let point_range = Point::new(annotation_range.start, 0)
4215 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4216 range_callback(self, point_range)
4217 }),
4218 });
4219 item_ends_stack.push(item.range.end);
4220 }
4221
4222 anchor_items
4223 }
4224
4225 fn next_outline_item(
4226 &self,
4227 config: &OutlineConfig,
4228 mat: &SyntaxMapMatch,
4229 range: &Range<usize>,
4230 include_extra_context: bool,
4231 theme: Option<&SyntaxTheme>,
4232 ) -> Option<OutlineItem<Point>> {
4233 let item_node = mat.captures.iter().find_map(|cap| {
4234 if cap.index == config.item_capture_ix {
4235 Some(cap.node)
4236 } else {
4237 None
4238 }
4239 })?;
4240
4241 let item_byte_range = item_node.byte_range();
4242 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4243 return None;
4244 }
4245 let item_point_range = Point::from_ts_point(item_node.start_position())
4246 ..Point::from_ts_point(item_node.end_position());
4247
4248 let mut open_point = None;
4249 let mut close_point = None;
4250
4251 let mut buffer_ranges = Vec::new();
4252 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4253 let mut range = node.start_byte()..node.end_byte();
4254 let start = node.start_position();
4255 if node.end_position().row > start.row {
4256 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4257 }
4258
4259 if !range.is_empty() {
4260 buffer_ranges.push((range, node_is_name));
4261 }
4262 };
4263
4264 for capture in mat.captures {
4265 if capture.index == config.name_capture_ix {
4266 add_to_buffer_ranges(capture.node, true);
4267 } else if Some(capture.index) == config.context_capture_ix
4268 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4269 {
4270 add_to_buffer_ranges(capture.node, false);
4271 } else {
4272 if Some(capture.index) == config.open_capture_ix {
4273 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4274 } else if Some(capture.index) == config.close_capture_ix {
4275 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4276 }
4277 }
4278 }
4279
4280 if buffer_ranges.is_empty() {
4281 return None;
4282 }
4283 let source_range_for_text =
4284 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4285
4286 let mut text = String::new();
4287 let mut highlight_ranges = Vec::new();
4288 let mut name_ranges = Vec::new();
4289 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4290 let mut last_buffer_range_end = 0;
4291 for (buffer_range, is_name) in buffer_ranges {
4292 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4293 if space_added {
4294 text.push(' ');
4295 }
4296 let before_append_len = text.len();
4297 let mut offset = buffer_range.start;
4298 chunks.seek(buffer_range.clone());
4299 for mut chunk in chunks.by_ref() {
4300 if chunk.text.len() > buffer_range.end - offset {
4301 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4302 offset = buffer_range.end;
4303 } else {
4304 offset += chunk.text.len();
4305 }
4306 let style = chunk
4307 .syntax_highlight_id
4308 .zip(theme)
4309 .and_then(|(highlight, theme)| highlight.style(theme));
4310 if let Some(style) = style {
4311 let start = text.len();
4312 let end = start + chunk.text.len();
4313 highlight_ranges.push((start..end, style));
4314 }
4315 text.push_str(chunk.text);
4316 if offset >= buffer_range.end {
4317 break;
4318 }
4319 }
4320 if is_name {
4321 let after_append_len = text.len();
4322 let start = if space_added && !name_ranges.is_empty() {
4323 before_append_len - 1
4324 } else {
4325 before_append_len
4326 };
4327 name_ranges.push(start..after_append_len);
4328 }
4329 last_buffer_range_end = buffer_range.end;
4330 }
4331
4332 Some(OutlineItem {
4333 depth: 0, // We'll calculate the depth later
4334 range: item_point_range,
4335 source_range_for_text: source_range_for_text.to_point(self),
4336 text,
4337 highlight_ranges,
4338 name_ranges,
4339 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4340 annotation_range: None,
4341 })
4342 }
4343
4344 pub fn function_body_fold_ranges<T: ToOffset>(
4345 &self,
4346 within: Range<T>,
4347 ) -> impl Iterator<Item = Range<usize>> + '_ {
4348 self.text_object_ranges(within, TreeSitterOptions::default())
4349 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4350 }
4351
4352 /// For each grammar in the language, runs the provided
4353 /// [`tree_sitter::Query`] against the given range.
4354 pub fn matches(
4355 &self,
4356 range: Range<usize>,
4357 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4358 ) -> SyntaxMapMatches<'_> {
4359 self.syntax.matches(range, self, query)
4360 }
4361
4362 /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
4363 /// Hence, it may return more bracket pairs than the range itself contains.
4364 ///
4365 /// Chunks present in `known_chunks` are skipped.
4366 /// The resulting bracket match collections are not ordered.
4367 pub fn fetch_bracket_ranges(
4368 &self,
4369 range: Range<usize>,
4370 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4371 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4372 let mut all_bracket_matches = HashMap::default();
4373
4374 for chunk in self
4375 .tree_sitter_data
4376 .chunks
4377 .applicable_chunks(&[range.to_point(self)])
4378 {
4379 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4380 continue;
4381 }
4382 let chunk_range = chunk.anchor_range();
4383 let chunk_range = chunk_range.to_offset(&self);
4384
4385 if let Some(cached_brackets) =
4386 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4387 {
4388 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4389 continue;
4390 }
4391
4392 let mut all_brackets = Vec::new();
4393 let mut opens = Vec::new();
4394 let mut color_pairs = Vec::new();
4395
4396 let mut matches = self.syntax.matches_with_options(
4397 chunk_range.clone(),
4398 &self.text,
4399 TreeSitterOptions {
4400 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4401 max_start_depth: None,
4402 },
4403 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4404 );
4405 let configs = matches
4406 .grammars()
4407 .iter()
4408 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4409 .collect::<Vec<_>>();
4410
4411 while let Some(mat) = matches.peek() {
4412 let mut open = None;
4413 let mut close = None;
4414 let syntax_layer_depth = mat.depth;
4415 let config = configs[mat.grammar_index];
4416 let pattern = &config.patterns[mat.pattern_index];
4417 for capture in mat.captures {
4418 if capture.index == config.open_capture_ix {
4419 open = Some(capture.node.byte_range());
4420 } else if capture.index == config.close_capture_ix {
4421 close = Some(capture.node.byte_range());
4422 }
4423 }
4424
4425 matches.advance();
4426
4427 let Some((open_range, close_range)) = open.zip(close) else {
4428 continue;
4429 };
4430
4431 let bracket_range = open_range.start..=close_range.end;
4432 if !bracket_range.overlaps(&chunk_range) {
4433 continue;
4434 }
4435
4436 let index = all_brackets.len();
4437 all_brackets.push(BracketMatch {
4438 open_range: open_range.clone(),
4439 close_range: close_range.clone(),
4440 newline_only: pattern.newline_only,
4441 syntax_layer_depth,
4442 color_index: None,
4443 });
4444
4445 // Certain languages have "brackets" that are not really brackets, e.g. tags, and such
4446 // a pair will match the entire tag with all of the text inside it.
4447 // For now, only colorize pairs where at least one bracket is a single character.
4448 // We still need to colorize `<Element/>` bracket pairs, so this check cannot be made stricter.
4449 let should_color =
4450 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4451 if should_color {
4452 opens.push(open_range.clone());
4453 color_pairs.push((open_range, close_range, index));
4454 }
4455 }
4456
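// Second pass: assign rainbow color indices. Replay the open brackets in order onto a
// stack; when a close bracket pairs with the top of the stack, the pair's color index
// is its nesting depth at that point.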
4457 opens.sort_by_key(|r| (r.start, r.end));
4458 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4459 color_pairs.sort_by_key(|(_, close, _)| close.end);
4460
4461 let mut open_stack = Vec::new();
4462 let mut open_index = 0;
4463 for (open, close, index) in color_pairs {
4464 while open_index < opens.len() && opens[open_index].start < close.start {
4465 open_stack.push(opens[open_index].clone());
4466 open_index += 1;
4467 }
4468
4469 if open_stack.last() == Some(&open) {
4470 let depth_index = open_stack.len() - 1;
4471 all_brackets[index].color_index = Some(depth_index);
4472 open_stack.pop();
4473 }
4474 }
4475
4476 all_brackets.sort_by_key(|bracket_match| {
4477 (bracket_match.open_range.start, bracket_match.open_range.end)
4478 });
4479
4480 if let empty_slot @ None =
4481 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4482 {
4483 *empty_slot = Some(all_brackets.clone());
4484 }
4485 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4486 }
4487
4488 all_bracket_matches
4489 }
4490
4491 pub fn all_bracket_ranges(
4492 &self,
4493 range: Range<usize>,
4494 ) -> impl Iterator<Item = BracketMatch<usize>> {
4495 self.fetch_bracket_ranges(range.clone(), None)
4496 .into_values()
4497 .flatten()
4498 .filter(move |bracket_match| {
4499 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4500 bracket_range.overlaps(&range)
4501 })
4502 }
4503
4504 /// Returns bracket range pairs overlapping or adjacent to `range`.
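///
/// A minimal usage sketch (illustrative; assumes a `BufferSnapshot` named `snapshot` and a byte offset `offset`):
///
/// ```ignore
/// for pair in snapshot.bracket_ranges(offset..offset) {
///     // `open_range` and `close_range` are byte ranges into the buffer text.
///     let _ = (pair.open_range, pair.close_range);
/// }
/// ```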
4505 pub fn bracket_ranges<T: ToOffset>(
4506 &self,
4507 range: Range<T>,
4508 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4509 // Find bracket pairs that *inclusively* contain the given range.
4510 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4511 self.all_bracket_ranges(range)
4512 .filter(|pair| !pair.newline_only)
4513 }
4514
4515 pub fn debug_variables_query<T: ToOffset>(
4516 &self,
4517 range: Range<T>,
4518 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4519 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4520
4521 let mut matches = self.syntax.matches_with_options(
4522 range.clone(),
4523 &self.text,
4524 TreeSitterOptions::default(),
4525 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4526 );
4527
4528 let configs = matches
4529 .grammars()
4530 .iter()
4531 .map(|grammar| grammar.debug_variables_config.as_ref())
4532 .collect::<Vec<_>>();
4533
4534 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4535
4536 iter::from_fn(move || {
4537 loop {
4538 while let Some(capture) = captures.pop() {
4539 if capture.0.overlaps(&range) {
4540 return Some(capture);
4541 }
4542 }
4543
4544 let mat = matches.peek()?;
4545
4546 let Some(config) = configs[mat.grammar_index].as_ref() else {
4547 matches.advance();
4548 continue;
4549 };
4550
4551 for capture in mat.captures {
4552 let Some(ix) = config
4553 .objects_by_capture_ix
4554 .binary_search_by_key(&capture.index, |e| e.0)
4555 .ok()
4556 else {
4557 continue;
4558 };
4559 let text_object = config.objects_by_capture_ix[ix].1;
4560 let byte_range = capture.node.byte_range();
4561
4562 let mut found = false;
4563 for (range, existing) in captures.iter_mut() {
4564 if existing == &text_object {
4565 range.start = range.start.min(byte_range.start);
4566 range.end = range.end.max(byte_range.end);
4567 found = true;
4568 break;
4569 }
4570 }
4571
4572 if !found {
4573 captures.push((byte_range, text_object));
4574 }
4575 }
4576
4577 matches.advance();
4578 }
4579 })
4580 }
4581
4582 pub fn text_object_ranges<T: ToOffset>(
4583 &self,
4584 range: Range<T>,
4585 options: TreeSitterOptions,
4586 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4587 let range =
4588 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4589
4590 let mut matches =
4591 self.syntax
4592 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4593 grammar.text_object_config.as_ref().map(|c| &c.query)
4594 });
4595
4596 let configs = matches
4597 .grammars()
4598 .iter()
4599 .map(|grammar| grammar.text_object_config.as_ref())
4600 .collect::<Vec<_>>();
4601
4602 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4603
4604 iter::from_fn(move || {
4605 loop {
4606 while let Some(capture) = captures.pop() {
4607 if capture.0.overlaps(&range) {
4608 return Some(capture);
4609 }
4610 }
4611
4612 let mat = matches.peek()?;
4613
4614 let Some(config) = configs[mat.grammar_index].as_ref() else {
4615 matches.advance();
4616 continue;
4617 };
4618
4619 for capture in mat.captures {
4620 let Some(ix) = config
4621 .text_objects_by_capture_ix
4622 .binary_search_by_key(&capture.index, |e| e.0)
4623 .ok()
4624 else {
4625 continue;
4626 };
4627 let text_object = config.text_objects_by_capture_ix[ix].1;
4628 let byte_range = capture.node.byte_range();
4629
4630 let mut found = false;
4631 for (range, existing) in captures.iter_mut() {
4632 if existing == &text_object {
4633 range.start = range.start.min(byte_range.start);
4634 range.end = range.end.max(byte_range.end);
4635 found = true;
4636 break;
4637 }
4638 }
4639
4640 if !found {
4641 captures.push((byte_range, text_object));
4642 }
4643 }
4644
4645 matches.advance();
4646 }
4647 })
4648 }
4649
4650 /// Returns the enclosing bracket ranges containing the given range.
4651 pub fn enclosing_bracket_ranges<T: ToOffset>(
4652 &self,
4653 range: Range<T>,
4654 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4655 let range = range.start.to_offset(self)..range.end.to_offset(self);
4656
4657 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4658 let max_depth = result
4659 .iter()
4660 .map(|mat| mat.syntax_layer_depth)
4661 .max()
4662 .unwrap_or(0);
4663 result.into_iter().filter(move |pair| {
4664 pair.open_range.start <= range.start
4665 && pair.close_range.end >= range.end
4666 && pair.syntax_layer_depth == max_depth
4667 })
4668 }
4669
4670 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
4671 ///
4672 /// A `range_filter` can optionally be passed to restrict which bracket pairs are considered.
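///
/// A minimal usage sketch (illustrative; assumes a `BufferSnapshot` named `snapshot` and a byte range `selection`):
///
/// ```ignore
/// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(selection, None) {
///     // `open` and `close` are the byte ranges of the innermost enclosing bracket pair.
/// }
/// ```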
4673 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4674 &self,
4675 range: Range<T>,
4676 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4677 ) -> Option<(Range<usize>, Range<usize>)> {
4678 let range = range.start.to_offset(self)..range.end.to_offset(self);
4679
4680 // Get the ranges of the innermost pair of brackets.
4681 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4682
4683 for pair in self.enclosing_bracket_ranges(range) {
4684 if let Some(range_filter) = range_filter
4685 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4686 {
4687 continue;
4688 }
4689
4690 let len = pair.close_range.end - pair.open_range.start;
4691
4692 if let Some((existing_open, existing_close)) = &result {
4693 let existing_len = existing_close.end - existing_open.start;
4694 if len > existing_len {
4695 continue;
4696 }
4697 }
4698
4699 result = Some((pair.open_range, pair.close_range));
4700 }
4701
4702 result
4703 }
4704
4705 /// Returns anchor ranges for any matches of the redaction query.
4706 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4707 /// will be run on the relevant section of the buffer.
4708 pub fn redacted_ranges<T: ToOffset>(
4709 &self,
4710 range: Range<T>,
4711 ) -> impl Iterator<Item = Range<usize>> + '_ {
4712 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4713 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4714 grammar
4715 .redactions_config
4716 .as_ref()
4717 .map(|config| &config.query)
4718 });
4719
4720 let configs = syntax_matches
4721 .grammars()
4722 .iter()
4723 .map(|grammar| grammar.redactions_config.as_ref())
4724 .collect::<Vec<_>>();
4725
4726 iter::from_fn(move || {
4727 let redacted_range = syntax_matches
4728 .peek()
4729 .and_then(|mat| {
4730 configs[mat.grammar_index].and_then(|config| {
4731 mat.captures
4732 .iter()
4733 .find(|capture| capture.index == config.redaction_capture_ix)
4734 })
4735 })
4736 .map(|mat| mat.node.byte_range());
4737 syntax_matches.advance();
4738 redacted_range
4739 })
4740 }
4741
4742 pub fn injections_intersecting_range<T: ToOffset>(
4743 &self,
4744 range: Range<T>,
4745 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4746 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4747
4748 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4749 grammar
4750 .injection_config
4751 .as_ref()
4752 .map(|config| &config.query)
4753 });
4754
4755 let configs = syntax_matches
4756 .grammars()
4757 .iter()
4758 .map(|grammar| grammar.injection_config.as_ref())
4759 .collect::<Vec<_>>();
4760
4761 iter::from_fn(move || {
4762 let ranges = syntax_matches.peek().and_then(|mat| {
4763 let config = &configs[mat.grammar_index]?;
4764 let content_capture_range = mat.captures.iter().find_map(|capture| {
4765 if capture.index == config.content_capture_ix {
4766 Some(capture.node.byte_range())
4767 } else {
4768 None
4769 }
4770 })?;
4771 let language = self.language_at(content_capture_range.start)?;
4772 Some((content_capture_range, language))
4773 });
4774 syntax_matches.advance();
4775 ranges
4776 })
4777 }
4778
4779 pub fn runnable_ranges(
4780 &self,
4781 offset_range: Range<usize>,
4782 ) -> impl Iterator<Item = RunnableRange> + '_ {
4783 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4784 grammar.runnable_config.as_ref().map(|config| &config.query)
4785 });
4786
4787 let test_configs = syntax_matches
4788 .grammars()
4789 .iter()
4790 .map(|grammar| grammar.runnable_config.as_ref())
4791 .collect::<Vec<_>>();
4792
4793 iter::from_fn(move || {
4794 loop {
4795 let mat = syntax_matches.peek()?;
4796
4797 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4798 let mut run_range = None;
4799 let full_range = mat.captures.iter().fold(
4800 Range {
4801 start: usize::MAX,
4802 end: 0,
4803 },
4804 |mut acc, next| {
4805 let byte_range = next.node.byte_range();
4806 if acc.start > byte_range.start {
4807 acc.start = byte_range.start;
4808 }
4809 if acc.end < byte_range.end {
4810 acc.end = byte_range.end;
4811 }
4812 acc
4813 },
4814 );
4815 if full_range.start > full_range.end {
4816 // We did not find a full spanning range of this match.
4817 return None;
4818 }
4819 let extra_captures: SmallVec<[_; 1]> =
4820 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4821 test_configs
4822 .extra_captures
4823 .get(capture.index as usize)
4824 .cloned()
4825 .and_then(|tag_name| match tag_name {
4826 RunnableCapture::Named(name) => {
4827 Some((capture.node.byte_range(), name))
4828 }
4829 RunnableCapture::Run => {
4830 let _ = run_range.insert(capture.node.byte_range());
4831 None
4832 }
4833 })
4834 }));
4835 let run_range = run_range?;
4836 let tags = test_configs
4837 .query
4838 .property_settings(mat.pattern_index)
4839 .iter()
4840 .filter_map(|property| {
4841 if *property.key == *"tag" {
4842 property
4843 .value
4844 .as_ref()
4845 .map(|value| RunnableTag(value.to_string().into()))
4846 } else {
4847 None
4848 }
4849 })
4850 .collect();
4851 let extra_captures = extra_captures
4852 .into_iter()
4853 .map(|(range, name)| {
4854 (
4855 name.to_string(),
4856 self.text_for_range(range).collect::<String>(),
4857 )
4858 })
4859 .collect();
4860 // All tags should have the same range.
4861 Some(RunnableRange {
4862 run_range,
4863 full_range,
4864 runnable: Runnable {
4865 tags,
4866 language: mat.language,
4867 buffer: self.remote_id(),
4868 },
4869 extra_captures,
4870 buffer_id: self.remote_id(),
4871 })
4872 });
4873
4874 syntax_matches.advance();
4875 if test_range.is_some() {
4876 // It's fine to short-circuit when .peek()? returns None. But we don't want to end this iterator
4877 // just because a match lacked a run marker, so in that case we loop around to the next match.
4878 return test_range;
4879 }
4880 }
4881 })
4882 }
4883
4884 /// Returns selections for remote peers intersecting the given range.
4885 #[allow(clippy::type_complexity)]
4886 pub fn selections_in_range(
4887 &self,
4888 range: Range<Anchor>,
4889 include_local: bool,
4890 ) -> impl Iterator<
4891 Item = (
4892 ReplicaId,
4893 bool,
4894 CursorShape,
4895 impl Iterator<Item = &Selection<Anchor>> + '_,
4896 ),
4897 > + '_ {
4898 self.remote_selections
4899 .iter()
4900 .filter(move |(replica_id, set)| {
4901 (include_local || **replica_id != self.text.replica_id())
4902 && !set.selections.is_empty()
4903 })
4904 .map(move |(replica_id, set)| {
4905 let start_ix = match set.selections.binary_search_by(|probe| {
4906 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4907 }) {
4908 Ok(ix) | Err(ix) => ix,
4909 };
4910 let end_ix = match set.selections.binary_search_by(|probe| {
4911 probe.start.cmp(&range.end, self).then(Ordering::Less)
4912 }) {
4913 Ok(ix) | Err(ix) => ix,
4914 };
4915
4916 (
4917 *replica_id,
4918 set.line_mode,
4919 set.cursor_shape,
4920 set.selections[start_ix..end_ix].iter(),
4921 )
4922 })
4923 }
4924
4925 /// Returns whether the buffer contains any diagnostics.
4926 pub fn has_diagnostics(&self) -> bool {
4927 !self.diagnostics.is_empty()
4928 }
4929
4930 /// Returns all the diagnostics intersecting the given range.
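///
/// A minimal usage sketch (illustrative; assumes a `BufferSnapshot` named `snapshot` is in scope):
///
/// ```ignore
/// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
///     // `entry.range` is a byte range; `entry.diagnostic` holds severity and related data.
///     let _ = (entry.range, entry.diagnostic.severity);
/// }
/// ```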
4931 pub fn diagnostics_in_range<'a, T, O>(
4932 &'a self,
4933 search_range: Range<T>,
4934 reversed: bool,
4935 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4936 where
4937 T: 'a + Clone + ToOffset,
4938 O: 'a + FromAnchor,
4939 {
4940 let mut iterators: Vec<_> = self
4941 .diagnostics
4942 .iter()
4943 .map(|(_, collection)| {
4944 collection
4945 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4946 .peekable()
4947 })
4948 .collect();
4949
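// Merge the per-language-server diagnostic iterators, always yielding the entry whose
// range starts earliest (or latest, when `reversed`), breaking ties by severity and then group id.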
4950 std::iter::from_fn(move || {
4951 let (next_ix, _) = iterators
4952 .iter_mut()
4953 .enumerate()
4954 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4955 .min_by(|(_, a), (_, b)| {
4956 let cmp = a
4957 .range
4958 .start
4959 .cmp(&b.range.start, self)
4960 // when range is equal, sort by diagnostic severity
4961 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4962 // and stabilize order with group_id
4963 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4964 if reversed { cmp.reverse() } else { cmp }
4965 })?;
4966 iterators[next_ix]
4967 .next()
4968 .map(
4969 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4970 diagnostic,
4971 range: FromAnchor::from_anchor(&range.start, self)
4972 ..FromAnchor::from_anchor(&range.end, self),
4973 },
4974 )
4975 })
4976 }
4977
4978 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4979 /// should be used instead.
4980 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4981 &self.diagnostics
4982 }
4983
4984 /// Returns all the diagnostic groups associated with the given
4985 /// language server ID. If no language server ID is provided,
4986 /// all diagnostics groups are returned.
4987 pub fn diagnostic_groups(
4988 &self,
4989 language_server_id: Option<LanguageServerId>,
4990 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4991 let mut groups = Vec::new();
4992
4993 if let Some(language_server_id) = language_server_id {
4994 if let Ok(ix) = self
4995 .diagnostics
4996 .binary_search_by_key(&language_server_id, |e| e.0)
4997 {
4998 self.diagnostics[ix]
4999 .1
5000 .groups(language_server_id, &mut groups, self);
5001 }
5002 } else {
5003 for (language_server_id, diagnostics) in self.diagnostics.iter() {
5004 diagnostics.groups(*language_server_id, &mut groups, self);
5005 }
5006 }
5007
5008 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
5009 let a_start = &group_a.entries[group_a.primary_ix].range.start;
5010 let b_start = &group_b.entries[group_b.primary_ix].range.start;
5011 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5012 });
5013
5014 groups
5015 }
5016
5017 /// Returns an iterator over the diagnostics for the given group.
5018 pub fn diagnostic_group<O>(
5019 &self,
5020 group_id: usize,
5021 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5022 where
5023 O: FromAnchor + 'static,
5024 {
5025 self.diagnostics
5026 .iter()
5027 .flat_map(move |(_, set)| set.group(group_id, self))
5028 }
5029
5030 /// An integer version number that accounts for all updates besides
5031 /// the buffer's text itself (which is versioned via a version vector).
5032 pub fn non_text_state_update_count(&self) -> usize {
5033 self.non_text_state_update_count
5034 }
5035
5036 /// An integer version that changes when the buffer's syntax changes.
5037 pub fn syntax_update_count(&self) -> usize {
5038 self.syntax.update_count()
5039 }
5040
5041 /// Returns a snapshot of the underlying file.
5042 pub fn file(&self) -> Option<&Arc<dyn File>> {
5043 self.file.as_ref()
5044 }
5045
5046 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5047 if let Some(file) = self.file() {
5048 if file.path().file_name().is_none() || include_root {
5049 Some(file.full_path(cx).to_string_lossy().into_owned())
5050 } else {
5051 Some(file.path().display(file.path_style(cx)).to_string())
5052 }
5053 } else {
5054 None
5055 }
5056 }
5057
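/// Collects the distinct words in `query.range`, keyed by their text.
///
/// A minimal usage sketch (illustrative; assumes a `BufferSnapshot` named `snapshot` is in scope):
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("buf"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```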
5058 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5059 let query_str = query.fuzzy_contents;
5060 if query_str.is_some_and(|query| query.is_empty()) {
5061 return BTreeMap::default();
5062 }
5063
5064 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5065 language,
5066 override_id: None,
5067 }));
5068
5069 let mut query_ix = 0;
5070 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5071 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5072
5073 let mut words = BTreeMap::default();
5074 let mut current_word_start_ix = None;
5075 let mut chunk_ix = query.range.start;
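// Walk the text character by character, tracking the start of the current word and,
// when a fuzzy query is provided, how many of its characters have been matched so far.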
5076 for chunk in self.chunks(query.range, false) {
5077 for (i, c) in chunk.text.char_indices() {
5078 let ix = chunk_ix + i;
5079 if classifier.is_word(c) {
5080 if current_word_start_ix.is_none() {
5081 current_word_start_ix = Some(ix);
5082 }
5083
5084 if let Some(query_chars) = &query_chars
5085 && query_ix < query_len
5086 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5087 {
5088 query_ix += 1;
5089 }
5090 continue;
5091 } else if let Some(word_start) = current_word_start_ix.take()
5092 && query_ix == query_len
5093 {
5094 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5095 let mut word_text = self.text_for_range(word_start..ix).peekable();
5096 let first_char = word_text
5097 .peek()
5098 .and_then(|first_chunk| first_chunk.chars().next());
5099 // As a heuristic to reduce useless completions, skip "words" that start with a digit.
5100 if !query.skip_digits
5101 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5102 {
5103 words.insert(word_text.collect(), word_range);
5104 }
5105 }
5106 query_ix = 0;
5107 }
5108 chunk_ix += chunk.text.len();
5109 }
5110
5111 words
5112 }
5113}
5114
5115pub struct WordsQuery<'a> {
5116 /// Only returns words that contain the characters of this fuzzy string as a case-insensitive subsequence.
5117 pub fuzzy_contents: Option<&'a str>,
5118 /// Skips words that start with a digit.
5119 pub skip_digits: bool,
5120 /// The buffer offset range in which to look for words.
5121 pub range: Range<usize>,
5122}
5123
5124fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5125 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5126}
5127
5128fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5129 let mut result = IndentSize::spaces(0);
5130 for c in text {
5131 let kind = match c {
5132 ' ' => IndentKind::Space,
5133 '\t' => IndentKind::Tab,
5134 _ => break,
5135 };
5136 if result.len == 0 {
5137 result.kind = kind;
5138 }
5139 result.len += 1;
5140 }
5141 result
5142}
5143
5144impl Clone for BufferSnapshot {
5145 fn clone(&self) -> Self {
5146 Self {
5147 text: self.text.clone(),
5148 syntax: self.syntax.clone(),
5149 file: self.file.clone(),
5150 remote_selections: self.remote_selections.clone(),
5151 diagnostics: self.diagnostics.clone(),
5152 language: self.language.clone(),
5153 tree_sitter_data: self.tree_sitter_data.clone(),
5154 non_text_state_update_count: self.non_text_state_update_count,
5155 capability: self.capability,
5156 }
5157 }
5158}
5159
5160impl Deref for BufferSnapshot {
5161 type Target = text::BufferSnapshot;
5162
5163 fn deref(&self) -> &Self::Target {
5164 &self.text
5165 }
5166}
5167
5168unsafe impl Send for BufferChunks<'_> {}
5169
5170impl<'a> BufferChunks<'a> {
5171 pub(crate) fn new(
5172 text: &'a Rope,
5173 range: Range<usize>,
5174 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5175 diagnostics: bool,
5176 buffer_snapshot: Option<&'a BufferSnapshot>,
5177 ) -> Self {
5178 let mut highlights = None;
5179 if let Some((captures, highlight_maps)) = syntax {
5180 highlights = Some(BufferChunkHighlights {
5181 captures,
5182 next_capture: None,
5183 stack: Default::default(),
5184 highlight_maps,
5185 })
5186 }
5187
5188 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5189 let chunks = text.chunks_in_range(range.clone());
5190
5191 let mut this = BufferChunks {
5192 range,
5193 buffer_snapshot,
5194 chunks,
5195 diagnostic_endpoints,
5196 error_depth: 0,
5197 warning_depth: 0,
5198 information_depth: 0,
5199 hint_depth: 0,
5200 unnecessary_depth: 0,
5201 underline: true,
5202 highlights,
5203 };
5204 this.initialize_diagnostic_endpoints();
5205 this
5206 }
5207
5208 /// Seeks to the given byte range in the buffer.
5209 pub fn seek(&mut self, range: Range<usize>) {
5210 let old_range = std::mem::replace(&mut self.range, range.clone());
5211 self.chunks.set_range(self.range.clone());
5212 if let Some(highlights) = self.highlights.as_mut() {
5213 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5214 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5215 highlights
5216 .stack
5217 .retain(|(end_offset, _)| *end_offset > range.start);
5218 if let Some(capture) = &highlights.next_capture
5219 && range.start >= capture.node.start_byte()
5220 {
5221 let next_capture_end = capture.node.end_byte();
5222 if range.start < next_capture_end {
5223 highlights.stack.push((
5224 next_capture_end,
5225 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5226 ));
5227 }
5228 highlights.next_capture.take();
5229 }
5230 } else if let Some(snapshot) = self.buffer_snapshot {
5231 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5232 *highlights = BufferChunkHighlights {
5233 captures,
5234 next_capture: None,
5235 stack: Default::default(),
5236 highlight_maps,
5237 };
5238 } else {
5239 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5240 // Seeking such BufferChunks is not supported.
5241 debug_assert!(
5242 false,
5243 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5244 );
5245 }
5246
5247 highlights.captures.set_byte_range(self.range.clone());
5248 self.initialize_diagnostic_endpoints();
5249 }
5250 }
5251
5252 fn initialize_diagnostic_endpoints(&mut self) {
5253 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5254 && let Some(buffer) = self.buffer_snapshot
5255 {
5256 let mut diagnostic_endpoints = Vec::new();
5257 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5258 diagnostic_endpoints.push(DiagnosticEndpoint {
5259 offset: entry.range.start,
5260 is_start: true,
5261 severity: entry.diagnostic.severity,
5262 is_unnecessary: entry.diagnostic.is_unnecessary,
5263 underline: entry.diagnostic.underline,
5264 });
5265 diagnostic_endpoints.push(DiagnosticEndpoint {
5266 offset: entry.range.end,
5267 is_start: false,
5268 severity: entry.diagnostic.severity,
5269 is_unnecessary: entry.diagnostic.is_unnecessary,
5270 underline: entry.diagnostic.underline,
5271 });
5272 }
5273 diagnostic_endpoints
5274 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5275 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5276 self.hint_depth = 0;
5277 self.error_depth = 0;
5278 self.warning_depth = 0;
5279 self.information_depth = 0;
5280 }
5281 }
5282
5283 /// The current byte offset in the buffer.
5284 pub fn offset(&self) -> usize {
5285 self.range.start
5286 }
5287
5288 pub fn range(&self) -> Range<usize> {
5289 self.range.clone()
5290 }
5291
5292 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5293 let depth = match endpoint.severity {
5294 DiagnosticSeverity::ERROR => &mut self.error_depth,
5295 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5296 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5297 DiagnosticSeverity::HINT => &mut self.hint_depth,
5298 _ => return,
5299 };
5300 if endpoint.is_start {
5301 *depth += 1;
5302 } else {
5303 *depth -= 1;
5304 }
5305
5306 if endpoint.is_unnecessary {
5307 if endpoint.is_start {
5308 self.unnecessary_depth += 1;
5309 } else {
5310 self.unnecessary_depth -= 1;
5311 }
5312 }
5313 }
5314
5315 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5316 if self.error_depth > 0 {
5317 Some(DiagnosticSeverity::ERROR)
5318 } else if self.warning_depth > 0 {
5319 Some(DiagnosticSeverity::WARNING)
5320 } else if self.information_depth > 0 {
5321 Some(DiagnosticSeverity::INFORMATION)
5322 } else if self.hint_depth > 0 {
5323 Some(DiagnosticSeverity::HINT)
5324 } else {
5325 None
5326 }
5327 }
5328
5329 fn current_code_is_unnecessary(&self) -> bool {
5330 self.unnecessary_depth > 0
5331 }
5332}
5333
5334impl<'a> Iterator for BufferChunks<'a> {
5335 type Item = Chunk<'a>;
5336
5337 fn next(&mut self) -> Option<Self::Item> {
5338 let mut next_capture_start = usize::MAX;
5339 let mut next_diagnostic_endpoint = usize::MAX;
5340
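// Pop highlight captures that end at or before the current offset.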
5341 if let Some(highlights) = self.highlights.as_mut() {
5342 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5343 if *parent_capture_end <= self.range.start {
5344 highlights.stack.pop();
5345 } else {
5346 break;
5347 }
5348 }
5349
5350 if highlights.next_capture.is_none() {
5351 highlights.next_capture = highlights.captures.next();
5352 }
5353
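// Push highlights for captures that start at or before the current offset,
// and note where the next capture begins.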
5354 while let Some(capture) = highlights.next_capture.as_ref() {
5355 if self.range.start < capture.node.start_byte() {
5356 next_capture_start = capture.node.start_byte();
5357 break;
5358 } else {
5359 let highlight_id =
5360 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5361 highlights
5362 .stack
5363 .push((capture.node.end_byte(), highlight_id));
5364 highlights.next_capture = highlights.captures.next();
5365 }
5366 }
5367 }
5368
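// Temporarily take the diagnostic endpoints so the depth counters can be
// updated (which borrows `self` mutably) while peeking at the queue.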
5369 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5370 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5371 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5372 if endpoint.offset <= self.range.start {
5373 self.update_diagnostic_depths(endpoint);
5374 diagnostic_endpoints.next();
5375 self.underline = endpoint.underline;
5376 } else {
5377 next_diagnostic_endpoint = endpoint.offset;
5378 break;
5379 }
5380 }
5381 }
5382 self.diagnostic_endpoints = diagnostic_endpoints;
5383
5384 if let Some(ChunkBitmaps {
5385 text: chunk,
5386 chars: chars_map,
5387 tabs,
5388 }) = self.chunks.peek_with_bitmaps()
5389 {
5390 let chunk_start = self.range.start;
5391 let mut chunk_end = (self.chunks.offset() + chunk.len())
5392 .min(next_capture_start)
5393 .min(next_diagnostic_endpoint);
5394 let mut highlight_id = None;
5395 if let Some(highlights) = self.highlights.as_ref()
5396 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5397 {
5398 chunk_end = chunk_end.min(*parent_capture_end);
5399 highlight_id = Some(*parent_highlight_id);
5400 }
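// Restrict the chunk's text and its tab/char bitmaps to the current
// sub-range, [chunk_start, chunk_end).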
5401 let bit_start = chunk_start - self.chunks.offset();
5402 let bit_end = chunk_end - self.chunks.offset();
5403
5404 let slice = &chunk[bit_start..bit_end];
5405
5406 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5407 let tabs = (tabs >> bit_start) & mask;
5408 let chars = (chars_map >> bit_start) & mask;
5409
5410 self.range.start = chunk_end;
5411 if self.range.start == self.chunks.offset() + chunk.len() {
5412 self.chunks.next().unwrap();
5413 }
5414
5415 Some(Chunk {
5416 text: slice,
5417 syntax_highlight_id: highlight_id,
5418 underline: self.underline,
5419 diagnostic_severity: self.current_diagnostic_severity(),
5420 is_unnecessary: self.current_code_is_unnecessary(),
5421 tabs,
5422 chars,
5423 ..Chunk::default()
5424 })
5425 } else {
5426 None
5427 }
5428 }
5429}
5430
5431impl operation_queue::Operation for Operation {
5432 fn lamport_timestamp(&self) -> clock::Lamport {
5433 match self {
5434 Operation::Buffer(_) => {
5435 unreachable!("buffer operations should never be deferred at this layer")
5436 }
5437 Operation::UpdateDiagnostics {
5438 lamport_timestamp, ..
5439 }
5440 | Operation::UpdateSelections {
5441 lamport_timestamp, ..
5442 }
5443 | Operation::UpdateCompletionTriggers {
5444 lamport_timestamp, ..
5445 }
5446 | Operation::UpdateLineEnding {
5447 lamport_timestamp, ..
5448 } => *lamport_timestamp,
5449 }
5450 }
5451}
5452
5453impl Default for Diagnostic {
5454 fn default() -> Self {
5455 Self {
5456 source: Default::default(),
5457 source_kind: DiagnosticSourceKind::Other,
5458 code: None,
5459 code_description: None,
5460 severity: DiagnosticSeverity::ERROR,
5461 message: Default::default(),
5462 markdown: None,
5463 group_id: 0,
5464 is_primary: false,
5465 is_disk_based: false,
5466 is_unnecessary: false,
5467 underline: true,
5468 data: None,
5469 registration_id: None,
5470 }
5471 }
5472}
5473
5474impl IndentSize {
5475 /// Returns an [`IndentSize`] representing the given number of spaces.
5476 pub fn spaces(len: u32) -> Self {
5477 Self {
5478 len,
5479 kind: IndentKind::Space,
5480 }
5481 }
5482
5483 /// Returns an [`IndentSize`] representing a tab.
5484 pub fn tab() -> Self {
5485 Self {
5486 len: 1,
5487 kind: IndentKind::Tab,
5488 }
5489 }
5490
5491 /// An iterator over the characters represented by this [`IndentSize`].
5492 pub fn chars(&self) -> impl Iterator<Item = char> {
5493 iter::repeat(self.char()).take(self.len as usize)
5494 }
5495
5496 /// The character representation of this [`IndentSize`].
5497 pub fn char(&self) -> char {
5498 match self.kind {
5499 IndentKind::Space => ' ',
5500 IndentKind::Tab => '\t',
5501 }
5502 }
5503
5504 /// Consumes this [`IndentSize`] and returns a new one, shrunk (`Ordering::Less`) or
5505 /// enlarged (`Ordering::Greater`) by the given size. The delta only applies when the indent kinds match, or when an empty indent is enlarged (in which case it takes on the given size).
5506 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5507 match direction {
5508 Ordering::Less => {
5509 if self.kind == size.kind && self.len >= size.len {
5510 self.len -= size.len;
5511 }
5512 }
5513 Ordering::Equal => {}
5514 Ordering::Greater => {
5515 if self.len == 0 {
5516 self = size;
5517 } else if self.kind == size.kind {
5518 self.len += size.len;
5519 }
5520 }
5521 }
5522 self
5523 }
5524
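/// The length of this indent in columns, with each tab expanded to the given tab size.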
5525 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5526 match self.kind {
5527 IndentKind::Space => self.len as usize,
5528 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5529 }
5530 }
5531}
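
// A minimal sketch of how `IndentSize` deltas and tab expansion behave,
// exercising the methods above. The module and test names here are ad hoc,
// and paths are fully qualified to avoid assuming extra imports.
#[cfg(test)]
mod indent_size_examples {
    use super::*;

    #[test]
    fn indent_size_with_delta_and_tab_expansion() {
        // Growing a 4-space indent by 2 more spaces of the same kind.
        let indent = IndentSize::spaces(4)
            .with_delta(std::cmp::Ordering::Greater, IndentSize::spaces(2));
        assert_eq!(indent.len, 6);

        // Shrinking is a no-op when the indent kinds differ.
        let indent =
            IndentSize::tab().with_delta(std::cmp::Ordering::Less, IndentSize::spaces(1));
        assert_eq!(indent.len, 1);

        // A single tab expands to the configured tab size in columns.
        let tab_size = std::num::NonZeroU32::new(4).unwrap();
        assert_eq!(IndentSize::tab().len_with_expanded_tabs(tab_size), 4);
    }
}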
5532
5533#[cfg(any(test, feature = "test-support"))]
5534pub struct TestFile {
5535 pub path: Arc<RelPath>,
5536 pub root_name: String,
5537 pub local_root: Option<PathBuf>,
5538}
5539
5540#[cfg(any(test, feature = "test-support"))]
5541impl File for TestFile {
5542 fn path(&self) -> &Arc<RelPath> {
5543 &self.path
5544 }
5545
5546 fn full_path(&self, _: &gpui::App) -> PathBuf {
5547 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5548 }
5549
5550 fn as_local(&self) -> Option<&dyn LocalFile> {
5551 if self.local_root.is_some() {
5552 Some(self)
5553 } else {
5554 None
5555 }
5556 }
5557
5558 fn disk_state(&self) -> DiskState {
5559 unimplemented!()
5560 }
5561
5562 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5563 self.path().file_name().unwrap_or(self.root_name.as_ref())
5564 }
5565
5566 fn worktree_id(&self, _: &App) -> WorktreeId {
5567 WorktreeId::from_usize(0)
5568 }
5569
5570 fn to_proto(&self, _: &App) -> rpc::proto::File {
5571 unimplemented!()
5572 }
5573
5574 fn is_private(&self) -> bool {
5575 false
5576 }
5577
5578 fn path_style(&self, _cx: &App) -> PathStyle {
5579 PathStyle::local()
5580 }
5581}
5582
5583#[cfg(any(test, feature = "test-support"))]
5584impl LocalFile for TestFile {
5585 fn abs_path(&self, _cx: &App) -> PathBuf {
5586 PathBuf::from(self.local_root.as_ref().unwrap())
5587 .join(&self.root_name)
5588 .join(self.path.as_std_path())
5589 }
5590
5591 fn load(&self, _cx: &App) -> Task<Result<String>> {
5592 unimplemented!()
5593 }
5594
5595 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5596 unimplemented!()
5597 }
5598}
5599
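/// Coalesces values from the iterator into contiguous half-open ranges,
/// starting a new range whenever a value is not consecutive with the previous
/// one or the current range has reached `max_len`.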
5600pub(crate) fn contiguous_ranges(
5601 values: impl Iterator<Item = u32>,
5602 max_len: usize,
5603) -> impl Iterator<Item = Range<u32>> {
5604 let mut values = values;
5605 let mut current_range: Option<Range<u32>> = None;
5606 std::iter::from_fn(move || {
5607 loop {
5608 if let Some(value) = values.next() {
5609 if let Some(range) = &mut current_range
5610 && value == range.end
5611 && range.len() < max_len
5612 {
5613 range.end += 1;
5614 continue;
5615 }
5616
5617 let prev_range = current_range.clone();
5618 current_range = Some(value..(value + 1));
5619 if prev_range.is_some() {
5620 return prev_range;
5621 }
5622 } else {
5623 return current_range.take();
5624 }
5625 }
5626 })
5627}
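
// A small illustrative example of `contiguous_ranges` (names here are ad hoc):
// consecutive values coalesce into half-open ranges, and a range is split as
// soon as it reaches `max_len`.
#[cfg(test)]
mod contiguous_ranges_examples {
    use super::*;

    #[test]
    fn coalesces_consecutive_values_up_to_max_len() {
        let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 2).collect();
        assert_eq!(ranges, vec![1..3, 3..4, 5..7, 9..10]);
    }
}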
5628
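/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally taking a language's configured word characters and a usage
/// context (completions or linked edits) into account.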
5629#[derive(Default, Debug)]
5630pub struct CharClassifier {
5631 scope: Option<LanguageScope>,
5632 scope_context: Option<CharScopeContext>,
5633 ignore_punctuation: bool,
5634}
5635
5636impl CharClassifier {
5637 pub fn new(scope: Option<LanguageScope>) -> Self {
5638 Self {
5639 scope,
5640 scope_context: None,
5641 ignore_punctuation: false,
5642 }
5643 }
5644
5645 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5646 Self {
5647 scope_context,
5648 ..self
5649 }
5650 }
5651
5652 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5653 Self {
5654 ignore_punctuation,
5655 ..self
5656 }
5657 }
5658
5659 pub fn is_whitespace(&self, c: char) -> bool {
5660 self.kind(c) == CharKind::Whitespace
5661 }
5662
5663 pub fn is_word(&self, c: char) -> bool {
5664 self.kind(c) == CharKind::Word
5665 }
5666
5667 pub fn is_punctuation(&self, c: char) -> bool {
5668 self.kind(c) == CharKind::Punctuation
5669 }
5670
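/// Classifies `c`, treating any language-specific word characters as words
/// and, when `ignore_punctuation` is true, classifying punctuation as part of
/// a word as well.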
5671 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5672 if c.is_alphanumeric() || c == '_' {
5673 return CharKind::Word;
5674 }
5675
5676 if let Some(scope) = &self.scope {
5677 let characters = match self.scope_context {
5678 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5679 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5680 None => scope.word_characters(),
5681 };
5682 if let Some(characters) = characters
5683 && characters.contains(&c)
5684 {
5685 return CharKind::Word;
5686 }
5687 }
5688
5689 if c.is_whitespace() {
5690 return CharKind::Whitespace;
5691 }
5692
5693 if ignore_punctuation {
5694 CharKind::Word
5695 } else {
5696 CharKind::Punctuation
5697 }
5698 }
5699
5700 pub fn kind(&self, c: char) -> CharKind {
5701 self.kind_with(c, self.ignore_punctuation)
5702 }
5703}
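
// A brief sketch of `CharClassifier` with no language scope attached: the
// defaults treat alphanumerics and `_` as word characters, and
// `ignore_punctuation` folds punctuation into words as well.
#[cfg(test)]
mod char_classifier_examples {
    use super::*;

    #[test]
    fn classifies_characters_without_a_language_scope() {
        let classifier = CharClassifier::new(None);
        assert!(classifier.is_word('a'));
        assert!(classifier.is_word('_'));
        assert!(classifier.is_whitespace(' '));
        assert!(classifier.is_punctuation('-'));

        // Treating punctuation as part of a word.
        assert!(classifier.ignore_punctuation(true).is_word('-'));
    }
}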
5704
5705/// Find all of the ranges of whitespace that occur at the ends of lines
5706/// in the given rope.
5707///
5708/// This could also be done with a regex search, but this implementation
5709/// avoids copying text.
5710pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5711 let mut ranges = Vec::new();
5712
5713 let mut offset = 0;
5714 let mut prev_chunk_trailing_whitespace_range = 0..0;
5715 for chunk in rope.chunks() {
5716 let mut prev_line_trailing_whitespace_range = 0..0;
5717 for (i, line) in chunk.split('\n').enumerate() {
5718 let line_end_offset = offset + line.len();
5719 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5720 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5721
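// A chunk whose first line is entirely whitespace may be continuing
// trailing whitespace that began in the previous chunk.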
5722 if i == 0 && trimmed_line_len == 0 {
5723 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5724 }
5725 if !prev_line_trailing_whitespace_range.is_empty() {
5726 ranges.push(prev_line_trailing_whitespace_range);
5727 }
5728
5729 offset = line_end_offset + 1;
5730 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5731 }
5732
5733 offset -= 1;
5734 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5735 }
5736
5737 if !prev_chunk_trailing_whitespace_range.is_empty() {
5738 ranges.push(prev_chunk_trailing_whitespace_range);
5739 }
5740
5741 ranges
5742}
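
// An illustrative example of `trailing_whitespace_ranges`. It assumes
// `Rope: From<&str>`, which is how ropes are constructed elsewhere in this
// crate; the returned ranges are byte offsets into the rope.
#[cfg(test)]
mod trailing_whitespace_examples {
    use super::*;

    #[test]
    fn finds_trailing_whitespace_on_each_line() {
        let rope = Rope::from("a  \nb\t\nc");
        // "a  " has two trailing spaces (bytes 1..3), "b\t" one trailing tab (5..6),
        // and the final line "c" has none.
        assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
    }
}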