1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
5 TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16};
17pub use crate::{
18 Grammar, Language, LanguageRegistry,
19 diagnostic_set::DiagnosticSet,
20 highlight_map::{HighlightId, HighlightMap},
21 proto,
22};
23use anyhow::{Context as _, Result};
24pub use clock::ReplicaId;
25use clock::{Global, Lamport};
26use collections::{HashMap, HashSet};
27use fs::MTime;
28use futures::channel::oneshot;
29use gpui::{
30 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
31 Task, TaskLabel, TextStyle,
32};
33
34use itertools::Itertools;
35use lsp::{LanguageServerId, NumberOrString};
36use parking_lot::{Mutex, RawMutex, lock_api::MutexGuard};
37use serde::{Deserialize, Serialize};
38use serde_json::Value;
39use settings::WorktreeId;
40use smallvec::SmallVec;
41use smol::future::yield_now;
42use std::{
43 any::Any,
44 borrow::Cow,
45 cell::Cell,
46 cmp::{self, Ordering, Reverse},
47 collections::{BTreeMap, BTreeSet},
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, Not, Range},
53 path::PathBuf,
54 rc,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
65 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
66 ToPointUtf16, Transaction, TransactionId, Unclipped,
67};
68use theme::{ActiveTheme as _, SyntaxTheme};
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
/// Indicates whether a [`Buffer`] has permission to edit.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
113 pending_autoindent: Option<Task<()>>,
114 sync_parse_timeout: Duration,
115 syntax_map: Mutex<SyntaxMap>,
116 reparse: Option<Task<()>>,
117 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
118 non_text_state_update_count: usize,
119 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
120 remote_selections: TreeMap<ReplicaId, SelectionSet>,
121 diagnostics_timestamp: clock::Lamport,
122 completion_triggers: BTreeSet<String>,
123 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
124 completion_triggers_timestamp: clock::Lamport,
125 deferred_ops: OperationQueue<Operation>,
126 capability: Capability,
127 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` as of the most recent call.
130 has_unsaved_edits: Cell<(clock::Global, bool)>,
131 change_bits: Vec<rc::Weak<Cell<bool>>>,
132 _subscriptions: Vec<gpui::Subscription>,
133 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
134}
135
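/// Cached data derived from the buffer's syntax tree, grouped into row-based
/// chunks. Bracket matches are computed lazily per chunk (`None` means the
/// chunk has not been computed yet), and the cache is cleared whenever the
/// buffer is reparsed.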
136#[derive(Debug, Clone)]
137pub struct TreeSitterData {
138 chunks: RowChunks,
139 brackets_by_chunks: Vec<Option<Vec<BracketMatch<usize>>>>,
140}
141
142const MAX_ROWS_IN_A_CHUNK: u32 = 50;
143
144impl TreeSitterData {
145 fn clear(&mut self) {
146 self.brackets_by_chunks = vec![None; self.chunks.len()];
147 }
148
149 fn new(snapshot: text::BufferSnapshot) -> Self {
150 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
151 Self {
152 brackets_by_chunks: vec![None; chunks.len()],
153 chunks,
154 }
155 }
156}
157
158#[derive(Copy, Clone, Debug, PartialEq, Eq)]
159pub enum ParseStatus {
160 Idle,
161 Parsing,
162}
163
164struct BufferBranchState {
165 base_buffer: Entity<Buffer>,
166 merged_operations: Vec<Lamport>,
167}
168
169/// An immutable, cheaply cloneable representation of a fixed
170/// state of a buffer.
171pub struct BufferSnapshot {
172 pub text: text::BufferSnapshot,
173 pub syntax: SyntaxSnapshot,
174 file: Option<Arc<dyn File>>,
175 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
176 remote_selections: TreeMap<ReplicaId, SelectionSet>,
177 language: Option<Arc<Language>>,
178 non_text_state_update_count: usize,
179 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
180}
181
182/// The kind and amount of indentation in a particular line. For now,
183/// assumes that indentation is all the same character.
184#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
185pub struct IndentSize {
186 /// The number of bytes that comprise the indentation.
187 pub len: u32,
188 /// The kind of whitespace used for indentation.
189 pub kind: IndentKind,
190}
191
192/// A whitespace character that's used for indentation.
193#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
194pub enum IndentKind {
195 /// An ASCII space character.
196 #[default]
197 Space,
198 /// An ASCII tab character.
199 Tab,
200}
201
202/// The shape of a selection cursor.
203#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
204pub enum CursorShape {
205 /// A vertical bar
206 #[default]
207 Bar,
208 /// A block that surrounds the following character
209 Block,
210 /// An underline that runs along the following character
211 Underline,
212 /// A box drawn around the following character
213 Hollow,
214}
215
216impl From<settings::CursorShape> for CursorShape {
217 fn from(shape: settings::CursorShape) -> Self {
218 match shape {
219 settings::CursorShape::Bar => CursorShape::Bar,
220 settings::CursorShape::Block => CursorShape::Block,
221 settings::CursorShape::Underline => CursorShape::Underline,
222 settings::CursorShape::Hollow => CursorShape::Hollow,
223 }
224 }
225}
226
227#[derive(Clone, Debug)]
228struct SelectionSet {
229 line_mode: bool,
230 cursor_shape: CursorShape,
231 selections: Arc<[Selection<Anchor>]>,
232 lamport_timestamp: clock::Lamport,
233}
234
235/// A diagnostic associated with a certain range of a buffer.
236#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
237pub struct Diagnostic {
238 /// The name of the service that produced this diagnostic.
239 pub source: Option<String>,
240 /// A machine-readable code that identifies this diagnostic.
241 pub code: Option<NumberOrString>,
242 pub code_description: Option<lsp::Uri>,
243 /// Whether this diagnostic is a hint, warning, or error.
244 pub severity: DiagnosticSeverity,
245 /// The human-readable message associated with this diagnostic.
246 pub message: String,
    /// The human-readable message, in Markdown format, if available.
248 pub markdown: Option<String>,
249 /// An id that identifies the group to which this diagnostic belongs.
250 ///
251 /// When a language server produces a diagnostic with
252 /// one or more associated diagnostics, those diagnostics are all
253 /// assigned a single group ID.
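    ///
    /// For example, a "mismatched types" error and the related "expected due
    /// to this" note reported alongside it would share one group ID, with the
    /// error acting as the group's primary diagnostic.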
254 pub group_id: usize,
255 /// Whether this diagnostic is the primary diagnostic for its group.
256 ///
257 /// In a given group, the primary diagnostic is the top-level diagnostic
258 /// returned by the language server. The non-primary diagnostics are the
259 /// associated diagnostics.
260 pub is_primary: bool,
261 /// Whether this diagnostic is considered to originate from an analysis of
262 /// files on disk, as opposed to any unsaved buffer contents. This is a
263 /// property of a given diagnostic source, and is configured for a given
264 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
265 /// for the language server.
266 pub is_disk_based: bool,
267 /// Whether this diagnostic marks unnecessary code.
268 pub is_unnecessary: bool,
    /// Allows quickly distinguishing diagnostic groups by their source.
270 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for this diagnostic.
272 pub data: Option<Value>,
273 /// Whether to underline the corresponding text range in the editor.
274 pub underline: bool,
275}
276
277#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
278pub enum DiagnosticSourceKind {
279 Pulled,
280 Pushed,
281 Other,
282}
283
284/// An operation used to synchronize this buffer with its other replicas.
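///
/// A rough sketch of how replicas might exchange these operations over some
/// transport (the host/guest naming, `received_ops`, the deserialization
/// helper, and the surrounding context are assumptions for illustration):
///
/// ```ignore
/// // Host: serialize every operation the guest has not yet observed.
/// let ops_task = host_buffer.read(cx).serialize_ops(Some(guest_version), cx);
///
/// // Guest: deserialize the received protobuf operations and apply them.
/// guest_buffer.update(cx, |buffer, cx| {
///     let ops = received_ops
///         .into_iter()
///         .filter_map(|op| proto::deserialize_operation(op).ok());
///     buffer.apply_ops(ops, cx);
/// });
/// ```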
285#[derive(Clone, Debug, PartialEq)]
286pub enum Operation {
287 /// A text operation.
288 Buffer(text::Operation),
289
290 /// An update to the buffer's diagnostics.
291 UpdateDiagnostics {
292 /// The id of the language server that produced the new diagnostics.
293 server_id: LanguageServerId,
294 /// The diagnostics.
295 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
296 /// The buffer's lamport timestamp.
297 lamport_timestamp: clock::Lamport,
298 },
299
300 /// An update to the most recent selections in this buffer.
301 UpdateSelections {
302 /// The selections.
303 selections: Arc<[Selection<Anchor>]>,
304 /// The buffer's lamport timestamp.
305 lamport_timestamp: clock::Lamport,
306 /// Whether the selections are in 'line mode'.
307 line_mode: bool,
308 /// The [`CursorShape`] associated with these selections.
309 cursor_shape: CursorShape,
310 },
311
312 /// An update to the characters that should trigger autocompletion
313 /// for this buffer.
314 UpdateCompletionTriggers {
315 /// The characters that trigger autocompletion.
316 triggers: Vec<String>,
317 /// The buffer's lamport timestamp.
318 lamport_timestamp: clock::Lamport,
319 /// The language server ID.
320 server_id: LanguageServerId,
321 },
322
323 /// An update to the line ending type of this buffer.
324 UpdateLineEnding {
325 /// The line ending type.
326 line_ending: LineEnding,
327 /// The buffer's lamport timestamp.
328 lamport_timestamp: clock::Lamport,
329 },
330}
331
332/// An event that occurs in a buffer.
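///
/// A minimal sketch of observing these events from another entity (the
/// subscribing entity and its context are assumed):
///
/// ```ignore
/// cx.subscribe(&buffer, |_this, _buffer, event, _cx| match event {
///     BufferEvent::Edited => { /* e.g. refresh a preview */ }
///     BufferEvent::Saved => { /* e.g. clear a dirty indicator */ }
///     _ => {}
/// })
/// .detach();
/// ```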
333#[derive(Clone, Debug, PartialEq)]
334pub enum BufferEvent {
335 /// The buffer was changed in a way that must be
336 /// propagated to its other replicas.
337 Operation {
338 operation: Operation,
339 is_local: bool,
340 },
341 /// The buffer was edited.
342 Edited,
343 /// The buffer's `dirty` bit changed.
344 DirtyChanged,
345 /// The buffer was saved.
346 Saved,
347 /// The buffer's file was changed on disk.
348 FileHandleChanged,
349 /// The buffer was reloaded.
350 Reloaded,
    /// The buffer needs to be reloaded.
352 ReloadNeeded,
353 /// The buffer's language was changed.
354 LanguageChanged,
355 /// The buffer's syntax trees were updated.
356 Reparsed,
357 /// The buffer's diagnostics were updated.
358 DiagnosticsUpdated,
359 /// The buffer gained or lost editing capabilities.
360 CapabilityChanged,
361}
362
363/// The file associated with a buffer.
364pub trait File: Send + Sync + Any {
365 /// Returns the [`LocalFile`] associated with this file, if the
366 /// file is local.
367 fn as_local(&self) -> Option<&dyn LocalFile>;
368
369 /// Returns whether this file is local.
370 fn is_local(&self) -> bool {
371 self.as_local().is_some()
372 }
373
    /// Returns whether the file is new, present in storage, or has been deleted. Some states
    /// include extra metadata, such as the modification time.
376 fn disk_state(&self) -> DiskState;
377
378 /// Returns the path of this file relative to the worktree's root directory.
379 fn path(&self) -> &Arc<RelPath>;
380
381 /// Returns the path of this file relative to the worktree's parent directory (this means it
382 /// includes the name of the worktree's root folder).
383 fn full_path(&self, cx: &App) -> PathBuf;
384
385 /// Returns the path style of this file.
386 fn path_style(&self, cx: &App) -> PathStyle;
387
388 /// Returns the last component of this handle's absolute path. If this handle refers to the root
389 /// of its worktree, then this method will return the name of the worktree itself.
390 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
391
392 /// Returns the id of the worktree to which this file belongs.
393 ///
394 /// This is needed for looking up project-specific settings.
395 fn worktree_id(&self, cx: &App) -> WorktreeId;
396
397 /// Converts this file into a protobuf message.
398 fn to_proto(&self, cx: &App) -> rpc::proto::File;
399
    /// Returns whether Zed considers this to be a private file.
401 fn is_private(&self) -> bool;
402}
403
/// The file's storage status: whether it is stored (`Present`) and, if so, when it was last
/// modified. If the file is not stored, it is either `New` or `Deleted`. The UI distinguishes
/// these two states; for example, the buffer tab does not display a deletion indicator for new
/// files.
408#[derive(Copy, Clone, Debug, PartialEq)]
409pub enum DiskState {
410 /// File created in Zed that has not been saved.
411 New,
412 /// File present on the filesystem.
413 Present { mtime: MTime },
414 /// Deleted file that was previously present.
415 Deleted,
416}
417
418impl DiskState {
419 /// Returns the file's last known modification time on disk.
420 pub fn mtime(self) -> Option<MTime> {
421 match self {
422 DiskState::New => None,
423 DiskState::Present { mtime } => Some(mtime),
424 DiskState::Deleted => None,
425 }
426 }
427
428 pub fn exists(&self) -> bool {
429 match self {
430 DiskState::New => false,
431 DiskState::Present { .. } => true,
432 DiskState::Deleted => false,
433 }
434 }
435}
436
437/// The file associated with a buffer, in the case where the file is on the local disk.
438pub trait LocalFile: File {
439 /// Returns the absolute path of this file
440 fn abs_path(&self, cx: &App) -> PathBuf;
441
442 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
443 fn load(&self, cx: &App) -> Task<Result<String>>;
444
445 /// Loads the file's contents from disk.
446 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
447}
448
449/// The auto-indent behavior associated with an editing operation.
450/// For some editing operations, each affected line of text has its
451/// indentation recomputed. For other operations, the entire block
452/// of edited text is adjusted uniformly.
453#[derive(Clone, Debug)]
454pub enum AutoindentMode {
455 /// Indent each line of inserted text.
456 EachLine,
457 /// Apply the same indentation adjustment to all of the lines
458 /// in a given insertion.
459 Block {
460 /// The original indentation column of the first line of each
461 /// insertion, if it has been copied.
462 ///
463 /// Knowing this makes it possible to preserve the relative indentation
464 /// of every line in the insertion from when it was copied.
465 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is then auto-indented to column `b`, then every other line of
        /// the insertion will have its indentation adjusted by `b - a`.
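        ///
        /// For example (illustrative values): if a block was copied from column 4
        /// (`original_indent_columns: vec![Some(4)]`) and its first line is
        /// auto-indented to column 8, every following line is shifted right by
        /// 4 columns, preserving the block's internal structure.
        ///
        /// A hedged sketch of passing this mode to `Buffer::edit`; the edited
        /// range and text are placeholders:
        ///
        /// ```ignore
        /// buffer.edit(
        ///     [(range, "if a {\n    b();\n}")],
        ///     Some(AutoindentMode::Block {
        ///         original_indent_columns: vec![Some(4)],
        ///     }),
        ///     cx,
        /// );
        /// ```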
469 original_indent_columns: Vec<Option<u32>>,
470 },
471}
472
473#[derive(Clone)]
474struct AutoindentRequest {
475 before_edit: BufferSnapshot,
476 entries: Vec<AutoindentRequestEntry>,
477 is_block_mode: bool,
478 ignore_empty_lines: bool,
479}
480
481#[derive(Debug, Clone)]
482struct AutoindentRequestEntry {
483 /// A range of the buffer whose indentation should be adjusted.
484 range: Range<Anchor>,
485 /// Whether or not these lines should be considered brand new, for the
486 /// purpose of auto-indent. When text is not new, its indentation will
487 /// only be adjusted if the suggested indentation level has *changed*
488 /// since the edit was made.
489 first_line_is_new: bool,
490 indent_size: IndentSize,
491 original_indent_column: Option<u32>,
492}
493
494#[derive(Debug)]
495struct IndentSuggestion {
496 basis_row: u32,
497 delta: Ordering,
498 within_error: bool,
499}
500
501struct BufferChunkHighlights<'a> {
502 captures: SyntaxMapCaptures<'a>,
503 next_capture: Option<SyntaxMapCapture<'a>>,
504 stack: Vec<(usize, HighlightId)>,
505 highlight_maps: Vec<HighlightMap>,
506}
507
508/// An iterator that yields chunks of a buffer's text, along with their
509/// syntax highlights and diagnostic status.
510pub struct BufferChunks<'a> {
511 buffer_snapshot: Option<&'a BufferSnapshot>,
512 range: Range<usize>,
513 chunks: text::Chunks<'a>,
514 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
515 error_depth: usize,
516 warning_depth: usize,
517 information_depth: usize,
518 hint_depth: usize,
519 unnecessary_depth: usize,
520 underline: bool,
521 highlights: Option<BufferChunkHighlights<'a>>,
522}
523
524/// A chunk of a buffer's text, along with its syntax highlight and
525/// diagnostic status.
526#[derive(Clone, Debug, Default)]
527pub struct Chunk<'a> {
528 /// The text of the chunk.
529 pub text: &'a str,
530 /// The syntax highlighting style of the chunk.
531 pub syntax_highlight_id: Option<HighlightId>,
532 /// The highlight style that has been applied to this chunk in
533 /// the editor.
534 pub highlight_style: Option<HighlightStyle>,
535 /// The severity of diagnostic associated with this chunk, if any.
536 pub diagnostic_severity: Option<DiagnosticSeverity>,
537 /// A bitset of which characters are tabs in this string.
538 pub tabs: u128,
539 /// Bitmap of character indices in this chunk
540 pub chars: u128,
541 /// Whether this chunk of text is marked as unnecessary.
542 pub is_unnecessary: bool,
543 /// Whether this chunk of text was originally a tab character.
544 pub is_tab: bool,
545 /// Whether this chunk of text was originally an inlay.
546 pub is_inlay: bool,
547 /// Whether to underline the corresponding text range in the editor.
548 pub underline: bool,
549}
550
551/// A set of edits to a given version of a buffer, computed asynchronously.
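///
/// A minimal sketch of the intended flow, assuming the asynchronous `diff` and
/// `apply_diff` methods that [`Buffer`] defines elsewhere in this module:
///
/// ```ignore
/// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx)).await;
/// buffer.update(cx, |buffer, cx| {
///     buffer.apply_diff(diff, cx);
/// });
/// ```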
552#[derive(Debug)]
553pub struct Diff {
554 pub base_version: clock::Global,
555 pub line_ending: LineEnding,
556 pub edits: Vec<(Range<usize>, Arc<str>)>,
557}
558
559#[derive(Debug, Clone, Copy)]
560pub(crate) struct DiagnosticEndpoint {
561 offset: usize,
562 is_start: bool,
563 underline: bool,
564 severity: DiagnosticSeverity,
565 is_unnecessary: bool,
566}
567
568/// A class of characters, used for characterizing a run of text.
569#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
570pub enum CharKind {
571 /// Whitespace.
572 Whitespace,
573 /// Punctuation.
574 Punctuation,
575 /// Word.
576 Word,
577}
578
579/// Context for character classification within a specific scope.
580#[derive(Copy, Clone, Eq, PartialEq, Debug)]
581pub enum CharScopeContext {
582 /// Character classification for completion queries.
583 ///
584 /// This context treats certain characters as word constituents that would
585 /// normally be considered punctuation, such as '-' in Tailwind classes
586 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
587 Completion,
588 /// Character classification for linked edits.
589 ///
590 /// This context handles characters that should be treated as part of
591 /// identifiers during linked editing operations, such as '.' in JSX
592 /// component names like `<Animated.View>`.
593 LinkedEdit,
594}
595
/// A runnable is a set of data about a buffer region that can be resolved into a task.
597pub struct Runnable {
598 pub tags: SmallVec<[RunnableTag; 1]>,
599 pub language: Arc<Language>,
600 pub buffer: BufferId,
601}
602
603#[derive(Default, Clone, Debug)]
604pub struct HighlightedText {
605 pub text: SharedString,
606 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
607}
608
609#[derive(Default, Debug)]
610struct HighlightedTextBuilder {
611 pub text: String,
612 highlights: Vec<(Range<usize>, HighlightStyle)>,
613}
614
615impl HighlightedText {
616 pub fn from_buffer_range<T: ToOffset>(
617 range: Range<T>,
618 snapshot: &text::BufferSnapshot,
619 syntax_snapshot: &SyntaxSnapshot,
620 override_style: Option<HighlightStyle>,
621 syntax_theme: &SyntaxTheme,
622 ) -> Self {
623 let mut highlighted_text = HighlightedTextBuilder::default();
624 highlighted_text.add_text_from_buffer_range(
625 range,
626 snapshot,
627 syntax_snapshot,
628 override_style,
629 syntax_theme,
630 );
631 highlighted_text.build()
632 }
633
634 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
635 gpui::StyledText::new(self.text.clone())
636 .with_default_highlights(default_style, self.highlights.iter().cloned())
637 }
638
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within it, along with a boolean indicating whether more lines follow.
641 pub fn first_line_preview(self) -> (Self, bool) {
642 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
643 let first_line = &self.text[..newline_ix];
644
645 // Trim leading whitespace, unless an edit starts prior to it.
646 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
647 if let Some((first_highlight_range, _)) = self.highlights.first() {
648 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
649 }
650
651 let preview_text = &first_line[preview_start_ix..];
652 let preview_highlights = self
653 .highlights
654 .into_iter()
655 .skip_while(|(range, _)| range.end <= preview_start_ix)
656 .take_while(|(range, _)| range.start < newline_ix)
657 .filter_map(|(mut range, highlight)| {
658 range.start = range.start.saturating_sub(preview_start_ix);
659 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
660 if range.is_empty() {
661 None
662 } else {
663 Some((range, highlight))
664 }
665 });
666
667 let preview = Self {
668 text: SharedString::new(preview_text),
669 highlights: preview_highlights.collect(),
670 };
671
672 (preview, self.text.len() > newline_ix)
673 }
674}
675
676impl HighlightedTextBuilder {
677 pub fn build(self) -> HighlightedText {
678 HighlightedText {
679 text: self.text.into(),
680 highlights: self.highlights,
681 }
682 }
683
684 pub fn add_text_from_buffer_range<T: ToOffset>(
685 &mut self,
686 range: Range<T>,
687 snapshot: &text::BufferSnapshot,
688 syntax_snapshot: &SyntaxSnapshot,
689 override_style: Option<HighlightStyle>,
690 syntax_theme: &SyntaxTheme,
691 ) {
692 let range = range.to_offset(snapshot);
693 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
694 let start = self.text.len();
695 self.text.push_str(chunk.text);
696 let end = self.text.len();
697
698 if let Some(highlight_style) = chunk
699 .syntax_highlight_id
700 .and_then(|id| id.style(syntax_theme))
701 {
702 let highlight_style = override_style.map_or(highlight_style, |override_style| {
703 highlight_style.highlight(override_style)
704 });
705 self.highlights.push((start..end, highlight_style));
706 } else if let Some(override_style) = override_style {
707 self.highlights.push((start..end, override_style));
708 }
709 }
710 }
711
712 fn highlighted_chunks<'a>(
713 range: Range<usize>,
714 snapshot: &'a text::BufferSnapshot,
715 syntax_snapshot: &'a SyntaxSnapshot,
716 ) -> BufferChunks<'a> {
717 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
718 grammar
719 .highlights_config
720 .as_ref()
721 .map(|config| &config.query)
722 });
723
724 let highlight_maps = captures
725 .grammars()
726 .iter()
727 .map(|grammar| grammar.highlight_map())
728 .collect();
729
730 BufferChunks::new(
731 snapshot.as_rope(),
732 range,
733 Some((captures, highlight_maps)),
734 false,
735 None,
736 )
737 }
738}
739
740#[derive(Clone)]
741pub struct EditPreview {
742 old_snapshot: text::BufferSnapshot,
743 applied_edits_snapshot: text::BufferSnapshot,
744 syntax_snapshot: SyntaxSnapshot,
745}
746
747impl EditPreview {
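    /// Builds a [`HighlightedText`] that previews the given `edits` applied on
    /// top of `current_snapshot`, highlighting inserted text (and, when
    /// `include_deletions` is true, the replaced ranges) with the theme's
    /// created/deleted background colors.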
748 pub fn highlight_edits(
749 &self,
750 current_snapshot: &BufferSnapshot,
751 edits: &[(Range<Anchor>, impl AsRef<str>)],
752 include_deletions: bool,
753 cx: &App,
754 ) -> HighlightedText {
755 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
756 return HighlightedText::default();
757 };
758
759 let mut highlighted_text = HighlightedTextBuilder::default();
760
761 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
762
763 let insertion_highlight_style = HighlightStyle {
764 background_color: Some(cx.theme().status().created_background),
765 ..Default::default()
766 };
767 let deletion_highlight_style = HighlightStyle {
768 background_color: Some(cx.theme().status().deleted_background),
769 ..Default::default()
770 };
771 let syntax_theme = cx.theme().syntax();
772
773 for (range, edit_text) in edits {
774 let edit_new_end_in_preview_snapshot = range
775 .end
776 .bias_right(&self.old_snapshot)
777 .to_offset(&self.applied_edits_snapshot);
778 let edit_start_in_preview_snapshot =
779 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
780
781 let unchanged_range_in_preview_snapshot =
782 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
783 if !unchanged_range_in_preview_snapshot.is_empty() {
784 highlighted_text.add_text_from_buffer_range(
785 unchanged_range_in_preview_snapshot,
786 &self.applied_edits_snapshot,
787 &self.syntax_snapshot,
788 None,
789 syntax_theme,
790 );
791 }
792
793 let range_in_current_snapshot = range.to_offset(current_snapshot);
794 if include_deletions && !range_in_current_snapshot.is_empty() {
795 highlighted_text.add_text_from_buffer_range(
796 range_in_current_snapshot,
                &current_snapshot.text,
                &current_snapshot.syntax,
799 Some(deletion_highlight_style),
800 syntax_theme,
801 );
802 }
803
804 if !edit_text.as_ref().is_empty() {
805 highlighted_text.add_text_from_buffer_range(
806 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
807 &self.applied_edits_snapshot,
808 &self.syntax_snapshot,
809 Some(insertion_highlight_style),
810 syntax_theme,
811 );
812 }
813
814 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
815 }
816
817 highlighted_text.add_text_from_buffer_range(
818 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
819 &self.applied_edits_snapshot,
820 &self.syntax_snapshot,
821 None,
822 syntax_theme,
823 );
824
825 highlighted_text.build()
826 }
827
828 fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<usize>> {
829 let (first, _) = edits.first()?;
830 let (last, _) = edits.last()?;
831
832 let start = first
833 .start
834 .bias_left(&self.old_snapshot)
835 .to_point(&self.applied_edits_snapshot);
836 let end = last
837 .end
838 .bias_right(&self.old_snapshot)
839 .to_point(&self.applied_edits_snapshot);
840
841 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
842 let range = Point::new(start.row, 0)
843 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
844
845 Some(range.to_offset(&self.applied_edits_snapshot))
846 }
847}
848
849#[derive(Clone, Debug, PartialEq, Eq)]
850pub struct BracketMatch<T> {
851 pub open_range: Range<T>,
852 pub close_range: Range<T>,
853 pub newline_only: bool,
854 pub syntax_layer_depth: usize,
855 pub color_index: Option<usize>,
856}
857
858impl<T> BracketMatch<T> {
859 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
860 (self.open_range, self.close_range)
861 }
862}
863
864impl Buffer {
865 /// Create a new buffer with the given base text.
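    ///
    /// A minimal usage sketch, assuming a gpui context in which entities can
    /// be created:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.edit([(0..0, "// header\n")], None, cx);
    /// });
    /// ```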
866 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
867 Self::build(
868 TextBuffer::new(
869 ReplicaId::LOCAL,
870 cx.entity_id().as_non_zero_u64().into(),
871 base_text.into(),
872 ),
873 None,
874 Capability::ReadWrite,
875 )
876 }
877
878 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
879 pub fn local_normalized(
880 base_text_normalized: Rope,
881 line_ending: LineEnding,
882 cx: &Context<Self>,
883 ) -> Self {
884 Self::build(
885 TextBuffer::new_normalized(
886 ReplicaId::LOCAL,
887 cx.entity_id().as_non_zero_u64().into(),
888 line_ending,
889 base_text_normalized,
890 ),
891 None,
892 Capability::ReadWrite,
893 )
894 }
895
896 /// Create a new buffer that is a replica of a remote buffer.
897 pub fn remote(
898 remote_id: BufferId,
899 replica_id: ReplicaId,
900 capability: Capability,
901 base_text: impl Into<String>,
902 ) -> Self {
903 Self::build(
904 TextBuffer::new(replica_id, remote_id, base_text.into()),
905 None,
906 capability,
907 )
908 }
909
910 /// Create a new buffer that is a replica of a remote buffer, populating its
911 /// state from the given protobuf message.
912 pub fn from_proto(
913 replica_id: ReplicaId,
914 capability: Capability,
915 message: proto::BufferState,
916 file: Option<Arc<dyn File>>,
917 ) -> Result<Self> {
918 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
919 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
920 let mut this = Self::build(buffer, file, capability);
921 this.text.set_line_ending(proto::deserialize_line_ending(
922 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
923 ));
924 this.saved_version = proto::deserialize_version(&message.saved_version);
925 this.saved_mtime = message.saved_mtime.map(|time| time.into());
926 Ok(this)
927 }
928
929 /// Serialize the buffer's state to a protobuf message.
930 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
931 proto::BufferState {
932 id: self.remote_id().into(),
933 file: self.file.as_ref().map(|f| f.to_proto(cx)),
934 base_text: self.base_text().to_string(),
935 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
936 saved_version: proto::serialize_version(&self.saved_version),
937 saved_mtime: self.saved_mtime.map(|time| time.into()),
938 }
939 }
940
941 /// Serialize as protobufs all of the changes to the buffer since the given version.
942 pub fn serialize_ops(
943 &self,
944 since: Option<clock::Global>,
945 cx: &App,
946 ) -> Task<Vec<proto::Operation>> {
947 let mut operations = Vec::new();
948 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
949
950 operations.extend(self.remote_selections.iter().map(|(_, set)| {
951 proto::serialize_operation(&Operation::UpdateSelections {
952 selections: set.selections.clone(),
953 lamport_timestamp: set.lamport_timestamp,
954 line_mode: set.line_mode,
955 cursor_shape: set.cursor_shape,
956 })
957 }));
958
959 for (server_id, diagnostics) in &self.diagnostics {
960 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
961 lamport_timestamp: self.diagnostics_timestamp,
962 server_id: *server_id,
963 diagnostics: diagnostics.iter().cloned().collect(),
964 }));
965 }
966
967 for (server_id, completions) in &self.completion_triggers_per_language_server {
968 operations.push(proto::serialize_operation(
969 &Operation::UpdateCompletionTriggers {
970 triggers: completions.iter().cloned().collect(),
971 lamport_timestamp: self.completion_triggers_timestamp,
972 server_id: *server_id,
973 },
974 ));
975 }
976
977 let text_operations = self.text.operations().clone();
978 cx.background_spawn(async move {
979 let since = since.unwrap_or_default();
980 operations.extend(
981 text_operations
982 .iter()
983 .filter(|(_, op)| !since.observed(op.timestamp()))
984 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
985 );
986 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
987 operations
988 })
989 }
990
991 /// Assign a language to the buffer, returning the buffer.
992 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
993 self.set_language(Some(language), cx);
994 self
995 }
996
997 /// Returns the [`Capability`] of this buffer.
998 pub fn capability(&self) -> Capability {
999 self.capability
1000 }
1001
1002 /// Whether this buffer can only be read.
1003 pub fn read_only(&self) -> bool {
1004 self.capability == Capability::ReadOnly
1005 }
1006
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1008 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1009 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1010 let snapshot = buffer.snapshot();
1011 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1012 let tree_sitter_data = TreeSitterData::new(snapshot);
1013 Self {
1014 saved_mtime,
1015 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1016 saved_version: buffer.version(),
1017 preview_version: buffer.version(),
1018 reload_task: None,
1019 transaction_depth: 0,
1020 was_dirty_before_starting_transaction: None,
1021 has_unsaved_edits: Cell::new((buffer.version(), false)),
1022 text: buffer,
1023 branch_state: None,
1024 file,
1025 capability,
1026 syntax_map,
1027 reparse: None,
1028 non_text_state_update_count: 0,
1029 sync_parse_timeout: Duration::from_millis(1),
1030 parse_status: watch::channel(ParseStatus::Idle),
1031 autoindent_requests: Default::default(),
1032 wait_for_autoindent_txs: Default::default(),
1033 pending_autoindent: Default::default(),
1034 language: None,
1035 remote_selections: Default::default(),
1036 diagnostics: Default::default(),
1037 diagnostics_timestamp: Lamport::MIN,
1038 completion_triggers: Default::default(),
1039 completion_triggers_per_language_server: Default::default(),
1040 completion_triggers_timestamp: Lamport::MIN,
1041 deferred_ops: OperationQueue::new(),
1042 has_conflict: false,
1043 change_bits: Default::default(),
1044 _subscriptions: Vec::new(),
1045 }
1046 }
1047
1048 pub fn build_snapshot(
1049 text: Rope,
1050 language: Option<Arc<Language>>,
1051 language_registry: Option<Arc<LanguageRegistry>>,
1052 cx: &mut App,
1053 ) -> impl Future<Output = BufferSnapshot> + use<> {
1054 let entity_id = cx.reserve_entity::<Self>().entity_id();
1055 let buffer_id = entity_id.as_non_zero_u64().into();
1056 async move {
1057 let text =
1058 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1059 .snapshot();
1060 let mut syntax = SyntaxMap::new(&text).snapshot();
1061 if let Some(language) = language.clone() {
1062 let language_registry = language_registry.clone();
1063 syntax.reparse(&text, language_registry, language);
1064 }
1065 let tree_sitter_data = TreeSitterData::new(text.clone());
1066 BufferSnapshot {
1067 text,
1068 syntax,
1069 file: None,
1070 diagnostics: Default::default(),
1071 remote_selections: Default::default(),
1072 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1073 language,
1074 non_text_state_update_count: 0,
1075 }
1076 }
1077 }
1078
1079 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1080 let entity_id = cx.reserve_entity::<Self>().entity_id();
1081 let buffer_id = entity_id.as_non_zero_u64().into();
1082 let text = TextBuffer::new_normalized(
1083 ReplicaId::LOCAL,
1084 buffer_id,
1085 Default::default(),
1086 Rope::new(),
1087 )
1088 .snapshot();
1089 let syntax = SyntaxMap::new(&text).snapshot();
1090 let tree_sitter_data = TreeSitterData::new(text.clone());
1091 BufferSnapshot {
1092 text,
1093 syntax,
1094 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1095 file: None,
1096 diagnostics: Default::default(),
1097 remote_selections: Default::default(),
1098 language: None,
1099 non_text_state_update_count: 0,
1100 }
1101 }
1102
1103 #[cfg(any(test, feature = "test-support"))]
1104 pub fn build_snapshot_sync(
1105 text: Rope,
1106 language: Option<Arc<Language>>,
1107 language_registry: Option<Arc<LanguageRegistry>>,
1108 cx: &mut App,
1109 ) -> BufferSnapshot {
1110 let entity_id = cx.reserve_entity::<Self>().entity_id();
1111 let buffer_id = entity_id.as_non_zero_u64().into();
1112 let text =
1113 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1114 .snapshot();
1115 let mut syntax = SyntaxMap::new(&text).snapshot();
1116 if let Some(language) = language.clone() {
1117 syntax.reparse(&text, language_registry, language);
1118 }
1119 let tree_sitter_data = TreeSitterData::new(text.clone());
1120 BufferSnapshot {
1121 text,
1122 syntax,
1123 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1124 file: None,
1125 diagnostics: Default::default(),
1126 remote_selections: Default::default(),
1127 language,
1128 non_text_state_update_count: 0,
1129 }
1130 }
1131
1132 /// Retrieve a snapshot of the buffer's current state. This is computationally
1133 /// cheap, and allows reading from the buffer on a background thread.
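    ///
    /// A sketch of reading from a snapshot off the main thread (the
    /// surrounding entity handle and executor usage are assumed):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     let text = snapshot.text_for_range(0..snapshot.len()).collect::<String>();
    ///     // ...analyze `text` without blocking the UI...
    /// })
    /// .detach();
    /// ```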
1134 pub fn snapshot(&self) -> BufferSnapshot {
1135 let text = self.text.snapshot();
1136 let mut syntax_map = self.syntax_map.lock();
1137 syntax_map.interpolate(&text);
1138 let syntax = syntax_map.snapshot();
1139
1140 BufferSnapshot {
1141 text,
1142 syntax,
1143 tree_sitter_data: self.tree_sitter_data.clone(),
1144 file: self.file.clone(),
1145 remote_selections: self.remote_selections.clone(),
1146 diagnostics: self.diagnostics.clone(),
1147 language: self.language.clone(),
1148 non_text_state_update_count: self.non_text_state_update_count,
1149 }
1150 }
1151
1152 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1153 let this = cx.entity();
1154 cx.new(|cx| {
1155 let mut branch = Self {
1156 branch_state: Some(BufferBranchState {
1157 base_buffer: this.clone(),
1158 merged_operations: Default::default(),
1159 }),
1160 language: self.language.clone(),
1161 has_conflict: self.has_conflict,
1162 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1163 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1164 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1165 };
1166 if let Some(language_registry) = self.language_registry() {
1167 branch.set_language_registry(language_registry);
1168 }
1169
1170 // Reparse the branch buffer so that we get syntax highlighting immediately.
1171 branch.reparse(cx);
1172
1173 branch
1174 })
1175 }
1176
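    /// Computes an [`EditPreview`] for the given `edits` on a background
    /// thread, without modifying this buffer.
    ///
    /// A rough sketch of previewing edits before applying them (the
    /// surrounding async context and variable names are assumed):
    ///
    /// ```ignore
    /// let preview = buffer.read(cx).preview_edits(edits.clone(), cx).await;
    /// let highlighted = preview.highlight_edits(&snapshot, &edits, true, cx);
    /// ```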
1177 pub fn preview_edits(
1178 &self,
1179 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1180 cx: &App,
1181 ) -> Task<EditPreview> {
1182 let registry = self.language_registry();
1183 let language = self.language().cloned();
1184 let old_snapshot = self.text.snapshot();
1185 let mut branch_buffer = self.text.branch();
1186 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1187 cx.background_spawn(async move {
1188 if !edits.is_empty() {
1189 if let Some(language) = language.clone() {
1190 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1191 }
1192
1193 branch_buffer.edit(edits.iter().cloned());
1194 let snapshot = branch_buffer.snapshot();
1195 syntax_snapshot.interpolate(&snapshot);
1196
1197 if let Some(language) = language {
1198 syntax_snapshot.reparse(&snapshot, registry, language);
1199 }
1200 }
1201 EditPreview {
1202 old_snapshot,
1203 applied_edits_snapshot: branch_buffer.snapshot(),
1204 syntax_snapshot,
1205 }
1206 })
1207 }
1208
1209 /// Applies all of the changes in this buffer that intersect any of the
1210 /// given `ranges` to its base buffer.
1211 ///
1212 /// If `ranges` is empty, then all changes will be applied. This buffer must
1213 /// be a branch buffer to call this method.
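    ///
    /// A minimal sketch of the branch / edit / merge round trip (entity
    /// handles and context are assumed):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "inserted text")], None, cx);
    ///     // An empty `ranges` vec merges every change back into the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```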
1214 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1215 let Some(base_buffer) = self.base_buffer() else {
1216 debug_panic!("not a branch buffer");
1217 return;
1218 };
1219
1220 let mut ranges = if ranges.is_empty() {
1221 &[0..usize::MAX]
1222 } else {
1223 ranges.as_slice()
1224 }
1225 .iter()
1226 .peekable();
1227
1228 let mut edits = Vec::new();
1229 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1230 let mut is_included = false;
1231 while let Some(range) = ranges.peek() {
1232 if range.end < edit.new.start {
1233 ranges.next().unwrap();
1234 } else {
1235 if range.start <= edit.new.end {
1236 is_included = true;
1237 }
1238 break;
1239 }
1240 }
1241
1242 if is_included {
1243 edits.push((
1244 edit.old.clone(),
1245 self.text_for_range(edit.new.clone()).collect::<String>(),
1246 ));
1247 }
1248 }
1249
1250 let operation = base_buffer.update(cx, |base_buffer, cx| {
1251 // cx.emit(BufferEvent::DiffBaseChanged);
1252 base_buffer.edit(edits, None, cx)
1253 });
1254
1255 if let Some(operation) = operation
1256 && let Some(BufferBranchState {
1257 merged_operations, ..
1258 }) = &mut self.branch_state
1259 {
1260 merged_operations.push(operation);
1261 }
1262 }
1263
1264 fn on_base_buffer_event(
1265 &mut self,
1266 _: Entity<Buffer>,
1267 event: &BufferEvent,
1268 cx: &mut Context<Self>,
1269 ) {
1270 let BufferEvent::Operation { operation, .. } = event else {
1271 return;
1272 };
1273 let Some(BufferBranchState {
1274 merged_operations, ..
1275 }) = &mut self.branch_state
1276 else {
1277 return;
1278 };
1279
1280 let mut operation_to_undo = None;
1281 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1282 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1283 {
1284 merged_operations.remove(ix);
1285 operation_to_undo = Some(operation.timestamp);
1286 }
1287
1288 self.apply_ops([operation.clone()], cx);
1289
1290 if let Some(timestamp) = operation_to_undo {
1291 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1292 self.undo_operations(counts, cx);
1293 }
1294 }
1295
1296 #[cfg(test)]
1297 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1298 &self.text
1299 }
1300
1301 /// Retrieve a snapshot of the buffer's raw text, without any
1302 /// language-related state like the syntax tree or diagnostics.
1303 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1304 self.text.snapshot()
1305 }
1306
1307 /// The file associated with the buffer, if any.
1308 pub fn file(&self) -> Option<&Arc<dyn File>> {
1309 self.file.as_ref()
1310 }
1311
1312 /// The version of the buffer that was last saved or reloaded from disk.
1313 pub fn saved_version(&self) -> &clock::Global {
1314 &self.saved_version
1315 }
1316
1317 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1318 pub fn saved_mtime(&self) -> Option<MTime> {
1319 self.saved_mtime
1320 }
1321
1322 /// Assign a language to the buffer.
1323 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1324 self.non_text_state_update_count += 1;
1325 self.syntax_map.lock().clear(&self.text);
1326 self.language = language;
1327 self.was_changed();
1328 self.reparse(cx);
1329 cx.emit(BufferEvent::LanguageChanged);
1330 }
1331
1332 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1333 /// other languages if parts of the buffer are written in different languages.
1334 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1335 self.syntax_map
1336 .lock()
1337 .set_language_registry(language_registry);
1338 }
1339
1340 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1341 self.syntax_map.lock().language_registry()
1342 }
1343
1344 /// Assign the line ending type to the buffer.
1345 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1346 self.text.set_line_ending(line_ending);
1347
1348 let lamport_timestamp = self.text.lamport_clock.tick();
1349 self.send_operation(
1350 Operation::UpdateLineEnding {
1351 line_ending,
1352 lamport_timestamp,
1353 },
1354 true,
1355 cx,
1356 );
1357 }
1358
1359 /// Assign the buffer a new [`Capability`].
1360 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1361 if self.capability != capability {
1362 self.capability = capability;
1363 cx.emit(BufferEvent::CapabilityChanged)
1364 }
1365 }
1366
1367 /// This method is called to signal that the buffer has been saved.
1368 pub fn did_save(
1369 &mut self,
1370 version: clock::Global,
1371 mtime: Option<MTime>,
1372 cx: &mut Context<Self>,
1373 ) {
1374 self.saved_version = version.clone();
1375 self.has_unsaved_edits.set((version, false));
1376 self.has_conflict = false;
1377 self.saved_mtime = mtime;
1378 self.was_changed();
1379 cx.emit(BufferEvent::Saved);
1380 cx.notify();
1381 }
1382
1383 /// Reloads the contents of the buffer from disk.
1384 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1385 let (tx, rx) = futures::channel::oneshot::channel();
1386 let prev_version = self.text.version();
1387 self.reload_task = Some(cx.spawn(async move |this, cx| {
1388 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1389 let file = this.file.as_ref()?.as_local()?;
1390
1391 Some((file.disk_state().mtime(), file.load(cx)))
1392 })?
1393 else {
1394 return Ok(());
1395 };
1396
1397 let new_text = new_text.await?;
1398 let diff = this
1399 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1400 .await;
1401 this.update(cx, |this, cx| {
1402 if this.version() == diff.base_version {
1403 this.finalize_last_transaction();
1404 this.apply_diff(diff, cx);
1405 tx.send(this.finalize_last_transaction().cloned()).ok();
1406 this.has_conflict = false;
1407 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1408 } else {
1409 if !diff.edits.is_empty()
1410 || this
1411 .edits_since::<usize>(&diff.base_version)
1412 .next()
1413 .is_some()
1414 {
1415 this.has_conflict = true;
1416 }
1417
1418 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1419 }
1420
1421 this.reload_task.take();
1422 })
1423 }));
1424 rx
1425 }
1426
1427 /// This method is called to signal that the buffer has been reloaded.
1428 pub fn did_reload(
1429 &mut self,
1430 version: clock::Global,
1431 line_ending: LineEnding,
1432 mtime: Option<MTime>,
1433 cx: &mut Context<Self>,
1434 ) {
1435 self.saved_version = version;
1436 self.has_unsaved_edits
1437 .set((self.saved_version.clone(), false));
1438 self.text.set_line_ending(line_ending);
1439 self.saved_mtime = mtime;
1440 cx.emit(BufferEvent::Reloaded);
1441 cx.notify();
1442 }
1443
1444 /// Updates the [`File`] backing this buffer. This should be called when
1445 /// the file has changed or has been deleted.
1446 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1447 let was_dirty = self.is_dirty();
1448 let mut file_changed = false;
1449
1450 if let Some(old_file) = self.file.as_ref() {
1451 if new_file.path() != old_file.path() {
1452 file_changed = true;
1453 }
1454
1455 let old_state = old_file.disk_state();
1456 let new_state = new_file.disk_state();
1457 if old_state != new_state {
1458 file_changed = true;
1459 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1460 cx.emit(BufferEvent::ReloadNeeded)
1461 }
1462 }
1463 } else {
1464 file_changed = true;
1465 };
1466
1467 self.file = Some(new_file);
1468 if file_changed {
1469 self.was_changed();
1470 self.non_text_state_update_count += 1;
1471 if was_dirty != self.is_dirty() {
1472 cx.emit(BufferEvent::DirtyChanged);
1473 }
1474 cx.emit(BufferEvent::FileHandleChanged);
1475 cx.notify();
1476 }
1477 }
1478
1479 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1480 Some(self.branch_state.as_ref()?.base_buffer.clone())
1481 }
1482
1483 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1484 pub fn language(&self) -> Option<&Arc<Language>> {
1485 self.language.as_ref()
1486 }
1487
1488 /// Returns the [`Language`] at the given location.
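    ///
    /// With language injections (for example, a fenced code block inside a
    /// Markdown buffer), this returns the innermost injected language at the
    /// position, falling back to the buffer's primary language. A hypothetical
    /// sketch, with `offset_inside_fence` standing in for a position inside an
    /// injected region:
    ///
    /// ```ignore
    /// let injected = buffer.language_at(offset_inside_fence);
    /// let outer = buffer.language_at(0);
    /// assert_ne!(
    ///     injected.map(|l| l.name()),
    ///     outer.map(|l| l.name()),
    /// );
    /// ```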
1489 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1490 let offset = position.to_offset(self);
1491 let mut is_first = true;
1492 let start_anchor = self.anchor_before(offset);
1493 let end_anchor = self.anchor_after(offset);
1494 self.syntax_map
1495 .lock()
1496 .layers_for_range(offset..offset, &self.text, false)
1497 .filter(|layer| {
1498 if is_first {
1499 is_first = false;
1500 return true;
1501 }
1502
1503 layer
1504 .included_sub_ranges
1505 .map(|sub_ranges| {
1506 sub_ranges.iter().any(|sub_range| {
1507 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1508 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1509 !is_before_start && !is_after_end
1510 })
1511 })
1512 .unwrap_or(true)
1513 })
1514 .last()
1515 .map(|info| info.language.clone())
1516 .or_else(|| self.language.clone())
1517 }
1518
1519 /// Returns each [`Language`] for the active syntax layers at the given location.
1520 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1521 let offset = position.to_offset(self);
1522 let mut languages: Vec<Arc<Language>> = self
1523 .syntax_map
1524 .lock()
1525 .layers_for_range(offset..offset, &self.text, false)
1526 .map(|info| info.language.clone())
1527 .collect();
1528
1529 if languages.is_empty()
1530 && let Some(buffer_language) = self.language()
1531 {
1532 languages.push(buffer_language.clone());
1533 }
1534
1535 languages
1536 }
1537
1538 /// An integer version number that accounts for all updates besides
1539 /// the buffer's text itself (which is versioned via a version vector).
1540 pub fn non_text_state_update_count(&self) -> usize {
1541 self.non_text_state_update_count
1542 }
1543
1544 /// Whether the buffer is being parsed in the background.
1545 #[cfg(any(test, feature = "test-support"))]
1546 pub fn is_parsing(&self) -> bool {
1547 self.reparse.is_some()
1548 }
1549
1550 /// Indicates whether the buffer contains any regions that may be
1551 /// written in a language that hasn't been loaded yet.
1552 pub fn contains_unknown_injections(&self) -> bool {
1553 self.syntax_map.lock().contains_unknown_injections()
1554 }
1555
1556 #[cfg(any(test, feature = "test-support"))]
1557 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1558 self.sync_parse_timeout = timeout;
1559 }
1560
1561 /// Called after an edit to synchronize the buffer's main parse tree with
1562 /// the buffer's new underlying state.
1563 ///
1564 /// Locks the syntax map and interpolates the edits since the last reparse
1565 /// into the foreground syntax tree.
1566 ///
1567 /// Then takes a stable snapshot of the syntax map before unlocking it.
1568 /// The snapshot with the interpolated edits is sent to a background thread,
1569 /// where we ask Tree-sitter to perform an incremental parse.
1570 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete. If it finishes within that window,
    /// we proceed synchronously with the newly parsed tree.
    ///
    /// If we time out waiting for the parse, we spawn a second task that waits
    /// for it to finish, and return immediately with the interpolated tree still
    /// in the foreground. When the background parse completes, it calls back
    /// onto the main thread and assigns the new parse state there.
1579 ///
1580 /// If the buffer or grammar changed since the start of the background parse,
1581 /// initiate an additional reparse recursively. To avoid concurrent parses
1582 /// for the same buffer, we only initiate a new parse if we are not already
1583 /// parsing in the background.
1584 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1585 if self.reparse.is_some() {
1586 return;
1587 }
1588 let language = if let Some(language) = self.language.clone() {
1589 language
1590 } else {
1591 return;
1592 };
1593
1594 let text = self.text_snapshot();
1595 let parsed_version = self.version();
1596
1597 let mut syntax_map = self.syntax_map.lock();
1598 syntax_map.interpolate(&text);
1599 let language_registry = syntax_map.language_registry();
1600 let mut syntax_snapshot = syntax_map.snapshot();
1601 drop(syntax_map);
1602
1603 let parse_task = cx.background_spawn({
1604 let language = language.clone();
1605 let language_registry = language_registry.clone();
1606 async move {
1607 syntax_snapshot.reparse(&text, language_registry, language);
1608 syntax_snapshot
1609 }
1610 });
1611
1612 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1613 match cx
1614 .background_executor()
1615 .block_with_timeout(self.sync_parse_timeout, parse_task)
1616 {
1617 Ok(new_syntax_snapshot) => {
1618 self.did_finish_parsing(new_syntax_snapshot, cx);
1619 self.reparse = None;
1620 }
1621 Err(parse_task) => {
1622 // todo(lw): hot foreground spawn
1623 self.reparse = Some(cx.spawn(async move |this, cx| {
1624 let new_syntax_map = cx.background_spawn(parse_task).await;
1625 this.update(cx, move |this, cx| {
1626 let grammar_changed = || {
1627 this.language.as_ref().is_none_or(|current_language| {
1628 !Arc::ptr_eq(&language, current_language)
1629 })
1630 };
1631 let language_registry_changed = || {
1632 new_syntax_map.contains_unknown_injections()
1633 && language_registry.is_some_and(|registry| {
1634 registry.version() != new_syntax_map.language_registry_version()
1635 })
1636 };
1637 let parse_again = this.version.changed_since(&parsed_version)
1638 || language_registry_changed()
1639 || grammar_changed();
1640 this.did_finish_parsing(new_syntax_map, cx);
1641 this.reparse = None;
1642 if parse_again {
1643 this.reparse(cx);
1644 }
1645 })
1646 .ok();
1647 }));
1648 }
1649 }
1650 }
1651
1652 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1653 self.was_changed();
1654 self.non_text_state_update_count += 1;
1655 self.syntax_map.lock().did_parse(syntax_snapshot);
1656 self.request_autoindent(cx);
1657 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1658 self.tree_sitter_data.lock().clear();
1659 cx.emit(BufferEvent::Reparsed);
1660 cx.notify();
1661 }
1662
1663 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1664 self.parse_status.1.clone()
1665 }
1666
    /// Returns a future that resolves once the buffer is no longer parsing.
1668 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1669 let mut parse_status = self.parse_status();
1670 async move {
1671 while *parse_status.borrow() != ParseStatus::Idle {
1672 if parse_status.changed().await.is_err() {
1673 break;
1674 }
1675 }
1676 }
1677 }
1678
1679 /// Assign to the buffer a set of diagnostics created by a given language server.
1680 pub fn update_diagnostics(
1681 &mut self,
1682 server_id: LanguageServerId,
1683 diagnostics: DiagnosticSet,
1684 cx: &mut Context<Self>,
1685 ) {
1686 let lamport_timestamp = self.text.lamport_clock.tick();
1687 let op = Operation::UpdateDiagnostics {
1688 server_id,
1689 diagnostics: diagnostics.iter().cloned().collect(),
1690 lamport_timestamp,
1691 };
1692
1693 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1694 self.send_operation(op, true, cx);
1695 }
1696
1697 pub fn buffer_diagnostics(
1698 &self,
1699 for_server: Option<LanguageServerId>,
1700 ) -> Vec<&DiagnosticEntry<Anchor>> {
1701 match for_server {
1702 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1703 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1704 Err(_) => Vec::new(),
1705 },
1706 None => self
1707 .diagnostics
1708 .iter()
1709 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1710 .collect(),
1711 }
1712 }
1713
1714 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1715 if let Some(indent_sizes) = self.compute_autoindents() {
1716 let indent_sizes = cx.background_spawn(indent_sizes);
1717 match cx
1718 .background_executor()
1719 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1720 {
1721 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1722 Err(indent_sizes) => {
1723 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1724 let indent_sizes = indent_sizes.await;
1725 this.update(cx, |this, cx| {
1726 this.apply_autoindents(indent_sizes, cx);
1727 })
1728 .ok();
1729 }));
1730 }
1731 }
1732 } else {
1733 self.autoindent_requests.clear();
1734 for tx in self.wait_for_autoindent_txs.drain(..) {
1735 tx.send(()).ok();
1736 }
1737 }
1738 }
1739
1740 fn compute_autoindents(
1741 &self,
1742 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1743 let max_rows_between_yields = 100;
1744 let snapshot = self.snapshot();
1745 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1746 return None;
1747 }
1748
1749 let autoindent_requests = self.autoindent_requests.clone();
1750 Some(async move {
1751 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1752 for request in autoindent_requests {
1753 // Resolve each edited range to its row in the current buffer and in the
1754 // buffer before this batch of edits.
1755 let mut row_ranges = Vec::new();
1756 let mut old_to_new_rows = BTreeMap::new();
1757 let mut language_indent_sizes_by_new_row = Vec::new();
1758 for entry in &request.entries {
1759 let position = entry.range.start;
1760 let new_row = position.to_point(&snapshot).row;
1761 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1762 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1763
1764 if !entry.first_line_is_new {
1765 let old_row = position.to_point(&request.before_edit).row;
1766 old_to_new_rows.insert(old_row, new_row);
1767 }
1768 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1769 }
1770
1771 // Build a map containing the suggested indentation for each of the edited lines
1772 // with respect to the state of the buffer before these edits. This map is keyed
1773 // by the rows for these lines in the current state of the buffer.
1774 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1775 let old_edited_ranges =
1776 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1777 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1778 let mut language_indent_size = IndentSize::default();
1779 for old_edited_range in old_edited_ranges {
1780 let suggestions = request
1781 .before_edit
1782 .suggest_autoindents(old_edited_range.clone())
1783 .into_iter()
1784 .flatten();
1785 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1786 if let Some(suggestion) = suggestion {
1787 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1788
1789 // Find the indent size based on the language for this row.
1790 while let Some((row, size)) = language_indent_sizes.peek() {
1791 if *row > new_row {
1792 break;
1793 }
1794 language_indent_size = *size;
1795 language_indent_sizes.next();
1796 }
1797
1798 let suggested_indent = old_to_new_rows
1799 .get(&suggestion.basis_row)
1800 .and_then(|from_row| {
1801 Some(old_suggestions.get(from_row).copied()?.0)
1802 })
1803 .unwrap_or_else(|| {
1804 request
1805 .before_edit
1806 .indent_size_for_line(suggestion.basis_row)
1807 })
1808 .with_delta(suggestion.delta, language_indent_size);
1809 old_suggestions
1810 .insert(new_row, (suggested_indent, suggestion.within_error));
1811 }
1812 }
1813 yield_now().await;
1814 }
1815
1816 // Compute new suggestions for each line, but only include them in the result
1817 // if they differ from the old suggestion for that line.
1818 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1819 let mut language_indent_size = IndentSize::default();
1820 for (row_range, original_indent_column) in row_ranges {
1821 let new_edited_row_range = if request.is_block_mode {
1822 row_range.start..row_range.start + 1
1823 } else {
1824 row_range.clone()
1825 };
1826
1827 let suggestions = snapshot
1828 .suggest_autoindents(new_edited_row_range.clone())
1829 .into_iter()
1830 .flatten();
1831 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1832 if let Some(suggestion) = suggestion {
1833 // Find the indent size based on the language for this row.
1834 while let Some((row, size)) = language_indent_sizes.peek() {
1835 if *row > new_row {
1836 break;
1837 }
1838 language_indent_size = *size;
1839 language_indent_sizes.next();
1840 }
1841
1842 let suggested_indent = indent_sizes
1843 .get(&suggestion.basis_row)
1844 .copied()
1845 .map(|e| e.0)
1846 .unwrap_or_else(|| {
1847 snapshot.indent_size_for_line(suggestion.basis_row)
1848 })
1849 .with_delta(suggestion.delta, language_indent_size);
1850
1851 if old_suggestions.get(&new_row).is_none_or(
1852 |(old_indentation, was_within_error)| {
1853 suggested_indent != *old_indentation
1854 && (!suggestion.within_error || *was_within_error)
1855 },
1856 ) {
1857 indent_sizes.insert(
1858 new_row,
1859 (suggested_indent, request.ignore_empty_lines),
1860 );
1861 }
1862 }
1863 }
1864
1865 if let (true, Some(original_indent_column)) =
1866 (request.is_block_mode, original_indent_column)
1867 {
1868 let new_indent =
1869 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1870 *indent
1871 } else {
1872 snapshot.indent_size_for_line(row_range.start)
1873 };
1874 let delta = new_indent.len as i64 - original_indent_column as i64;
1875 if delta != 0 {
1876 for row in row_range.skip(1) {
1877 indent_sizes.entry(row).or_insert_with(|| {
1878 let mut size = snapshot.indent_size_for_line(row);
1879 if size.kind == new_indent.kind {
1880 match delta.cmp(&0) {
1881 Ordering::Greater => size.len += delta as u32,
1882 Ordering::Less => {
1883 size.len = size.len.saturating_sub(-delta as u32)
1884 }
1885 Ordering::Equal => {}
1886 }
1887 }
1888 (size, request.ignore_empty_lines)
1889 });
1890 }
1891 }
1892 }
1893
1894 yield_now().await;
1895 }
1896 }
1897
1898 indent_sizes
1899 .into_iter()
1900 .filter_map(|(row, (indent, ignore_empty_lines))| {
1901 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1902 None
1903 } else {
1904 Some((row, indent))
1905 }
1906 })
1907 .collect()
1908 })
1909 }
1910
1911 fn apply_autoindents(
1912 &mut self,
1913 indent_sizes: BTreeMap<u32, IndentSize>,
1914 cx: &mut Context<Self>,
1915 ) {
1916 self.autoindent_requests.clear();
1917 for tx in self.wait_for_autoindent_txs.drain(..) {
1918 tx.send(()).ok();
1919 }
1920
1921 let edits: Vec<_> = indent_sizes
1922 .into_iter()
1923 .filter_map(|(row, indent_size)| {
1924 let current_size = indent_size_for_line(self, row);
1925 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1926 })
1927 .collect();
1928
1929 let preserve_preview = self.preserve_preview();
1930 self.edit(edits, None, cx);
1931 if preserve_preview {
1932 self.refresh_preview();
1933 }
1934 }
1935
1936 /// Create a minimal edit that will cause the given row to be indented
1937 /// with the given size. After applying this edit, the length of the line
1938 /// will always be at least `new_size.len`.
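    ///
    /// Illustrative example (hypothetical row and indent sizes):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces inserts two spaces at column 0 of row 3.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```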
1939 pub fn edit_for_indent_size_adjustment(
1940 row: u32,
1941 current_size: IndentSize,
1942 new_size: IndentSize,
1943 ) -> Option<(Range<Point>, String)> {
1944 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1946 Ordering::Greater => {
1947 let point = Point::new(row, 0);
1948 Some((
1949 point..point,
1950 iter::repeat(new_size.char())
1951 .take((new_size.len - current_size.len) as usize)
1952 .collect::<String>(),
1953 ))
1954 }
1955
1956 Ordering::Less => Some((
1957 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1958 String::new(),
1959 )),
1960
1961 Ordering::Equal => None,
1962 }
1963 } else {
1964 Some((
1965 Point::new(row, 0)..Point::new(row, current_size.len),
1966 iter::repeat(new_size.char())
1967 .take(new_size.len as usize)
1968 .collect::<String>(),
1969 ))
1970 }
1971 }
1972
1973 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1974 /// and the given new text.
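    ///
    /// A rough sketch of one way to use it (illustrative; assumes a `Context<Buffer>` named `cx`):
    ///
    /// ```ignore
    /// // Compute a diff on a background thread, then apply it once it resolves.
    /// let diff_task = buffer.diff("new contents\n".to_string(), cx);
    /// cx.spawn(async move |buffer, cx| {
    ///     let diff = diff_task.await;
    ///     buffer
    ///         .update(cx, |buffer, cx| buffer.apply_diff(diff, cx))
    ///         .ok();
    /// })
    /// .detach();
    /// ```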
1975 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1976 let old_text = self.as_rope().clone();
1977 let base_version = self.version();
1978 cx.background_executor()
1979 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1980 let old_text = old_text.to_string();
1981 let line_ending = LineEnding::detect(&new_text);
1982 LineEnding::normalize(&mut new_text);
1983 let edits = text_diff(&old_text, &new_text);
1984 Diff {
1985 base_version,
1986 line_ending,
1987 edits,
1988 }
1989 })
1990 }
1991
1992 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1994 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1995 let old_text = self.as_rope().clone();
1996 let line_ending = self.line_ending();
1997 let base_version = self.version();
1998 cx.background_spawn(async move {
1999 let ranges = trailing_whitespace_ranges(&old_text);
2000 let empty = Arc::<str>::from("");
2001 Diff {
2002 base_version,
2003 line_ending,
2004 edits: ranges
2005 .into_iter()
2006 .map(|range| (range, empty.clone()))
2007 .collect(),
2008 }
2009 })
2010 }
2011
2012 /// Ensures that the buffer ends with a single newline character, and
2013 /// no other whitespace. Skips if the buffer is empty.
2014 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2015 let len = self.len();
2016 if len == 0 {
2017 return;
2018 }
2019 let mut offset = len;
2020 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2021 let non_whitespace_len = chunk
2022 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2023 .len();
2024 offset -= chunk.len();
2025 offset += non_whitespace_len;
2026 if non_whitespace_len != 0 {
2027 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2028 return;
2029 }
2030 break;
2031 }
2032 }
2033 self.edit([(offset..len, "\n")], None, cx);
2034 }
2035
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of the
    /// diff that conflict with those changes are discarded.
2039 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2040 let snapshot = self.snapshot();
2041 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2042 let mut delta = 0;
2043 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2044 while let Some(edit_since) = edits_since.peek() {
2045 // If the edit occurs after a diff hunk, then it does not
2046 // affect that hunk.
2047 if edit_since.old.start > range.end {
2048 break;
2049 }
2050 // If the edit precedes the diff hunk, then adjust the hunk
2051 // to reflect the edit.
2052 else if edit_since.old.end < range.start {
2053 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2054 edits_since.next();
2055 }
2056 // If the edit intersects a diff hunk, then discard that hunk.
2057 else {
2058 return None;
2059 }
2060 }
2061
2062 let start = (range.start as i64 + delta) as usize;
2063 let end = (range.end as i64 + delta) as usize;
2064 Some((start..end, new_text))
2065 });
2066
2067 self.start_transaction();
2068 self.text.set_line_ending(diff.line_ending);
2069 self.edit(adjusted_edits, None, cx);
2070 self.end_transaction(cx)
2071 }
2072
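    /// Returns whether the buffer has edits that are not reflected in its last saved version.
    /// The result is cached per buffer version to avoid repeatedly comparing version vectors.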
2073 pub fn has_unsaved_edits(&self) -> bool {
2074 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2075
2076 if last_version == self.version {
2077 self.has_unsaved_edits
2078 .set((last_version, has_unsaved_edits));
2079 return has_unsaved_edits;
2080 }
2081
2082 let has_edits = self.has_edits_since(&self.saved_version);
2083 self.has_unsaved_edits
2084 .set((self.version.clone(), has_edits));
2085 has_edits
2086 }
2087
2088 /// Checks if the buffer has unsaved changes.
2089 pub fn is_dirty(&self) -> bool {
2090 if self.capability == Capability::ReadOnly {
2091 return false;
2092 }
2093 if self.has_conflict {
2094 return true;
2095 }
2096 match self.file.as_ref().map(|f| f.disk_state()) {
2097 Some(DiskState::New) | Some(DiskState::Deleted) => {
2098 !self.is_empty() && self.has_unsaved_edits()
2099 }
2100 _ => self.has_unsaved_edits(),
2101 }
2102 }
2103
2104 /// Marks the buffer as having a conflict regardless of current buffer state.
2105 pub fn set_conflict(&mut self) {
2106 self.has_conflict = true;
2107 }
2108
2109 /// Checks if the buffer and its file have both changed since the buffer
2110 /// was last saved or reloaded.
2111 pub fn has_conflict(&self) -> bool {
2112 if self.has_conflict {
2113 return true;
2114 }
2115 let Some(file) = self.file.as_ref() else {
2116 return false;
2117 };
2118 match file.disk_state() {
2119 DiskState::New => false,
2120 DiskState::Present { mtime } => match self.saved_mtime {
2121 Some(saved_mtime) => {
2122 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2123 }
2124 None => true,
2125 },
2126 DiskState::Deleted => false,
2127 }
2128 }
2129
2130 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2131 pub fn subscribe(&mut self) -> Subscription<usize> {
2132 self.text.subscribe()
2133 }
2134
2135 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2136 ///
2137 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
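    ///
    /// A minimal sketch of the intended usage (illustrative):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // After any subsequent edit, `changed.get()` returns true.
    /// ```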
2139 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2140 if let Err(ix) = self
2141 .change_bits
2142 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2143 {
2144 self.change_bits.insert(ix, bit);
2145 }
2146 }
2147
2148 /// Set the change bit for all "listeners".
2149 fn was_changed(&mut self) {
2150 self.change_bits.retain(|change_bit| {
2151 change_bit
2152 .upgrade()
2153 .inspect(|bit| {
2154 _ = bit.replace(true);
2155 })
2156 .is_some()
2157 });
2158 }
2159
2160 /// Starts a transaction, if one is not already in-progress. When undoing or
2161 /// redoing edits, all of the edits performed within a transaction are undone
2162 /// or redone together.
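    ///
    /// For example (illustrative):
    ///
    /// ```ignore
    /// // Both edits are undone together because they share a transaction.
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// buffer.end_transaction(cx);
    /// buffer.undo(cx);
    /// ```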
2163 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2164 self.start_transaction_at(Instant::now())
2165 }
2166
2167 /// Starts a transaction, providing the current time. Subsequent transactions
2168 /// that occur within a short period of time will be grouped together. This
2169 /// is controlled by the buffer's undo grouping duration.
2170 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2171 self.transaction_depth += 1;
2172 if self.was_dirty_before_starting_transaction.is_none() {
2173 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2174 }
2175 self.text.start_transaction_at(now)
2176 }
2177
2178 /// Terminates the current transaction, if this is the outermost transaction.
2179 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2180 self.end_transaction_at(Instant::now(), cx)
2181 }
2182
2183 /// Terminates the current transaction, providing the current time. Subsequent transactions
2184 /// that occur within a short period of time will be grouped together. This
2185 /// is controlled by the buffer's undo grouping duration.
2186 pub fn end_transaction_at(
2187 &mut self,
2188 now: Instant,
2189 cx: &mut Context<Self>,
2190 ) -> Option<TransactionId> {
2191 assert!(self.transaction_depth > 0);
2192 self.transaction_depth -= 1;
2193 let was_dirty = if self.transaction_depth == 0 {
2194 self.was_dirty_before_starting_transaction.take().unwrap()
2195 } else {
2196 false
2197 };
2198 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2199 self.did_edit(&start_version, was_dirty, cx);
2200 Some(transaction_id)
2201 } else {
2202 None
2203 }
2204 }
2205
2206 /// Manually add a transaction to the buffer's undo history.
2207 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2208 self.text.push_transaction(transaction, now);
2209 }
2210
2211 /// Differs from `push_transaction` in that it does not clear the redo
2212 /// stack. Intended to be used to create a parent transaction to merge
2213 /// potential child transactions into.
2214 ///
2215 /// The caller is responsible for removing it from the undo history using
2216 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2217 /// are merged into this transaction, the caller is responsible for ensuring
2218 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2219 /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
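    ///
    /// A rough sketch of that pattern (illustrative):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "x")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```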
2222 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2223 self.text.push_empty_transaction(now)
2224 }
2225
    /// Prevents the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2228 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2229 self.text.finalize_last_transaction()
2230 }
2231
2232 /// Manually group all changes since a given transaction.
2233 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2234 self.text.group_until_transaction(transaction_id);
2235 }
2236
    /// Manually remove a transaction from the buffer's undo history.
2238 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2239 self.text.forget_transaction(transaction_id)
2240 }
2241
    /// Retrieve a transaction from the buffer's undo history.
2243 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2244 self.text.get_transaction(transaction_id)
2245 }
2246
2247 /// Manually merge two transactions in the buffer's undo history.
2248 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2249 self.text.merge_transactions(transaction, destination);
2250 }
2251
2252 /// Waits for the buffer to receive operations with the given timestamps.
2253 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2254 &mut self,
2255 edit_ids: It,
2256 ) -> impl Future<Output = Result<()>> + use<It> {
2257 self.text.wait_for_edits(edit_ids)
2258 }
2259
2260 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2261 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2262 &mut self,
2263 anchors: It,
2264 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2265 self.text.wait_for_anchors(anchors)
2266 }
2267
2268 /// Waits for the buffer to receive operations up to the given version.
2269 pub fn wait_for_version(
2270 &mut self,
2271 version: clock::Global,
2272 ) -> impl Future<Output = Result<()>> + use<> {
2273 self.text.wait_for_version(version)
2274 }
2275
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2278 pub fn give_up_waiting(&mut self) {
2279 self.text.give_up_waiting();
2280 }
2281
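    /// Returns a receiver that resolves once the pending auto-indent requests have been
    /// applied, or `None` if there are no pending requests.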
2282 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2283 let mut rx = None;
2284 if !self.autoindent_requests.is_empty() {
2285 let channel = oneshot::channel();
2286 self.wait_for_autoindent_txs.push(channel.0);
2287 rx = Some(channel.1);
2288 }
2289 rx
2290 }
2291
2292 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2293 pub fn set_active_selections(
2294 &mut self,
2295 selections: Arc<[Selection<Anchor>]>,
2296 line_mode: bool,
2297 cursor_shape: CursorShape,
2298 cx: &mut Context<Self>,
2299 ) {
2300 let lamport_timestamp = self.text.lamport_clock.tick();
2301 self.remote_selections.insert(
2302 self.text.replica_id(),
2303 SelectionSet {
2304 selections: selections.clone(),
2305 lamport_timestamp,
2306 line_mode,
2307 cursor_shape,
2308 },
2309 );
2310 self.send_operation(
2311 Operation::UpdateSelections {
2312 selections,
2313 line_mode,
2314 lamport_timestamp,
2315 cursor_shape,
2316 },
2317 true,
2318 cx,
2319 );
2320 self.non_text_state_update_count += 1;
2321 cx.notify();
2322 }
2323
2324 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2325 /// this replica.
2326 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2327 if self
2328 .remote_selections
2329 .get(&self.text.replica_id())
2330 .is_none_or(|set| !set.selections.is_empty())
2331 {
2332 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2333 }
2334 }
2335
2336 pub fn set_agent_selections(
2337 &mut self,
2338 selections: Arc<[Selection<Anchor>]>,
2339 line_mode: bool,
2340 cursor_shape: CursorShape,
2341 cx: &mut Context<Self>,
2342 ) {
2343 let lamport_timestamp = self.text.lamport_clock.tick();
2344 self.remote_selections.insert(
2345 ReplicaId::AGENT,
2346 SelectionSet {
2347 selections,
2348 lamport_timestamp,
2349 line_mode,
2350 cursor_shape,
2351 },
2352 );
2353 self.non_text_state_update_count += 1;
2354 cx.notify();
2355 }
2356
2357 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2358 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2359 }
2360
2361 /// Replaces the buffer's entire text.
2362 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2363 where
2364 T: Into<Arc<str>>,
2365 {
2366 self.autoindent_requests.clear();
2367 self.edit([(0..self.len(), text)], None, cx)
2368 }
2369
2370 /// Appends the given text to the end of the buffer.
2371 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2372 where
2373 T: Into<Arc<str>>,
2374 {
2375 self.edit([(self.len()..self.len(), text)], None, cx)
2376 }
2377
2378 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2379 /// delete, and a string of text to insert at that location.
2380 ///
2381 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2382 /// request for the edited ranges, which will be processed when the buffer finishes
2383 /// parsing.
2384 ///
    /// Parsing takes place at the end of a transaction, and may be performed synchronously
    /// or asynchronously, depending on the changes.
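    ///
    /// For example (illustrative offsets and text):
    ///
    /// ```ignore
    /// // Replace the first three bytes and insert text at offset 10 in a single call.
    /// buffer.edit([(0..3, "foo"), (10..10, "bar")], None, cx);
    /// ```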
2387 pub fn edit<I, S, T>(
2388 &mut self,
2389 edits_iter: I,
2390 autoindent_mode: Option<AutoindentMode>,
2391 cx: &mut Context<Self>,
2392 ) -> Option<clock::Lamport>
2393 where
2394 I: IntoIterator<Item = (Range<S>, T)>,
2395 S: ToOffset,
2396 T: Into<Arc<str>>,
2397 {
2398 // Skip invalid edits and coalesce contiguous ones.
2399 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2400
2401 for (range, new_text) in edits_iter {
2402 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2403
2404 if range.start > range.end {
2405 mem::swap(&mut range.start, &mut range.end);
2406 }
2407 let new_text = new_text.into();
2408 if !new_text.is_empty() || !range.is_empty() {
2409 if let Some((prev_range, prev_text)) = edits.last_mut()
2410 && prev_range.end >= range.start
2411 {
2412 prev_range.end = cmp::max(prev_range.end, range.end);
2413 *prev_text = format!("{prev_text}{new_text}").into();
2414 } else {
2415 edits.push((range, new_text));
2416 }
2417 }
2418 }
2419 if edits.is_empty() {
2420 return None;
2421 }
2422
2423 self.start_transaction();
2424 self.pending_autoindent.take();
2425 let autoindent_request = autoindent_mode
2426 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2427
2428 let edit_operation = self.text.edit(edits.iter().cloned());
2429 let edit_id = edit_operation.timestamp();
2430
2431 if let Some((before_edit, mode)) = autoindent_request {
2432 let mut delta = 0isize;
2433 let mut previous_setting = None;
2434 let entries: Vec<_> = edits
2435 .into_iter()
2436 .enumerate()
2437 .zip(&edit_operation.as_edit().unwrap().new_text)
2438 .filter(|((_, (range, _)), _)| {
2439 let language = before_edit.language_at(range.start);
2440 let language_id = language.map(|l| l.id());
2441 if let Some((cached_language_id, auto_indent)) = previous_setting
2442 && cached_language_id == language_id
2443 {
2444 auto_indent
2445 } else {
2446 // The auto-indent setting is not present in editorconfigs, hence
2447 // we can avoid passing the file here.
2448 let auto_indent =
2449 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2450 previous_setting = Some((language_id, auto_indent));
2451 auto_indent
2452 }
2453 })
2454 .map(|((ix, (range, _)), new_text)| {
2455 let new_text_length = new_text.len();
2456 let old_start = range.start.to_point(&before_edit);
2457 let new_start = (delta + range.start as isize) as usize;
2458 let range_len = range.end - range.start;
2459 delta += new_text_length as isize - range_len as isize;
2460
2461 // Decide what range of the insertion to auto-indent, and whether
2462 // the first line of the insertion should be considered a newly-inserted line
2463 // or an edit to an existing line.
2464 let mut range_of_insertion_to_indent = 0..new_text_length;
2465 let mut first_line_is_new = true;
2466
2467 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2468 let old_line_end = before_edit.line_len(old_start.row);
2469
2470 if old_start.column > old_line_start {
2471 first_line_is_new = false;
2472 }
2473
2474 if !new_text.contains('\n')
2475 && (old_start.column + (range_len as u32) < old_line_end
2476 || old_line_end == old_line_start)
2477 {
2478 first_line_is_new = false;
2479 }
2480
2481 // When inserting text starting with a newline, avoid auto-indenting the
2482 // previous line.
2483 if new_text.starts_with('\n') {
2484 range_of_insertion_to_indent.start += 1;
2485 first_line_is_new = true;
2486 }
2487
2488 let mut original_indent_column = None;
2489 if let AutoindentMode::Block {
2490 original_indent_columns,
2491 } = &mode
2492 {
2493 original_indent_column = Some(if new_text.starts_with('\n') {
2494 indent_size_for_text(
2495 new_text[range_of_insertion_to_indent.clone()].chars(),
2496 )
2497 .len
2498 } else {
2499 original_indent_columns
2500 .get(ix)
2501 .copied()
2502 .flatten()
2503 .unwrap_or_else(|| {
2504 indent_size_for_text(
2505 new_text[range_of_insertion_to_indent.clone()].chars(),
2506 )
2507 .len
2508 })
2509 });
2510
2511 // Avoid auto-indenting the line after the edit.
2512 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2513 range_of_insertion_to_indent.end -= 1;
2514 }
2515 }
2516
2517 AutoindentRequestEntry {
2518 first_line_is_new,
2519 original_indent_column,
2520 indent_size: before_edit.language_indent_size_at(range.start, cx),
2521 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2522 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2523 }
2524 })
2525 .collect();
2526
2527 if !entries.is_empty() {
2528 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2529 before_edit,
2530 entries,
2531 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2532 ignore_empty_lines: false,
2533 }));
2534 }
2535 }
2536
2537 self.end_transaction(cx);
2538 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2539 Some(edit_id)
2540 }
2541
2542 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2543 self.was_changed();
2544
2545 if self.edits_since::<usize>(old_version).next().is_none() {
2546 return;
2547 }
2548
2549 self.reparse(cx);
2550 cx.emit(BufferEvent::Edited);
2551 if was_dirty != self.is_dirty() {
2552 cx.emit(BufferEvent::DirtyChanged);
2553 }
2554 cx.notify();
2555 }
2556
2557 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2558 where
2559 I: IntoIterator<Item = Range<T>>,
2560 T: ToOffset + Copy,
2561 {
2562 let before_edit = self.snapshot();
2563 let entries = ranges
2564 .into_iter()
2565 .map(|range| AutoindentRequestEntry {
2566 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2567 first_line_is_new: true,
2568 indent_size: before_edit.language_indent_size_at(range.start, cx),
2569 original_indent_column: None,
2570 })
2571 .collect();
2572 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2573 before_edit,
2574 entries,
2575 is_block_mode: false,
2576 ignore_empty_lines: true,
2577 }));
2578 self.request_autoindent(cx);
2579 }
2580
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
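    ///
    /// For example (illustrative position):
    ///
    /// ```ignore
    /// // Open a blank line at row 4, padded by blank lines above and below if needed.
    /// let start = buffer.insert_empty_line(Point::new(4, 0), true, true, cx);
    /// ```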
2583 pub fn insert_empty_line(
2584 &mut self,
2585 position: impl ToPoint,
2586 space_above: bool,
2587 space_below: bool,
2588 cx: &mut Context<Self>,
2589 ) -> Point {
2590 let mut position = position.to_point(self);
2591
2592 self.start_transaction();
2593
2594 self.edit(
2595 [(position..position, "\n")],
2596 Some(AutoindentMode::EachLine),
2597 cx,
2598 );
2599
2600 if position.column > 0 {
2601 position += Point::new(1, 0);
2602 }
2603
2604 if !self.is_line_blank(position.row) {
2605 self.edit(
2606 [(position..position, "\n")],
2607 Some(AutoindentMode::EachLine),
2608 cx,
2609 );
2610 }
2611
2612 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2613 self.edit(
2614 [(position..position, "\n")],
2615 Some(AutoindentMode::EachLine),
2616 cx,
2617 );
2618 position.row += 1;
2619 }
2620
2621 if space_below
2622 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2623 {
2624 self.edit(
2625 [(position..position, "\n")],
2626 Some(AutoindentMode::EachLine),
2627 cx,
2628 );
2629 }
2630
2631 self.end_transaction(cx);
2632
2633 position
2634 }
2635
2636 /// Applies the given remote operations to the buffer.
2637 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2638 self.pending_autoindent.take();
2639 let was_dirty = self.is_dirty();
2640 let old_version = self.version.clone();
2641 let mut deferred_ops = Vec::new();
2642 let buffer_ops = ops
2643 .into_iter()
2644 .filter_map(|op| match op {
2645 Operation::Buffer(op) => Some(op),
2646 _ => {
2647 if self.can_apply_op(&op) {
2648 self.apply_op(op, cx);
2649 } else {
2650 deferred_ops.push(op);
2651 }
2652 None
2653 }
2654 })
2655 .collect::<Vec<_>>();
2656 for operation in buffer_ops.iter() {
2657 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2658 }
2659 self.text.apply_ops(buffer_ops);
2660 self.deferred_ops.insert(deferred_ops);
2661 self.flush_deferred_ops(cx);
2662 self.did_edit(&old_version, was_dirty, cx);
2663 // Notify independently of whether the buffer was edited as the operations could include a
2664 // selection update.
2665 cx.notify();
2666 }
2667
2668 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2669 let mut deferred_ops = Vec::new();
2670 for op in self.deferred_ops.drain().iter().cloned() {
2671 if self.can_apply_op(&op) {
2672 self.apply_op(op, cx);
2673 } else {
2674 deferred_ops.push(op);
2675 }
2676 }
2677 self.deferred_ops.insert(deferred_ops);
2678 }
2679
2680 pub fn has_deferred_ops(&self) -> bool {
2681 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2682 }
2683
2684 fn can_apply_op(&self, operation: &Operation) -> bool {
2685 match operation {
2686 Operation::Buffer(_) => {
2687 unreachable!("buffer operations should never be applied at this layer")
2688 }
2689 Operation::UpdateDiagnostics {
2690 diagnostics: diagnostic_set,
2691 ..
2692 } => diagnostic_set.iter().all(|diagnostic| {
2693 self.text.can_resolve(&diagnostic.range.start)
2694 && self.text.can_resolve(&diagnostic.range.end)
2695 }),
2696 Operation::UpdateSelections { selections, .. } => selections
2697 .iter()
2698 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2699 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2700 }
2701 }
2702
2703 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2704 match operation {
2705 Operation::Buffer(_) => {
2706 unreachable!("buffer operations should never be applied at this layer")
2707 }
2708 Operation::UpdateDiagnostics {
2709 server_id,
2710 diagnostics: diagnostic_set,
2711 lamport_timestamp,
2712 } => {
2713 let snapshot = self.snapshot();
2714 self.apply_diagnostic_update(
2715 server_id,
2716 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2717 lamport_timestamp,
2718 cx,
2719 );
2720 }
2721 Operation::UpdateSelections {
2722 selections,
2723 lamport_timestamp,
2724 line_mode,
2725 cursor_shape,
2726 } => {
2727 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2728 && set.lamport_timestamp > lamport_timestamp
2729 {
2730 return;
2731 }
2732
2733 self.remote_selections.insert(
2734 lamport_timestamp.replica_id,
2735 SelectionSet {
2736 selections,
2737 lamport_timestamp,
2738 line_mode,
2739 cursor_shape,
2740 },
2741 );
2742 self.text.lamport_clock.observe(lamport_timestamp);
2743 self.non_text_state_update_count += 1;
2744 }
2745 Operation::UpdateCompletionTriggers {
2746 triggers,
2747 lamport_timestamp,
2748 server_id,
2749 } => {
2750 if triggers.is_empty() {
2751 self.completion_triggers_per_language_server
2752 .remove(&server_id);
2753 self.completion_triggers = self
2754 .completion_triggers_per_language_server
2755 .values()
2756 .flat_map(|triggers| triggers.iter().cloned())
2757 .collect();
2758 } else {
2759 self.completion_triggers_per_language_server
2760 .insert(server_id, triggers.iter().cloned().collect());
2761 self.completion_triggers.extend(triggers);
2762 }
2763 self.text.lamport_clock.observe(lamport_timestamp);
2764 }
2765 Operation::UpdateLineEnding {
2766 line_ending,
2767 lamport_timestamp,
2768 } => {
2769 self.text.set_line_ending(line_ending);
2770 self.text.lamport_clock.observe(lamport_timestamp);
2771 }
2772 }
2773 }
2774
2775 fn apply_diagnostic_update(
2776 &mut self,
2777 server_id: LanguageServerId,
2778 diagnostics: DiagnosticSet,
2779 lamport_timestamp: clock::Lamport,
2780 cx: &mut Context<Self>,
2781 ) {
2782 if lamport_timestamp > self.diagnostics_timestamp {
2783 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2784 if diagnostics.is_empty() {
2785 if let Ok(ix) = ix {
2786 self.diagnostics.remove(ix);
2787 }
2788 } else {
2789 match ix {
2790 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2791 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2792 };
2793 }
2794 self.diagnostics_timestamp = lamport_timestamp;
2795 self.non_text_state_update_count += 1;
2796 self.text.lamport_clock.observe(lamport_timestamp);
2797 cx.notify();
2798 cx.emit(BufferEvent::DiagnosticsUpdated);
2799 }
2800 }
2801
2802 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2803 self.was_changed();
2804 cx.emit(BufferEvent::Operation {
2805 operation,
2806 is_local,
2807 });
2808 }
2809
2810 /// Removes the selections for a given peer.
2811 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2812 self.remote_selections.remove(&replica_id);
2813 cx.notify();
2814 }
2815
2816 /// Undoes the most recent transaction.
2817 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2818 let was_dirty = self.is_dirty();
2819 let old_version = self.version.clone();
2820
2821 if let Some((transaction_id, operation)) = self.text.undo() {
2822 self.send_operation(Operation::Buffer(operation), true, cx);
2823 self.did_edit(&old_version, was_dirty, cx);
2824 Some(transaction_id)
2825 } else {
2826 None
2827 }
2828 }
2829
2830 /// Manually undoes a specific transaction in the buffer's undo history.
2831 pub fn undo_transaction(
2832 &mut self,
2833 transaction_id: TransactionId,
2834 cx: &mut Context<Self>,
2835 ) -> bool {
2836 let was_dirty = self.is_dirty();
2837 let old_version = self.version.clone();
2838 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2839 self.send_operation(Operation::Buffer(operation), true, cx);
2840 self.did_edit(&old_version, was_dirty, cx);
2841 true
2842 } else {
2843 false
2844 }
2845 }
2846
2847 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2848 pub fn undo_to_transaction(
2849 &mut self,
2850 transaction_id: TransactionId,
2851 cx: &mut Context<Self>,
2852 ) -> bool {
2853 let was_dirty = self.is_dirty();
2854 let old_version = self.version.clone();
2855
2856 let operations = self.text.undo_to_transaction(transaction_id);
2857 let undone = !operations.is_empty();
2858 for operation in operations {
2859 self.send_operation(Operation::Buffer(operation), true, cx);
2860 }
2861 if undone {
2862 self.did_edit(&old_version, was_dirty, cx)
2863 }
2864 undone
2865 }
2866
2867 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2868 let was_dirty = self.is_dirty();
2869 let operation = self.text.undo_operations(counts);
2870 let old_version = self.version.clone();
2871 self.send_operation(Operation::Buffer(operation), true, cx);
2872 self.did_edit(&old_version, was_dirty, cx);
2873 }
2874
    /// Redoes the most recent transaction.
2876 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2877 let was_dirty = self.is_dirty();
2878 let old_version = self.version.clone();
2879
2880 if let Some((transaction_id, operation)) = self.text.redo() {
2881 self.send_operation(Operation::Buffer(operation), true, cx);
2882 self.did_edit(&old_version, was_dirty, cx);
2883 Some(transaction_id)
2884 } else {
2885 None
2886 }
2887 }
2888
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2890 pub fn redo_to_transaction(
2891 &mut self,
2892 transaction_id: TransactionId,
2893 cx: &mut Context<Self>,
2894 ) -> bool {
2895 let was_dirty = self.is_dirty();
2896 let old_version = self.version.clone();
2897
2898 let operations = self.text.redo_to_transaction(transaction_id);
2899 let redone = !operations.is_empty();
2900 for operation in operations {
2901 self.send_operation(Operation::Buffer(operation), true, cx);
2902 }
2903 if redone {
2904 self.did_edit(&old_version, was_dirty, cx)
2905 }
2906 redone
2907 }
2908
    /// Overrides the current completion triggers with the user-provided completion triggers.
2910 pub fn set_completion_triggers(
2911 &mut self,
2912 server_id: LanguageServerId,
2913 triggers: BTreeSet<String>,
2914 cx: &mut Context<Self>,
2915 ) {
2916 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2917 if triggers.is_empty() {
2918 self.completion_triggers_per_language_server
2919 .remove(&server_id);
2920 self.completion_triggers = self
2921 .completion_triggers_per_language_server
2922 .values()
2923 .flat_map(|triggers| triggers.iter().cloned())
2924 .collect();
2925 } else {
2926 self.completion_triggers_per_language_server
2927 .insert(server_id, triggers.clone());
2928 self.completion_triggers.extend(triggers.iter().cloned());
2929 }
2930 self.send_operation(
2931 Operation::UpdateCompletionTriggers {
2932 triggers: triggers.into_iter().collect(),
2933 lamport_timestamp: self.completion_triggers_timestamp,
2934 server_id,
2935 },
2936 true,
2937 cx,
2938 );
2939 cx.notify();
2940 }
2941
2942 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2944 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2945 &self.completion_triggers
2946 }
2947
2948 /// Call this directly after performing edits to prevent the preview tab
2949 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2950 /// to return false until there are additional edits.
2951 pub fn refresh_preview(&mut self) {
2952 self.preview_version = self.version.clone();
2953 }
2954
2955 /// Whether we should preserve the preview status of a tab containing this buffer.
2956 pub fn preserve_preview(&self) -> bool {
2957 !self.has_edits_since(&self.preview_version)
2958 }
2959}
2960
2961#[doc(hidden)]
2962#[cfg(any(test, feature = "test-support"))]
2963impl Buffer {
2964 pub fn edit_via_marked_text(
2965 &mut self,
2966 marked_string: &str,
2967 autoindent_mode: Option<AutoindentMode>,
2968 cx: &mut Context<Self>,
2969 ) {
2970 let edits = self.edits_for_marked_text(marked_string);
2971 self.edit(edits, autoindent_mode, cx);
2972 }
2973
2974 pub fn set_group_interval(&mut self, group_interval: Duration) {
2975 self.text.set_group_interval(group_interval);
2976 }
2977
2978 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2979 where
2980 T: rand::Rng,
2981 {
2982 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2983 let mut last_end = None;
2984 for _ in 0..old_range_count {
2985 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2986 break;
2987 }
2988
2989 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2990 let mut range = self.random_byte_range(new_start, rng);
2991 if rng.random_bool(0.2) {
2992 mem::swap(&mut range.start, &mut range.end);
2993 }
2994 last_end = Some(range.end);
2995
2996 let new_text_len = rng.random_range(0..10);
2997 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2998 new_text = new_text.to_uppercase();
2999
3000 edits.push((range, new_text));
3001 }
3002 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3003 self.edit(edits, None, cx);
3004 }
3005
3006 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3007 let was_dirty = self.is_dirty();
3008 let old_version = self.version.clone();
3009
3010 let ops = self.text.randomly_undo_redo(rng);
3011 if !ops.is_empty() {
3012 for op in ops {
3013 self.send_operation(Operation::Buffer(op), true, cx);
3014 self.did_edit(&old_version, was_dirty, cx);
3015 }
3016 }
3017 }
3018}
3019
3020impl EventEmitter<BufferEvent> for Buffer {}
3021
3022impl Deref for Buffer {
3023 type Target = TextBuffer;
3024
3025 fn deref(&self) -> &Self::Target {
3026 &self.text
3027 }
3028}
3029
3030impl BufferSnapshot {
3031 /// Returns [`IndentSize`] for a given line that respects user settings and
3032 /// language preferences.
3033 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3034 indent_size_for_line(self, row)
3035 }
3036
3037 /// Returns [`IndentSize`] for a given position that respects user settings
3038 /// and language preferences.
3039 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3040 let settings = language_settings(
3041 self.language_at(position).map(|l| l.name()),
3042 self.file(),
3043 cx,
3044 );
3045 if settings.hard_tabs {
3046 IndentSize::tab()
3047 } else {
3048 IndentSize::spaces(settings.tab_size.get())
3049 }
3050 }
3051
3052 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3053 /// is passed in as `single_indent_size`.
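    ///
    /// For example (illustrative rows and indent unit):
    ///
    /// ```ignore
    /// // Suggested indents for the first ten rows, using a 4-space indent unit.
    /// let indents = snapshot.suggested_indents(0..10, IndentSize::spaces(4));
    /// ```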
3054 pub fn suggested_indents(
3055 &self,
3056 rows: impl Iterator<Item = u32>,
3057 single_indent_size: IndentSize,
3058 ) -> BTreeMap<u32, IndentSize> {
3059 let mut result = BTreeMap::new();
3060
3061 for row_range in contiguous_ranges(rows, 10) {
3062 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3063 Some(suggestions) => suggestions,
3064 _ => break,
3065 };
3066
3067 for (row, suggestion) in row_range.zip(suggestions) {
3068 let indent_size = if let Some(suggestion) = suggestion {
3069 result
3070 .get(&suggestion.basis_row)
3071 .copied()
3072 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3073 .with_delta(suggestion.delta, single_indent_size)
3074 } else {
3075 self.indent_size_for_line(row)
3076 };
3077
3078 result.insert(row, indent_size);
3079 }
3080 }
3081
3082 result
3083 }
3084
3085 fn suggest_autoindents(
3086 &self,
3087 row_range: Range<u32>,
3088 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3089 let config = &self.language.as_ref()?.config;
3090 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3091
3092 #[derive(Debug, Clone)]
3093 struct StartPosition {
3094 start: Point,
3095 suffix: SharedString,
3096 }
3097
3098 // Find the suggested indentation ranges based on the syntax tree.
3099 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3100 let end = Point::new(row_range.end, 0);
3101 let range = (start..end).to_offset(&self.text);
3102 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3103 Some(&grammar.indents_config.as_ref()?.query)
3104 });
3105 let indent_configs = matches
3106 .grammars()
3107 .iter()
3108 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3109 .collect::<Vec<_>>();
3110
3111 let mut indent_ranges = Vec::<Range<Point>>::new();
3112 let mut start_positions = Vec::<StartPosition>::new();
3113 let mut outdent_positions = Vec::<Point>::new();
3114 while let Some(mat) = matches.peek() {
3115 let mut start: Option<Point> = None;
3116 let mut end: Option<Point> = None;
3117
3118 let config = indent_configs[mat.grammar_index];
3119 for capture in mat.captures {
3120 if capture.index == config.indent_capture_ix {
3121 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3122 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3123 } else if Some(capture.index) == config.start_capture_ix {
3124 start = Some(Point::from_ts_point(capture.node.end_position()));
3125 } else if Some(capture.index) == config.end_capture_ix {
3126 end = Some(Point::from_ts_point(capture.node.start_position()));
3127 } else if Some(capture.index) == config.outdent_capture_ix {
3128 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3129 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3130 start_positions.push(StartPosition {
3131 start: Point::from_ts_point(capture.node.start_position()),
3132 suffix: suffix.clone(),
3133 });
3134 }
3135 }
3136
3137 matches.advance();
3138 if let Some((start, end)) = start.zip(end) {
3139 if start.row == end.row {
3140 continue;
3141 }
3142 let range = start..end;
3143 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3144 Err(ix) => indent_ranges.insert(ix, range),
3145 Ok(ix) => {
3146 let prev_range = &mut indent_ranges[ix];
3147 prev_range.end = prev_range.end.max(range.end);
3148 }
3149 }
3150 }
3151 }
3152
3153 let mut error_ranges = Vec::<Range<Point>>::new();
3154 let mut matches = self
3155 .syntax
3156 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3157 while let Some(mat) = matches.peek() {
3158 let node = mat.captures[0].node;
3159 let start = Point::from_ts_point(node.start_position());
3160 let end = Point::from_ts_point(node.end_position());
3161 let range = start..end;
3162 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3163 Ok(ix) | Err(ix) => ix,
3164 };
3165 let mut end_ix = ix;
3166 while let Some(existing_range) = error_ranges.get(end_ix) {
3167 if existing_range.end < end {
3168 end_ix += 1;
3169 } else {
3170 break;
3171 }
3172 }
3173 error_ranges.splice(ix..end_ix, [range]);
3174 matches.advance();
3175 }
3176
3177 outdent_positions.sort();
3178 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and set its end to the outdent position.
3181 if let Some(range_to_truncate) = indent_ranges
3182 .iter_mut()
3183 .filter(|indent_range| indent_range.contains(&outdent_position))
3184 .next_back()
3185 {
3186 range_to_truncate.end = outdent_position;
3187 }
3188 }
3189
3190 start_positions.sort_by_key(|b| b.start);
3191
        // Find the suggested indentation increases and decreases based on regexes.
3193 let mut regex_outdent_map = HashMap::default();
3194 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3195 let mut start_positions_iter = start_positions.iter().peekable();
3196
3197 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3198 self.for_each_line(
3199 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3200 ..Point::new(row_range.end, 0),
3201 |row, line| {
3202 if config
3203 .decrease_indent_pattern
3204 .as_ref()
3205 .is_some_and(|regex| regex.is_match(line))
3206 {
3207 indent_change_rows.push((row, Ordering::Less));
3208 }
3209 if config
3210 .increase_indent_pattern
3211 .as_ref()
3212 .is_some_and(|regex| regex.is_match(line))
3213 {
3214 indent_change_rows.push((row + 1, Ordering::Greater));
3215 }
3216 while let Some(pos) = start_positions_iter.peek() {
3217 if pos.start.row < row {
3218 let pos = start_positions_iter.next().unwrap();
3219 last_seen_suffix
3220 .entry(pos.suffix.to_string())
3221 .or_default()
3222 .push(pos.start);
3223 } else {
3224 break;
3225 }
3226 }
3227 for rule in &config.decrease_indent_patterns {
3228 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3229 let row_start_column = self.indent_size_for_line(row).len;
3230 let basis_row = rule
3231 .valid_after
3232 .iter()
3233 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3234 .flatten()
3235 .filter(|start_point| start_point.column <= row_start_column)
3236 .max_by_key(|start_point| start_point.row);
3237 if let Some(outdent_to_row) = basis_row {
3238 regex_outdent_map.insert(row, outdent_to_row.row);
3239 }
3240 break;
3241 }
3242 }
3243 },
3244 );
3245
3246 let mut indent_changes = indent_change_rows.into_iter().peekable();
3247 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3248 prev_non_blank_row.unwrap_or(0)
3249 } else {
3250 row_range.start.saturating_sub(1)
3251 };
3252
3253 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3254 Some(row_range.map(move |row| {
3255 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3256
3257 let mut indent_from_prev_row = false;
3258 let mut outdent_from_prev_row = false;
3259 let mut outdent_to_row = u32::MAX;
3260 let mut from_regex = false;
3261
3262 while let Some((indent_row, delta)) = indent_changes.peek() {
3263 match indent_row.cmp(&row) {
3264 Ordering::Equal => match delta {
3265 Ordering::Less => {
3266 from_regex = true;
3267 outdent_from_prev_row = true
3268 }
3269 Ordering::Greater => {
3270 indent_from_prev_row = true;
3271 from_regex = true
3272 }
3273 _ => {}
3274 },
3275
3276 Ordering::Greater => break,
3277 Ordering::Less => {}
3278 }
3279
3280 indent_changes.next();
3281 }
3282
3283 for range in &indent_ranges {
3284 if range.start.row >= row {
3285 break;
3286 }
3287 if range.start.row == prev_row && range.end > row_start {
3288 indent_from_prev_row = true;
3289 }
3290 if range.end > prev_row_start && range.end <= row_start {
3291 outdent_to_row = outdent_to_row.min(range.start.row);
3292 }
3293 }
3294
3295 if let Some(basis_row) = regex_outdent_map.get(&row) {
3296 indent_from_prev_row = false;
3297 outdent_to_row = *basis_row;
3298 from_regex = true;
3299 }
3300
3301 let within_error = error_ranges
3302 .iter()
3303 .any(|e| e.start.row < row && e.end > row_start);
3304
3305 let suggestion = if outdent_to_row == prev_row
3306 || (outdent_from_prev_row && indent_from_prev_row)
3307 {
3308 Some(IndentSuggestion {
3309 basis_row: prev_row,
3310 delta: Ordering::Equal,
3311 within_error: within_error && !from_regex,
3312 })
3313 } else if indent_from_prev_row {
3314 Some(IndentSuggestion {
3315 basis_row: prev_row,
3316 delta: Ordering::Greater,
3317 within_error: within_error && !from_regex,
3318 })
3319 } else if outdent_to_row < prev_row {
3320 Some(IndentSuggestion {
3321 basis_row: outdent_to_row,
3322 delta: Ordering::Equal,
3323 within_error: within_error && !from_regex,
3324 })
3325 } else if outdent_from_prev_row {
3326 Some(IndentSuggestion {
3327 basis_row: prev_row,
3328 delta: Ordering::Less,
3329 within_error: within_error && !from_regex,
3330 })
3331 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3332 {
3333 Some(IndentSuggestion {
3334 basis_row: prev_row,
3335 delta: Ordering::Equal,
3336 within_error: within_error && !from_regex,
3337 })
3338 } else {
3339 None
3340 };
3341
3342 prev_row = row;
3343 prev_row_start = row_start;
3344 suggestion
3345 }))
3346 }
3347
3348 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3349 while row > 0 {
3350 row -= 1;
3351 if !self.is_line_blank(row) {
3352 return Some(row);
3353 }
3354 }
3355 None
3356 }
3357
3358 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3359 let captures = self.syntax.captures(range, &self.text, |grammar| {
3360 grammar
3361 .highlights_config
3362 .as_ref()
3363 .map(|config| &config.query)
3364 });
3365 let highlight_maps = captures
3366 .grammars()
3367 .iter()
3368 .map(|grammar| grammar.highlight_map())
3369 .collect();
3370 (captures, highlight_maps)
3371 }
3372
3373 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3374 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3375 /// returned in chunks where each chunk has a single syntax highlighting style and
3376 /// diagnostic status.
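    ///
    /// A minimal sketch (illustrative; assumes each chunk exposes its text via `chunk.text`):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     text.push_str(chunk.text);
    /// }
    /// ```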
3377 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3378 let range = range.start.to_offset(self)..range.end.to_offset(self);
3379
3380 let mut syntax = None;
3381 if language_aware {
3382 syntax = Some(self.get_highlights(range.clone()));
3383 }
3384 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3385 let diagnostics = language_aware;
3386 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3387 }
3388
3389 pub fn highlighted_text_for_range<T: ToOffset>(
3390 &self,
3391 range: Range<T>,
3392 override_style: Option<HighlightStyle>,
3393 syntax_theme: &SyntaxTheme,
3394 ) -> HighlightedText {
3395 HighlightedText::from_buffer_range(
3396 range,
3397 &self.text,
3398 &self.syntax,
3399 override_style,
3400 syntax_theme,
3401 )
3402 }
3403
3404 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3406 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3407 let mut line = String::new();
3408 let mut row = range.start.row;
3409 for chunk in self
3410 .as_rope()
3411 .chunks_in_range(range.to_offset(self))
3412 .chain(["\n"])
3413 {
3414 for (newline_ix, text) in chunk.split('\n').enumerate() {
3415 if newline_ix > 0 {
3416 callback(row, &line);
3417 row += 1;
3418 line.clear();
3419 }
3420 line.push_str(text);
3421 }
3422 }
3423 }
3424
3425 /// Iterates over every [`SyntaxLayer`] in the buffer.
3426 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3427 self.syntax_layers_for_range(0..self.len(), true)
3428 }
3429
3430 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3431 let offset = position.to_offset(self);
3432 self.syntax_layers_for_range(offset..offset, false)
3433 .filter(|l| {
3434 if let Some(ranges) = l.included_sub_ranges {
3435 ranges.iter().any(|range| {
3436 let start = range.start.to_offset(self);
3437 start <= offset && {
3438 let end = range.end.to_offset(self);
3439 offset < end
3440 }
3441 })
3442 } else {
3443 l.node().start_byte() <= offset && l.node().end_byte() > offset
3444 }
3445 })
3446 .last()
3447 }
3448
3449 pub fn syntax_layers_for_range<D: ToOffset>(
3450 &self,
3451 range: Range<D>,
3452 include_hidden: bool,
3453 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3454 self.syntax
3455 .layers_for_range(range, &self.text, include_hidden)
3456 }
3457
3458 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3459 &self,
3460 range: Range<D>,
3461 ) -> Option<SyntaxLayer<'_>> {
3462 let range = range.to_offset(self);
3463 self.syntax
3464 .layers_for_range(range, &self.text, false)
3465 .max_by(|a, b| {
3466 if a.depth != b.depth {
3467 a.depth.cmp(&b.depth)
3468 } else if a.offset.0 != b.offset.0 {
3469 a.offset.0.cmp(&b.offset.0)
3470 } else {
3471 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3472 }
3473 })
3474 }
3475
3476 /// Returns the main [`Language`].
3477 pub fn language(&self) -> Option<&Arc<Language>> {
3478 self.language.as_ref()
3479 }
3480
3481 /// Returns the [`Language`] at the given location.
3482 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3483 self.syntax_layer_at(position)
3484 .map(|info| info.language)
3485 .or(self.language.as_ref())
3486 }
3487
3488 /// Returns the settings for the language at the given location.
3489 pub fn settings_at<'a, D: ToOffset>(
3490 &'a self,
3491 position: D,
3492 cx: &'a App,
3493 ) -> Cow<'a, LanguageSettings> {
3494 language_settings(
3495 self.language_at(position).map(|l| l.name()),
3496 self.file.as_ref(),
3497 cx,
3498 )
3499 }
3500
3501 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3502 CharClassifier::new(self.language_scope_at(point))
3503 }
3504
3505 /// Returns the [`LanguageScope`] at the given location.
3506 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3507 let offset = position.to_offset(self);
3508 let mut scope = None;
3509 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3510
3511 // Use the layer that has the smallest node intersecting the given point.
3512 for layer in self
3513 .syntax
3514 .layers_for_range(offset..offset, &self.text, false)
3515 {
3516 let mut cursor = layer.node().walk();
3517
3518 let mut range = None;
3519 loop {
3520 let child_range = cursor.node().byte_range();
3521 if !child_range.contains(&offset) {
3522 break;
3523 }
3524
3525 range = Some(child_range);
3526 if cursor.goto_first_child_for_byte(offset).is_none() {
3527 break;
3528 }
3529 }
3530
3531 if let Some(range) = range
3532 && smallest_range_and_depth.as_ref().is_none_or(
3533 |(smallest_range, smallest_range_depth)| {
3534 if layer.depth > *smallest_range_depth {
3535 true
3536 } else if layer.depth == *smallest_range_depth {
3537 range.len() < smallest_range.len()
3538 } else {
3539 false
3540 }
3541 },
3542 )
3543 {
3544 smallest_range_and_depth = Some((range, layer.depth));
3545 scope = Some(LanguageScope {
3546 language: layer.language.clone(),
3547 override_id: layer.override_id(offset, &self.text),
3548 });
3549 }
3550 }
3551
3552 scope.or_else(|| {
3553 self.language.clone().map(|language| LanguageScope {
3554 language,
3555 override_id: None,
3556 })
3557 })
3558 }
3559
3560 /// Returns a tuple of the range and character kind of the word
3561 /// surrounding the given position.
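    ///
    /// A small usage sketch (illustrative; `snapshot` is assumed to be a
    /// [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// // With the text "let foo = 1;" and offset 5 (inside "foo"), this returns
    /// // the byte range of "foo" (4..7) and `Some(CharKind::Word)`.
    /// let (word_range, kind) = snapshot.surrounding_word(5, None);
    /// ```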
3562 pub fn surrounding_word<T: ToOffset>(
3563 &self,
3564 start: T,
3565 scope_context: Option<CharScopeContext>,
3566 ) -> (Range<usize>, Option<CharKind>) {
3567 let mut start = start.to_offset(self);
3568 let mut end = start;
3569 let mut next_chars = self.chars_at(start).take(128).peekable();
3570 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3571
3572 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3573 let word_kind = cmp::max(
3574 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3575 next_chars.peek().copied().map(|c| classifier.kind(c)),
3576 );
3577
3578 for ch in prev_chars {
3579 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3580 start -= ch.len_utf8();
3581 } else {
3582 break;
3583 }
3584 }
3585
3586 for ch in next_chars {
3587 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3588 end += ch.len_utf8();
3589 } else {
3590 break;
3591 }
3592 }
3593
3594 (start..end, word_kind)
3595 }
3596
3597 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3598 /// range. When `require_larger` is true, the node found must be larger than the query range.
3599 ///
3600 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3601 /// be moved to the root of the tree.
3602 fn goto_node_enclosing_range(
3603 cursor: &mut tree_sitter::TreeCursor,
3604 query_range: &Range<usize>,
3605 require_larger: bool,
3606 ) -> bool {
3607 let mut ascending = false;
3608 loop {
3609 let mut range = cursor.node().byte_range();
3610 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3613 if range.start > query_range.start {
3614 cursor.goto_previous_sibling();
3615 range = cursor.node().byte_range();
3616 }
3617 } else {
3618 // When the query range is non-empty and the current node ends exactly at the start,
3619 // move to the next sibling to find a node that extends beyond the start.
3620 if range.end == query_range.start {
3621 cursor.goto_next_sibling();
3622 range = cursor.node().byte_range();
3623 }
3624 }
3625
3626 let encloses = range.contains_inclusive(query_range)
3627 && (!require_larger || range.len() > query_range.len());
3628 if !encloses {
3629 ascending = true;
3630 if !cursor.goto_parent() {
3631 return false;
3632 }
3633 continue;
3634 } else if ascending {
3635 return true;
3636 }
3637
3638 // Descend into the current node.
3639 if cursor
3640 .goto_first_child_for_byte(query_range.start)
3641 .is_none()
3642 {
3643 return true;
3644 }
3645 }
3646 }
3647
3648 pub fn syntax_ancestor<'a, T: ToOffset>(
3649 &'a self,
3650 range: Range<T>,
3651 ) -> Option<tree_sitter::Node<'a>> {
3652 let range = range.start.to_offset(self)..range.end.to_offset(self);
3653 let mut result: Option<tree_sitter::Node<'a>> = None;
3654 for layer in self
3655 .syntax
3656 .layers_for_range(range.clone(), &self.text, true)
3657 {
3658 let mut cursor = layer.node().walk();
3659
3660 // Find the node that both contains the range and is larger than it.
3661 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3662 continue;
3663 }
3664
3665 let left_node = cursor.node();
3666 let mut layer_result = left_node;
3667
3668 // For an empty range, try to find another node immediately to the right of the range.
3669 if left_node.end_byte() == range.start {
3670 let mut right_node = None;
3671 while !cursor.goto_next_sibling() {
3672 if !cursor.goto_parent() {
3673 break;
3674 }
3675 }
3676
3677 while cursor.node().start_byte() == range.start {
3678 right_node = Some(cursor.node());
3679 if !cursor.goto_first_child() {
3680 break;
3681 }
3682 }
3683
3684 // If there is a candidate node on both sides of the (empty) range, then
3685 // decide between the two by favoring a named node over an anonymous token.
3686 // If both nodes are the same in that regard, favor the right one.
3687 if let Some(right_node) = right_node
3688 && (right_node.is_named() || !left_node.is_named())
3689 {
3690 layer_result = right_node;
3691 }
3692 }
3693
3694 if let Some(previous_result) = &result
3695 && previous_result.byte_range().len() < layer_result.byte_range().len()
3696 {
3697 continue;
3698 }
3699 result = Some(layer_result);
3700 }
3701
3702 result
3703 }
3704
3705 /// Find the previous sibling syntax node at the given range.
3706 ///
3707 /// This function locates the syntax node that precedes the node containing
3708 /// the given range. It searches hierarchically by:
3709 /// 1. Finding the node that contains the given range
3710 /// 2. Looking for the previous sibling at the same tree level
3711 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3712 ///
3713 /// Returns `None` if there is no previous sibling at any ancestor level.
3714 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3715 &'a self,
3716 range: Range<T>,
3717 ) -> Option<tree_sitter::Node<'a>> {
3718 let range = range.start.to_offset(self)..range.end.to_offset(self);
3719 let mut result: Option<tree_sitter::Node<'a>> = None;
3720
3721 for layer in self
3722 .syntax
3723 .layers_for_range(range.clone(), &self.text, true)
3724 {
3725 let mut cursor = layer.node().walk();
3726
3727 // Find the node that contains the range
3728 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3729 continue;
3730 }
3731
3732 // Look for the previous sibling, moving up ancestor levels if needed
3733 loop {
3734 if cursor.goto_previous_sibling() {
3735 let layer_result = cursor.node();
3736
3737 if let Some(previous_result) = &result {
3738 if previous_result.byte_range().end < layer_result.byte_range().end {
3739 continue;
3740 }
3741 }
3742 result = Some(layer_result);
3743 break;
3744 }
3745
3746 // No sibling found at this level, try moving up to parent
3747 if !cursor.goto_parent() {
3748 break;
3749 }
3750 }
3751 }
3752
3753 result
3754 }
3755
3756 /// Find the next sibling syntax node at the given range.
3757 ///
3758 /// This function locates the syntax node that follows the node containing
3759 /// the given range. It searches hierarchically by:
3760 /// 1. Finding the node that contains the given range
3761 /// 2. Looking for the next sibling at the same tree level
3762 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3763 ///
3764 /// Returns `None` if there is no next sibling at any ancestor level.
3765 pub fn syntax_next_sibling<'a, T: ToOffset>(
3766 &'a self,
3767 range: Range<T>,
3768 ) -> Option<tree_sitter::Node<'a>> {
3769 let range = range.start.to_offset(self)..range.end.to_offset(self);
3770 let mut result: Option<tree_sitter::Node<'a>> = None;
3771
3772 for layer in self
3773 .syntax
3774 .layers_for_range(range.clone(), &self.text, true)
3775 {
3776 let mut cursor = layer.node().walk();
3777
3778 // Find the node that contains the range
3779 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3780 continue;
3781 }
3782
3783 // Look for the next sibling, moving up ancestor levels if needed
3784 loop {
3785 if cursor.goto_next_sibling() {
3786 let layer_result = cursor.node();
3787
3788 if let Some(previous_result) = &result {
3789 if previous_result.byte_range().start > layer_result.byte_range().start {
3790 continue;
3791 }
3792 }
3793 result = Some(layer_result);
3794 break;
3795 }
3796
3797 // No sibling found at this level, try moving up to parent
3798 if !cursor.goto_parent() {
3799 break;
3800 }
3801 }
3802 }
3803
3804 result
3805 }
3806
3807 /// Returns the root syntax node within the given row
3808 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3809 let start_offset = position.to_offset(self);
3810
3811 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3812
3813 let layer = self
3814 .syntax
3815 .layers_for_range(start_offset..start_offset, &self.text, true)
3816 .next()?;
3817
3818 let mut cursor = layer.node().walk();
3819
3820 // Descend to the first leaf that touches the start of the range.
3821 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3822 if cursor.node().end_byte() == start_offset {
3823 cursor.goto_next_sibling();
3824 }
3825 }
3826
3827 // Ascend to the root node within the same row.
3828 while cursor.goto_parent() {
3829 if cursor.node().start_position().row != row {
3830 break;
3831 }
3832 }
3833
3834 Some(cursor.node())
3835 }
3836
3837 /// Returns the outline for the buffer.
3838 ///
3839 /// This method allows passing an optional [`SyntaxTheme`] to
3840 /// syntax-highlight the returned symbols.
3841 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3842 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3843 }
3844
3845 /// Returns all the symbols that contain the given position.
3846 ///
3847 /// This method allows passing an optional [`SyntaxTheme`] to
3848 /// syntax-highlight the returned symbols.
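    ///
    /// A minimal usage sketch (illustrative; `snapshot` is assumed to be a
    /// [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// // Prints the chain of symbols (e.g. module, impl, fn) containing offset 42.
    /// for item in snapshot.symbols_containing(42, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```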
3849 pub fn symbols_containing<T: ToOffset>(
3850 &self,
3851 position: T,
3852 theme: Option<&SyntaxTheme>,
3853 ) -> Vec<OutlineItem<Anchor>> {
3854 let position = position.to_offset(self);
3855 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3856 let end = self.clip_offset(position + 1, Bias::Right);
3857 let mut items = self.outline_items_containing(start..end, false, theme);
3858 let mut prev_depth = None;
3859 items.retain(|item| {
3860 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3861 prev_depth = Some(item.depth);
3862 result
3863 });
3864 items
3865 }
3866
3867 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3868 let range = range.to_offset(self);
3869 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3870 grammar.outline_config.as_ref().map(|c| &c.query)
3871 });
3872 let configs = matches
3873 .grammars()
3874 .iter()
3875 .map(|g| g.outline_config.as_ref().unwrap())
3876 .collect::<Vec<_>>();
3877
3878 while let Some(mat) = matches.peek() {
3879 let config = &configs[mat.grammar_index];
3880 let containing_item_node = maybe!({
3881 let item_node = mat.captures.iter().find_map(|cap| {
3882 if cap.index == config.item_capture_ix {
3883 Some(cap.node)
3884 } else {
3885 None
3886 }
3887 })?;
3888
3889 let item_byte_range = item_node.byte_range();
3890 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3891 None
3892 } else {
3893 Some(item_node)
3894 }
3895 });
3896
3897 if let Some(item_node) = containing_item_node {
3898 return Some(
3899 Point::from_ts_point(item_node.start_position())
3900 ..Point::from_ts_point(item_node.end_position()),
3901 );
3902 }
3903
3904 matches.advance();
3905 }
3906 None
3907 }
3908
3909 pub fn outline_items_containing<T: ToOffset>(
3910 &self,
3911 range: Range<T>,
3912 include_extra_context: bool,
3913 theme: Option<&SyntaxTheme>,
3914 ) -> Vec<OutlineItem<Anchor>> {
3915 self.outline_items_containing_internal(
3916 range,
3917 include_extra_context,
3918 theme,
3919 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3920 )
3921 }
3922
3923 pub fn outline_items_as_points_containing<T: ToOffset>(
3924 &self,
3925 range: Range<T>,
3926 include_extra_context: bool,
3927 theme: Option<&SyntaxTheme>,
3928 ) -> Vec<OutlineItem<Point>> {
3929 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3930 range
3931 })
3932 }
3933
3934 fn outline_items_containing_internal<T: ToOffset, U>(
3935 &self,
3936 range: Range<T>,
3937 include_extra_context: bool,
3938 theme: Option<&SyntaxTheme>,
3939 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3940 ) -> Vec<OutlineItem<U>> {
3941 let range = range.to_offset(self);
3942 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3943 grammar.outline_config.as_ref().map(|c| &c.query)
3944 });
3945
3946 let mut items = Vec::new();
3947 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3948 while let Some(mat) = matches.peek() {
3949 let config = matches.grammars()[mat.grammar_index]
3950 .outline_config
3951 .as_ref()
3952 .unwrap();
3953 if let Some(item) =
3954 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3955 {
3956 items.push(item);
3957 } else if let Some(capture) = mat
3958 .captures
3959 .iter()
3960 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3961 {
3962 let capture_range = capture.node.start_position()..capture.node.end_position();
3963 let mut capture_row_range =
3964 capture_range.start.row as u32..capture_range.end.row as u32;
3965 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3966 {
3967 capture_row_range.end -= 1;
3968 }
3969 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3970 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3971 last_row_range.end = capture_row_range.end;
3972 } else {
3973 annotation_row_ranges.push(capture_row_range);
3974 }
3975 } else {
3976 annotation_row_ranges.push(capture_row_range);
3977 }
3978 }
3979 matches.advance();
3980 }
3981
3982 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3983
3984 // Assign depths based on containment relationships and convert to anchors.
3985 let mut item_ends_stack = Vec::<Point>::new();
3986 let mut anchor_items = Vec::new();
3987 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3988 for item in items {
3989 while let Some(last_end) = item_ends_stack.last().copied() {
3990 if last_end < item.range.end {
3991 item_ends_stack.pop();
3992 } else {
3993 break;
3994 }
3995 }
3996
3997 let mut annotation_row_range = None;
3998 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3999 let row_preceding_item = item.range.start.row.saturating_sub(1);
4000 if next_annotation_row_range.end < row_preceding_item {
4001 annotation_row_ranges.next();
4002 } else {
4003 if next_annotation_row_range.end == row_preceding_item {
4004 annotation_row_range = Some(next_annotation_row_range.clone());
4005 annotation_row_ranges.next();
4006 }
4007 break;
4008 }
4009 }
4010
4011 anchor_items.push(OutlineItem {
4012 depth: item_ends_stack.len(),
4013 range: range_callback(self, item.range.clone()),
4014 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4015 text: item.text,
4016 highlight_ranges: item.highlight_ranges,
4017 name_ranges: item.name_ranges,
4018 body_range: item.body_range.map(|r| range_callback(self, r)),
4019 annotation_range: annotation_row_range.map(|annotation_range| {
4020 let point_range = Point::new(annotation_range.start, 0)
4021 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4022 range_callback(self, point_range)
4023 }),
4024 });
4025 item_ends_stack.push(item.range.end);
4026 }
4027
4028 anchor_items
4029 }
4030
4031 fn next_outline_item(
4032 &self,
4033 config: &OutlineConfig,
4034 mat: &SyntaxMapMatch,
4035 range: &Range<usize>,
4036 include_extra_context: bool,
4037 theme: Option<&SyntaxTheme>,
4038 ) -> Option<OutlineItem<Point>> {
4039 let item_node = mat.captures.iter().find_map(|cap| {
4040 if cap.index == config.item_capture_ix {
4041 Some(cap.node)
4042 } else {
4043 None
4044 }
4045 })?;
4046
4047 let item_byte_range = item_node.byte_range();
4048 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4049 return None;
4050 }
4051 let item_point_range = Point::from_ts_point(item_node.start_position())
4052 ..Point::from_ts_point(item_node.end_position());
4053
4054 let mut open_point = None;
4055 let mut close_point = None;
4056
4057 let mut buffer_ranges = Vec::new();
4058 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4059 let mut range = node.start_byte()..node.end_byte();
4060 let start = node.start_position();
4061 if node.end_position().row > start.row {
4062 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4063 }
4064
4065 if !range.is_empty() {
4066 buffer_ranges.push((range, node_is_name));
4067 }
4068 };
4069
4070 for capture in mat.captures {
4071 if capture.index == config.name_capture_ix {
4072 add_to_buffer_ranges(capture.node, true);
4073 } else if Some(capture.index) == config.context_capture_ix
4074 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4075 {
4076 add_to_buffer_ranges(capture.node, false);
4077 } else {
4078 if Some(capture.index) == config.open_capture_ix {
4079 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4080 } else if Some(capture.index) == config.close_capture_ix {
4081 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4082 }
4083 }
4084 }
4085
4086 if buffer_ranges.is_empty() {
4087 return None;
4088 }
4089 let source_range_for_text =
4090 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4091
4092 let mut text = String::new();
4093 let mut highlight_ranges = Vec::new();
4094 let mut name_ranges = Vec::new();
4095 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4096 let mut last_buffer_range_end = 0;
4097 for (buffer_range, is_name) in buffer_ranges {
4098 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4099 if space_added {
4100 text.push(' ');
4101 }
4102 let before_append_len = text.len();
4103 let mut offset = buffer_range.start;
4104 chunks.seek(buffer_range.clone());
4105 for mut chunk in chunks.by_ref() {
4106 if chunk.text.len() > buffer_range.end - offset {
4107 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4108 offset = buffer_range.end;
4109 } else {
4110 offset += chunk.text.len();
4111 }
4112 let style = chunk
4113 .syntax_highlight_id
4114 .zip(theme)
4115 .and_then(|(highlight, theme)| highlight.style(theme));
4116 if let Some(style) = style {
4117 let start = text.len();
4118 let end = start + chunk.text.len();
4119 highlight_ranges.push((start..end, style));
4120 }
4121 text.push_str(chunk.text);
4122 if offset >= buffer_range.end {
4123 break;
4124 }
4125 }
4126 if is_name {
4127 let after_append_len = text.len();
4128 let start = if space_added && !name_ranges.is_empty() {
4129 before_append_len - 1
4130 } else {
4131 before_append_len
4132 };
4133 name_ranges.push(start..after_append_len);
4134 }
4135 last_buffer_range_end = buffer_range.end;
4136 }
4137
4138 Some(OutlineItem {
4139 depth: 0, // We'll calculate the depth later
4140 range: item_point_range,
4141 source_range_for_text: source_range_for_text.to_point(self),
4142 text,
4143 highlight_ranges,
4144 name_ranges,
4145 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4146 annotation_range: None,
4147 })
4148 }
4149
4150 pub fn function_body_fold_ranges<T: ToOffset>(
4151 &self,
4152 within: Range<T>,
4153 ) -> impl Iterator<Item = Range<usize>> + '_ {
4154 self.text_object_ranges(within, TreeSitterOptions::default())
4155 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4156 }
4157
    /// For each grammar in the language, runs the [`tree_sitter::Query`]
    /// returned by the provided callback against the given range.
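    ///
    /// A minimal usage sketch, mirroring how this method is used elsewhere in this
    /// module (illustrative; `snapshot` is assumed to be a [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|c| &c.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // `mat.grammar_index` indexes into `matches.grammars()`.
    ///     let _ = (mat.grammar_index, mat.captures);
    ///     matches.advance();
    /// }
    /// ```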
4160 pub fn matches(
4161 &self,
4162 range: Range<usize>,
4163 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4164 ) -> SyntaxMapMatches<'_> {
4165 self.syntax.matches(range, self, query)
4166 }
4167
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
    /// Hence, this may return more bracket pairs than the range itself contains.
    ///
    /// Chunks already listed in `known_chunks` are omitted.
    /// The resulting bracket match collections are not ordered.
4173 pub fn fetch_bracket_ranges(
4174 &self,
4175 range: Range<usize>,
4176 known_chunks: Option<(&Global, &HashSet<Range<BufferRow>>)>,
4177 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4178 let mut tree_sitter_data = self.latest_tree_sitter_data().clone();
4179
4180 let known_chunks = match known_chunks {
4181 Some((known_version, known_chunks)) => {
4182 if !tree_sitter_data
4183 .chunks
4184 .version()
4185 .changed_since(known_version)
4186 {
4187 known_chunks.clone()
4188 } else {
4189 HashSet::default()
4190 }
4191 }
4192 None => HashSet::default(),
4193 };
4194
4195 let mut new_bracket_matches = HashMap::default();
4196 let mut all_bracket_matches = HashMap::default();
4197
4198 for chunk in tree_sitter_data
4199 .chunks
4200 .applicable_chunks(&[self.anchor_before(range.start)..self.anchor_after(range.end)])
4201 {
4202 if known_chunks.contains(&chunk.row_range()) {
4203 continue;
4204 }
4205 let Some(chunk_range) = tree_sitter_data.chunks.chunk_range(chunk) else {
4206 continue;
4207 };
4208 let chunk_range = chunk_range.to_offset(&tree_sitter_data.chunks.snapshot);
4209
4210 let bracket_matches = match tree_sitter_data.brackets_by_chunks[chunk.id].take() {
4211 Some(cached_brackets) => cached_brackets,
4212 None => {
4213 let mut bracket_pairs_ends = Vec::new();
4214 let mut matches =
4215 self.syntax
4216 .matches(chunk_range.clone(), &self.text, |grammar| {
4217 grammar.brackets_config.as_ref().map(|c| &c.query)
4218 });
4219 let configs = matches
4220 .grammars()
4221 .iter()
4222 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4223 .collect::<Vec<_>>();
4224
4225 let chunk_range = chunk_range.clone();
4226 let new_matches = iter::from_fn(move || {
4227 while let Some(mat) = matches.peek() {
4228 let mut open = None;
4229 let mut close = None;
4230 let depth = mat.depth;
4231 let config = configs[mat.grammar_index];
4232 let pattern = &config.patterns[mat.pattern_index];
4233 for capture in mat.captures {
4234 if capture.index == config.open_capture_ix {
4235 open = Some(capture.node.byte_range());
4236 } else if capture.index == config.close_capture_ix {
4237 close = Some(capture.node.byte_range());
4238 }
4239 }
4240
4241 matches.advance();
4242
4243 let Some((open_range, close_range)) = open.zip(close) else {
4244 continue;
4245 };
4246
4247 let bracket_range = open_range.start..=close_range.end;
4248 if !bracket_range.overlaps(&chunk_range) {
4249 continue;
4250 }
4251
4252 return Some((open_range, close_range, pattern, depth));
4253 }
4254 None
4255 })
4256 .sorted_by_key(|(open_range, _, _, _)| open_range.start)
4257 .map(|(open_range, close_range, pattern, syntax_layer_depth)| {
4258 while let Some(&last_bracket_end) = bracket_pairs_ends.last() {
4259 if last_bracket_end <= open_range.start {
4260 bracket_pairs_ends.pop();
4261 } else {
4262 break;
4263 }
4264 }
4265
4266 let bracket_depth = bracket_pairs_ends.len();
4267 bracket_pairs_ends.push(close_range.end);
4268
4269 BracketMatch {
4270 open_range,
4271 close_range,
4272 syntax_layer_depth,
4273 newline_only: pattern.newline_only,
4274 color_index: pattern.rainbow_exclude.not().then_some(bracket_depth),
4275 }
4276 })
4277 .collect::<Vec<_>>();
4278
4279 new_bracket_matches.insert(chunk.id, new_matches.clone());
4280 new_matches
4281 }
4282 };
4283 all_bracket_matches.insert(chunk.row_range(), bracket_matches);
4284 }
4285
4286 let mut latest_tree_sitter_data = self.latest_tree_sitter_data();
4287 if latest_tree_sitter_data.chunks.version() == &self.version {
4288 for (chunk_id, new_matches) in new_bracket_matches {
4289 let old_chunks = &mut latest_tree_sitter_data.brackets_by_chunks[chunk_id];
4290 if old_chunks.is_none() {
4291 *old_chunks = Some(new_matches);
4292 }
4293 }
4294 }
4295
4296 all_bracket_matches
4297 }
4298
4299 fn latest_tree_sitter_data(&self) -> MutexGuard<'_, RawMutex, TreeSitterData> {
4300 let mut tree_sitter_data = self.tree_sitter_data.lock();
4301 if self
4302 .version
4303 .changed_since(tree_sitter_data.chunks.version())
4304 {
4305 *tree_sitter_data = TreeSitterData::new(self.text.clone());
4306 }
4307 tree_sitter_data
4308 }
4309
4310 pub fn all_bracket_ranges(
4311 &self,
4312 range: Range<usize>,
4313 ) -> impl Iterator<Item = BracketMatch<usize>> {
4314 self.fetch_bracket_ranges(range.clone(), None)
4315 .into_values()
4316 .flatten()
4317 .filter(move |bracket_match| {
4318 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4319 bracket_range.overlaps(&range)
4320 })
4321 }
4322
4323 /// Returns bracket range pairs overlapping or adjacent to `range`
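    ///
    /// A minimal usage sketch (illustrative; `snapshot` is assumed to be a
    /// [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(10..20) {
    ///     // `open_range` and `close_range` are the byte ranges of the two brackets.
    ///     let _ = (&pair.open_range, &pair.close_range);
    /// }
    /// ```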
4324 pub fn bracket_ranges<T: ToOffset>(
4325 &self,
4326 range: Range<T>,
4327 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4328 // Find bracket pairs that *inclusively* contain the given range.
4329 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4330 self.all_bracket_ranges(range)
4331 .filter(|pair| !pair.newline_only)
4332 }
4333
4334 pub fn debug_variables_query<T: ToOffset>(
4335 &self,
4336 range: Range<T>,
4337 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4338 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4339
4340 let mut matches = self.syntax.matches_with_options(
4341 range.clone(),
4342 &self.text,
4343 TreeSitterOptions::default(),
4344 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4345 );
4346
4347 let configs = matches
4348 .grammars()
4349 .iter()
4350 .map(|grammar| grammar.debug_variables_config.as_ref())
4351 .collect::<Vec<_>>();
4352
4353 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4354
4355 iter::from_fn(move || {
4356 loop {
4357 while let Some(capture) = captures.pop() {
4358 if capture.0.overlaps(&range) {
4359 return Some(capture);
4360 }
4361 }
4362
4363 let mat = matches.peek()?;
4364
4365 let Some(config) = configs[mat.grammar_index].as_ref() else {
4366 matches.advance();
4367 continue;
4368 };
4369
4370 for capture in mat.captures {
4371 let Some(ix) = config
4372 .objects_by_capture_ix
4373 .binary_search_by_key(&capture.index, |e| e.0)
4374 .ok()
4375 else {
4376 continue;
4377 };
4378 let text_object = config.objects_by_capture_ix[ix].1;
4379 let byte_range = capture.node.byte_range();
4380
4381 let mut found = false;
4382 for (range, existing) in captures.iter_mut() {
4383 if existing == &text_object {
4384 range.start = range.start.min(byte_range.start);
4385 range.end = range.end.max(byte_range.end);
4386 found = true;
4387 break;
4388 }
4389 }
4390
4391 if !found {
4392 captures.push((byte_range, text_object));
4393 }
4394 }
4395
4396 matches.advance();
4397 }
4398 })
4399 }
4400
4401 pub fn text_object_ranges<T: ToOffset>(
4402 &self,
4403 range: Range<T>,
4404 options: TreeSitterOptions,
4405 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4406 let range =
4407 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4408
4409 let mut matches =
4410 self.syntax
4411 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4412 grammar.text_object_config.as_ref().map(|c| &c.query)
4413 });
4414
4415 let configs = matches
4416 .grammars()
4417 .iter()
4418 .map(|grammar| grammar.text_object_config.as_ref())
4419 .collect::<Vec<_>>();
4420
4421 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4422
4423 iter::from_fn(move || {
4424 loop {
4425 while let Some(capture) = captures.pop() {
4426 if capture.0.overlaps(&range) {
4427 return Some(capture);
4428 }
4429 }
4430
4431 let mat = matches.peek()?;
4432
4433 let Some(config) = configs[mat.grammar_index].as_ref() else {
4434 matches.advance();
4435 continue;
4436 };
4437
4438 for capture in mat.captures {
4439 let Some(ix) = config
4440 .text_objects_by_capture_ix
4441 .binary_search_by_key(&capture.index, |e| e.0)
4442 .ok()
4443 else {
4444 continue;
4445 };
4446 let text_object = config.text_objects_by_capture_ix[ix].1;
4447 let byte_range = capture.node.byte_range();
4448
4449 let mut found = false;
4450 for (range, existing) in captures.iter_mut() {
4451 if existing == &text_object {
4452 range.start = range.start.min(byte_range.start);
4453 range.end = range.end.max(byte_range.end);
4454 found = true;
4455 break;
4456 }
4457 }
4458
4459 if !found {
4460 captures.push((byte_range, text_object));
4461 }
4462 }
4463
4464 matches.advance();
4465 }
4466 })
4467 }
4468
4469 /// Returns enclosing bracket ranges containing the given range
4470 pub fn enclosing_bracket_ranges<T: ToOffset>(
4471 &self,
4472 range: Range<T>,
4473 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4474 let range = range.start.to_offset(self)..range.end.to_offset(self);
4475
4476 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4477 let max_depth = result
4478 .iter()
4479 .map(|mat| mat.syntax_layer_depth)
4480 .max()
4481 .unwrap_or(0);
4482 result.into_iter().filter(move |pair| {
4483 pair.open_range.start <= range.start
4484 && pair.close_range.end >= range.end
4485 && pair.syntax_layer_depth == max_depth
4486 })
4487 }
4488
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
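    ///
    /// A minimal usage sketch (illustrative; `snapshot` is assumed to be a
    /// [`BufferSnapshot`], and the filter shown is an arbitrary example):
    ///
    /// ```ignore
    /// // Only consider bracket pairs that have at least one character between them.
    /// let non_empty = |open: Range<usize>, close: Range<usize>| open.end < close.start;
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(12..12, Some(&non_empty));
    /// ```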
4492 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4493 &self,
4494 range: Range<T>,
4495 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4496 ) -> Option<(Range<usize>, Range<usize>)> {
4497 let range = range.start.to_offset(self)..range.end.to_offset(self);
4498
4499 // Get the ranges of the innermost pair of brackets.
4500 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4501
4502 for pair in self.enclosing_bracket_ranges(range) {
4503 if let Some(range_filter) = range_filter
4504 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4505 {
4506 continue;
4507 }
4508
4509 let len = pair.close_range.end - pair.open_range.start;
4510
4511 if let Some((existing_open, existing_close)) = &result {
4512 let existing_len = existing_close.end - existing_open.start;
4513 if len > existing_len {
4514 continue;
4515 }
4516 }
4517
4518 result = Some((pair.open_range, pair.close_range));
4519 }
4520
4521 result
4522 }
4523
    /// Returns the byte ranges of any matches of the redaction query.
4525 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4526 /// will be run on the relevant section of the buffer.
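    ///
    /// A minimal usage sketch (illustrative; `snapshot` is assumed to be a
    /// [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// for range in snapshot.redacted_ranges(0..snapshot.len()) {
    ///     // `range` is a byte range whose text should be rendered redacted.
    ///     let _ = range;
    /// }
    /// ```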
4527 pub fn redacted_ranges<T: ToOffset>(
4528 &self,
4529 range: Range<T>,
4530 ) -> impl Iterator<Item = Range<usize>> + '_ {
4531 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4532 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4533 grammar
4534 .redactions_config
4535 .as_ref()
4536 .map(|config| &config.query)
4537 });
4538
4539 let configs = syntax_matches
4540 .grammars()
4541 .iter()
4542 .map(|grammar| grammar.redactions_config.as_ref())
4543 .collect::<Vec<_>>();
4544
4545 iter::from_fn(move || {
4546 let redacted_range = syntax_matches
4547 .peek()
4548 .and_then(|mat| {
4549 configs[mat.grammar_index].and_then(|config| {
4550 mat.captures
4551 .iter()
4552 .find(|capture| capture.index == config.redaction_capture_ix)
4553 })
4554 })
4555 .map(|mat| mat.node.byte_range());
4556 syntax_matches.advance();
4557 redacted_range
4558 })
4559 }
4560
4561 pub fn injections_intersecting_range<T: ToOffset>(
4562 &self,
4563 range: Range<T>,
4564 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4565 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4566
4567 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4568 grammar
4569 .injection_config
4570 .as_ref()
4571 .map(|config| &config.query)
4572 });
4573
4574 let configs = syntax_matches
4575 .grammars()
4576 .iter()
4577 .map(|grammar| grammar.injection_config.as_ref())
4578 .collect::<Vec<_>>();
4579
4580 iter::from_fn(move || {
4581 let ranges = syntax_matches.peek().and_then(|mat| {
4582 let config = &configs[mat.grammar_index]?;
4583 let content_capture_range = mat.captures.iter().find_map(|capture| {
4584 if capture.index == config.content_capture_ix {
4585 Some(capture.node.byte_range())
4586 } else {
4587 None
4588 }
4589 })?;
4590 let language = self.language_at(content_capture_range.start)?;
4591 Some((content_capture_range, language))
4592 });
4593 syntax_matches.advance();
4594 ranges
4595 })
4596 }
4597
4598 pub fn runnable_ranges(
4599 &self,
4600 offset_range: Range<usize>,
4601 ) -> impl Iterator<Item = RunnableRange> + '_ {
4602 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4603 grammar.runnable_config.as_ref().map(|config| &config.query)
4604 });
4605
4606 let test_configs = syntax_matches
4607 .grammars()
4608 .iter()
4609 .map(|grammar| grammar.runnable_config.as_ref())
4610 .collect::<Vec<_>>();
4611
4612 iter::from_fn(move || {
4613 loop {
4614 let mat = syntax_matches.peek()?;
4615
4616 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4617 let mut run_range = None;
4618 let full_range = mat.captures.iter().fold(
4619 Range {
4620 start: usize::MAX,
4621 end: 0,
4622 },
4623 |mut acc, next| {
4624 let byte_range = next.node.byte_range();
4625 if acc.start > byte_range.start {
4626 acc.start = byte_range.start;
4627 }
4628 if acc.end < byte_range.end {
4629 acc.end = byte_range.end;
4630 }
4631 acc
4632 },
4633 );
4634 if full_range.start > full_range.end {
4635 // We did not find a full spanning range of this match.
4636 return None;
4637 }
4638 let extra_captures: SmallVec<[_; 1]> =
4639 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4640 test_configs
4641 .extra_captures
4642 .get(capture.index as usize)
4643 .cloned()
4644 .and_then(|tag_name| match tag_name {
4645 RunnableCapture::Named(name) => {
4646 Some((capture.node.byte_range(), name))
4647 }
4648 RunnableCapture::Run => {
4649 let _ = run_range.insert(capture.node.byte_range());
4650 None
4651 }
4652 })
4653 }));
4654 let run_range = run_range?;
4655 let tags = test_configs
4656 .query
4657 .property_settings(mat.pattern_index)
4658 .iter()
4659 .filter_map(|property| {
4660 if *property.key == *"tag" {
4661 property
4662 .value
4663 .as_ref()
4664 .map(|value| RunnableTag(value.to_string().into()))
4665 } else {
4666 None
4667 }
4668 })
4669 .collect();
4670 let extra_captures = extra_captures
4671 .into_iter()
4672 .map(|(range, name)| {
4673 (
4674 name.to_string(),
4675 self.text_for_range(range).collect::<String>(),
4676 )
4677 })
4678 .collect();
4679 // All tags should have the same range.
4680 Some(RunnableRange {
4681 run_range,
4682 full_range,
4683 runnable: Runnable {
4684 tags,
4685 language: mat.language,
4686 buffer: self.remote_id(),
4687 },
4688 extra_captures,
4689 buffer_id: self.remote_id(),
4690 })
4691 });
4692
4693 syntax_matches.advance();
4694 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. We don't want to return `None`
                    // from this iterator just because a capture did not contain a run marker, so we just loop
                    // around for the next capture.
4697 return test_range;
4698 }
4699 }
4700 })
4701 }
4702
4703 /// Returns selections for remote peers intersecting the given range.
4704 #[allow(clippy::type_complexity)]
4705 pub fn selections_in_range(
4706 &self,
4707 range: Range<Anchor>,
4708 include_local: bool,
4709 ) -> impl Iterator<
4710 Item = (
4711 ReplicaId,
4712 bool,
4713 CursorShape,
4714 impl Iterator<Item = &Selection<Anchor>> + '_,
4715 ),
4716 > + '_ {
4717 self.remote_selections
4718 .iter()
4719 .filter(move |(replica_id, set)| {
4720 (include_local || **replica_id != self.text.replica_id())
4721 && !set.selections.is_empty()
4722 })
4723 .map(move |(replica_id, set)| {
4724 let start_ix = match set.selections.binary_search_by(|probe| {
4725 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4726 }) {
4727 Ok(ix) | Err(ix) => ix,
4728 };
4729 let end_ix = match set.selections.binary_search_by(|probe| {
4730 probe.start.cmp(&range.end, self).then(Ordering::Less)
4731 }) {
4732 Ok(ix) | Err(ix) => ix,
4733 };
4734
4735 (
4736 *replica_id,
4737 set.line_mode,
4738 set.cursor_shape,
4739 set.selections[start_ix..end_ix].iter(),
4740 )
4741 })
4742 }
4743
    /// Returns whether the buffer contains any diagnostics.
4745 pub fn has_diagnostics(&self) -> bool {
4746 !self.diagnostics.is_empty()
4747 }
4748
4749 /// Returns all the diagnostics intersecting the given range.
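    ///
    /// A minimal usage sketch (illustrative; `snapshot` is assumed to be a
    /// [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, Point>(0..snapshot.len(), false) {
    ///     let _ = (&entry.range, entry.diagnostic.severity, &entry.diagnostic.message);
    /// }
    /// ```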
4750 pub fn diagnostics_in_range<'a, T, O>(
4751 &'a self,
4752 search_range: Range<T>,
4753 reversed: bool,
4754 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4755 where
4756 T: 'a + Clone + ToOffset,
4757 O: 'a + FromAnchor,
4758 {
4759 let mut iterators: Vec<_> = self
4760 .diagnostics
4761 .iter()
4762 .map(|(_, collection)| {
4763 collection
4764 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4765 .peekable()
4766 })
4767 .collect();
4768
4769 std::iter::from_fn(move || {
4770 let (next_ix, _) = iterators
4771 .iter_mut()
4772 .enumerate()
4773 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4774 .min_by(|(_, a), (_, b)| {
4775 let cmp = a
4776 .range
4777 .start
4778 .cmp(&b.range.start, self)
4779 // when range is equal, sort by diagnostic severity
4780 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4781 // and stabilize order with group_id
4782 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4783 if reversed { cmp.reverse() } else { cmp }
4784 })?;
4785 iterators[next_ix]
4786 .next()
4787 .map(
4788 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4789 diagnostic,
4790 range: FromAnchor::from_anchor(&range.start, self)
4791 ..FromAnchor::from_anchor(&range.end, self),
4792 },
4793 )
4794 })
4795 }
4796
4797 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4798 /// should be used instead.
4799 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4800 &self.diagnostics
4801 }
4802
4803 /// Returns all the diagnostic groups associated with the given
4804 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
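    ///
    /// A minimal usage sketch (illustrative; `snapshot` is assumed to be a
    /// [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     // The primary entry is the diagnostic that the rest of the group supplements.
    ///     let primary = &group.entries[group.primary_ix];
    ///     let _ = (server_id, primary);
    /// }
    /// ```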
4806 pub fn diagnostic_groups(
4807 &self,
4808 language_server_id: Option<LanguageServerId>,
4809 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4810 let mut groups = Vec::new();
4811
4812 if let Some(language_server_id) = language_server_id {
4813 if let Ok(ix) = self
4814 .diagnostics
4815 .binary_search_by_key(&language_server_id, |e| e.0)
4816 {
4817 self.diagnostics[ix]
4818 .1
4819 .groups(language_server_id, &mut groups, self);
4820 }
4821 } else {
4822 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4823 diagnostics.groups(*language_server_id, &mut groups, self);
4824 }
4825 }
4826
4827 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4828 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4829 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4830 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4831 });
4832
4833 groups
4834 }
4835
4836 /// Returns an iterator over the diagnostics for the given group.
4837 pub fn diagnostic_group<O>(
4838 &self,
4839 group_id: usize,
4840 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4841 where
4842 O: FromAnchor + 'static,
4843 {
4844 self.diagnostics
4845 .iter()
4846 .flat_map(move |(_, set)| set.group(group_id, self))
4847 }
4848
4849 /// An integer version number that accounts for all updates besides
4850 /// the buffer's text itself (which is versioned via a version vector).
4851 pub fn non_text_state_update_count(&self) -> usize {
4852 self.non_text_state_update_count
4853 }
4854
4855 /// An integer version that changes when the buffer's syntax changes.
4856 pub fn syntax_update_count(&self) -> usize {
4857 self.syntax.update_count()
4858 }
4859
    /// Returns a snapshot of the underlying file.
4861 pub fn file(&self) -> Option<&Arc<dyn File>> {
4862 self.file.as_ref()
4863 }
4864
4865 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4866 if let Some(file) = self.file() {
4867 if file.path().file_name().is_none() || include_root {
4868 Some(file.full_path(cx).to_string_lossy().into_owned())
4869 } else {
4870 Some(file.path().display(file.path_style(cx)).to_string())
4871 }
4872 } else {
4873 None
4874 }
4875 }
4876
4877 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4878 let query_str = query.fuzzy_contents;
4879 if query_str.is_some_and(|query| query.is_empty()) {
4880 return BTreeMap::default();
4881 }
4882
4883 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4884 language,
4885 override_id: None,
4886 }));
4887
4888 let mut query_ix = 0;
4889 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4890 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4891
4892 let mut words = BTreeMap::default();
4893 let mut current_word_start_ix = None;
4894 let mut chunk_ix = query.range.start;
4895 for chunk in self.chunks(query.range, false) {
4896 for (i, c) in chunk.text.char_indices() {
4897 let ix = chunk_ix + i;
4898 if classifier.is_word(c) {
4899 if current_word_start_ix.is_none() {
4900 current_word_start_ix = Some(ix);
4901 }
4902
4903 if let Some(query_chars) = &query_chars
4904 && query_ix < query_len
4905 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4906 {
4907 query_ix += 1;
4908 }
4909 continue;
4910 } else if let Some(word_start) = current_word_start_ix.take()
4911 && query_ix == query_len
4912 {
4913 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4914 let mut word_text = self.text_for_range(word_start..ix).peekable();
4915 let first_char = word_text
4916 .peek()
4917 .and_then(|first_chunk| first_chunk.chars().next());
                    // Skip empty words and "words" starting with digits as a heuristic to reduce useless completions
4919 if !query.skip_digits
4920 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4921 {
4922 words.insert(word_text.collect(), word_range);
4923 }
4924 }
4925 query_ix = 0;
4926 }
4927 chunk_ix += chunk.text.len();
4928 }
4929
4930 words
4931 }
4932}
4933
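/// Parameters for [`BufferSnapshot::words_in_range`].
///
/// A minimal construction sketch (illustrative; `snapshot` is assumed to be a
/// [`BufferSnapshot`]):
///
/// ```ignore
/// // Collect words in the whole buffer that fuzzily contain "cfg", skipping
/// // words that start with a digit.
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```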
4934pub struct WordsQuery<'a> {
    /// Only returns words that contain all the characters of the fuzzy string.
4936 pub fuzzy_contents: Option<&'a str>,
4937 /// Skips words that start with a digit.
4938 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4940 pub range: Range<usize>,
4941}
4942
4943fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4944 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4945}
4946
4947fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4948 let mut result = IndentSize::spaces(0);
4949 for c in text {
4950 let kind = match c {
4951 ' ' => IndentKind::Space,
4952 '\t' => IndentKind::Tab,
4953 _ => break,
4954 };
4955 if result.len == 0 {
4956 result.kind = kind;
4957 }
4958 result.len += 1;
4959 }
4960 result
4961}
4962
4963impl Clone for BufferSnapshot {
4964 fn clone(&self) -> Self {
4965 Self {
4966 text: self.text.clone(),
4967 syntax: self.syntax.clone(),
4968 file: self.file.clone(),
4969 remote_selections: self.remote_selections.clone(),
4970 diagnostics: self.diagnostics.clone(),
4971 language: self.language.clone(),
4972 tree_sitter_data: self.tree_sitter_data.clone(),
4973 non_text_state_update_count: self.non_text_state_update_count,
4974 }
4975 }
4976}
4977
4978impl Deref for BufferSnapshot {
4979 type Target = text::BufferSnapshot;
4980
4981 fn deref(&self) -> &Self::Target {
4982 &self.text
4983 }
4984}
4985
4986unsafe impl Send for BufferChunks<'_> {}
4987
4988impl<'a> BufferChunks<'a> {
4989 pub(crate) fn new(
4990 text: &'a Rope,
4991 range: Range<usize>,
4992 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4993 diagnostics: bool,
4994 buffer_snapshot: Option<&'a BufferSnapshot>,
4995 ) -> Self {
4996 let mut highlights = None;
4997 if let Some((captures, highlight_maps)) = syntax {
4998 highlights = Some(BufferChunkHighlights {
4999 captures,
5000 next_capture: None,
5001 stack: Default::default(),
5002 highlight_maps,
5003 })
5004 }
5005
5006 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5007 let chunks = text.chunks_in_range(range.clone());
5008
5009 let mut this = BufferChunks {
5010 range,
5011 buffer_snapshot,
5012 chunks,
5013 diagnostic_endpoints,
5014 error_depth: 0,
5015 warning_depth: 0,
5016 information_depth: 0,
5017 hint_depth: 0,
5018 unnecessary_depth: 0,
5019 underline: true,
5020 highlights,
5021 };
5022 this.initialize_diagnostic_endpoints();
5023 this
5024 }
5025
    /// Seeks to the given byte range in the buffer.
5027 pub fn seek(&mut self, range: Range<usize>) {
5028 let old_range = std::mem::replace(&mut self.range, range.clone());
5029 self.chunks.set_range(self.range.clone());
5030 if let Some(highlights) = self.highlights.as_mut() {
5031 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5032 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5033 highlights
5034 .stack
5035 .retain(|(end_offset, _)| *end_offset > range.start);
5036 if let Some(capture) = &highlights.next_capture
5037 && range.start >= capture.node.start_byte()
5038 {
5039 let next_capture_end = capture.node.end_byte();
5040 if range.start < next_capture_end {
5041 highlights.stack.push((
5042 next_capture_end,
5043 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5044 ));
5045 }
5046 highlights.next_capture.take();
5047 }
5048 } else if let Some(snapshot) = self.buffer_snapshot {
5049 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5050 *highlights = BufferChunkHighlights {
5051 captures,
5052 next_capture: None,
5053 stack: Default::default(),
5054 highlight_maps,
5055 };
5056 } else {
5057 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5058 // Seeking such BufferChunks is not supported.
5059 debug_assert!(
5060 false,
5061 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5062 );
5063 }
5064
5065 highlights.captures.set_byte_range(self.range.clone());
5066 self.initialize_diagnostic_endpoints();
5067 }
5068 }
5069
5070 fn initialize_diagnostic_endpoints(&mut self) {
5071 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5072 && let Some(buffer) = self.buffer_snapshot
5073 {
5074 let mut diagnostic_endpoints = Vec::new();
5075 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5076 diagnostic_endpoints.push(DiagnosticEndpoint {
5077 offset: entry.range.start,
5078 is_start: true,
5079 severity: entry.diagnostic.severity,
5080 is_unnecessary: entry.diagnostic.is_unnecessary,
5081 underline: entry.diagnostic.underline,
5082 });
5083 diagnostic_endpoints.push(DiagnosticEndpoint {
5084 offset: entry.range.end,
5085 is_start: false,
5086 severity: entry.diagnostic.severity,
5087 is_unnecessary: entry.diagnostic.is_unnecessary,
5088 underline: entry.diagnostic.underline,
5089 });
5090 }
5091 diagnostic_endpoints
5092 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5093 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5094 self.hint_depth = 0;
5095 self.error_depth = 0;
5096 self.warning_depth = 0;
5097 self.information_depth = 0;
5098 }
5099 }
5100
5101 /// The current byte offset in the buffer.
5102 pub fn offset(&self) -> usize {
5103 self.range.start
5104 }
5105
5106 pub fn range(&self) -> Range<usize> {
5107 self.range.clone()
5108 }
5109
5110 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5111 let depth = match endpoint.severity {
5112 DiagnosticSeverity::ERROR => &mut self.error_depth,
5113 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5114 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5115 DiagnosticSeverity::HINT => &mut self.hint_depth,
5116 _ => return,
5117 };
5118 if endpoint.is_start {
5119 *depth += 1;
5120 } else {
5121 *depth -= 1;
5122 }
5123
5124 if endpoint.is_unnecessary {
5125 if endpoint.is_start {
5126 self.unnecessary_depth += 1;
5127 } else {
5128 self.unnecessary_depth -= 1;
5129 }
5130 }
5131 }
5132
5133 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5134 if self.error_depth > 0 {
5135 Some(DiagnosticSeverity::ERROR)
5136 } else if self.warning_depth > 0 {
5137 Some(DiagnosticSeverity::WARNING)
5138 } else if self.information_depth > 0 {
5139 Some(DiagnosticSeverity::INFORMATION)
5140 } else if self.hint_depth > 0 {
5141 Some(DiagnosticSeverity::HINT)
5142 } else {
5143 None
5144 }
5145 }
5146
5147 fn current_code_is_unnecessary(&self) -> bool {
5148 self.unnecessary_depth > 0
5149 }
5150}
5151
5152impl<'a> Iterator for BufferChunks<'a> {
5153 type Item = Chunk<'a>;
5154
5155 fn next(&mut self) -> Option<Self::Item> {
5156 let mut next_capture_start = usize::MAX;
5157 let mut next_diagnostic_endpoint = usize::MAX;
5158
5159 if let Some(highlights) = self.highlights.as_mut() {
5160 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5161 if *parent_capture_end <= self.range.start {
5162 highlights.stack.pop();
5163 } else {
5164 break;
5165 }
5166 }
5167
5168 if highlights.next_capture.is_none() {
5169 highlights.next_capture = highlights.captures.next();
5170 }
5171
5172 while let Some(capture) = highlights.next_capture.as_ref() {
5173 if self.range.start < capture.node.start_byte() {
5174 next_capture_start = capture.node.start_byte();
5175 break;
5176 } else {
5177 let highlight_id =
5178 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5179 highlights
5180 .stack
5181 .push((capture.node.end_byte(), highlight_id));
5182 highlights.next_capture = highlights.captures.next();
5183 }
5184 }
5185 }
5186
5187 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5188 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5189 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5190 if endpoint.offset <= self.range.start {
5191 self.update_diagnostic_depths(endpoint);
5192 diagnostic_endpoints.next();
5193 self.underline = endpoint.underline;
5194 } else {
5195 next_diagnostic_endpoint = endpoint.offset;
5196 break;
5197 }
5198 }
5199 }
5200 self.diagnostic_endpoints = diagnostic_endpoints;
5201
5202 if let Some(ChunkBitmaps {
5203 text: chunk,
5204 chars: chars_map,
5205 tabs,
5206 }) = self.chunks.peek_with_bitmaps()
5207 {
5208 let chunk_start = self.range.start;
5209 let mut chunk_end = (self.chunks.offset() + chunk.len())
5210 .min(next_capture_start)
5211 .min(next_diagnostic_endpoint);
5212 let mut highlight_id = None;
5213 if let Some(highlights) = self.highlights.as_ref()
5214 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5215 {
5216 chunk_end = chunk_end.min(*parent_capture_end);
5217 highlight_id = Some(*parent_highlight_id);
5218 }
5219 let bit_start = chunk_start - self.chunks.offset();
5220 let bit_end = chunk_end - self.chunks.offset();
5221
5222 let slice = &chunk[bit_start..bit_end];
5223
5224 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5225 let tabs = (tabs >> bit_start) & mask;
5226 let chars = (chars_map >> bit_start) & mask;
5227
5228 self.range.start = chunk_end;
5229 if self.range.start == self.chunks.offset() + chunk.len() {
5230 self.chunks.next().unwrap();
5231 }
5232
5233 Some(Chunk {
5234 text: slice,
5235 syntax_highlight_id: highlight_id,
5236 underline: self.underline,
5237 diagnostic_severity: self.current_diagnostic_severity(),
5238 is_unnecessary: self.current_code_is_unnecessary(),
5239 tabs,
5240 chars,
5241 ..Chunk::default()
5242 })
5243 } else {
5244 None
5245 }
5246 }
5247}
5248
5249impl operation_queue::Operation for Operation {
5250 fn lamport_timestamp(&self) -> clock::Lamport {
5251 match self {
5252 Operation::Buffer(_) => {
5253 unreachable!("buffer operations should never be deferred at this layer")
5254 }
5255 Operation::UpdateDiagnostics {
5256 lamport_timestamp, ..
5257 }
5258 | Operation::UpdateSelections {
5259 lamport_timestamp, ..
5260 }
5261 | Operation::UpdateCompletionTriggers {
5262 lamport_timestamp, ..
5263 }
5264 | Operation::UpdateLineEnding {
5265 lamport_timestamp, ..
5266 } => *lamport_timestamp,
5267 }
5268 }
5269}
5270
5271impl Default for Diagnostic {
5272 fn default() -> Self {
5273 Self {
5274 source: Default::default(),
5275 source_kind: DiagnosticSourceKind::Other,
5276 code: None,
5277 code_description: None,
5278 severity: DiagnosticSeverity::ERROR,
5279 message: Default::default(),
5280 markdown: None,
5281 group_id: 0,
5282 is_primary: false,
5283 is_disk_based: false,
5284 is_unnecessary: false,
5285 underline: true,
5286 data: None,
5287 }
5288 }
5289}
5290
5291impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5293 pub fn spaces(len: u32) -> Self {
5294 Self {
5295 len,
5296 kind: IndentKind::Space,
5297 }
5298 }
5299
5300 /// Returns an [`IndentSize`] representing a tab.
5301 pub fn tab() -> Self {
5302 Self {
5303 len: 1,
5304 kind: IndentKind::Tab,
5305 }
5306 }
5307
5308 /// An iterator over the characters represented by this [`IndentSize`].
5309 pub fn chars(&self) -> impl Iterator<Item = char> {
5310 iter::repeat(self.char()).take(self.len as usize)
5311 }
5312
5313 /// The character representation of this [`IndentSize`].
5314 pub fn char(&self) -> char {
5315 match self.kind {
5316 IndentKind::Space => ' ',
5317 IndentKind::Tab => '\t',
5318 }
5319 }
5320
5321 /// Consumes the current [`IndentSize`] and returns a new one that has
5322 /// been shrunk or enlarged by the given size along the given direction.
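    ///
    /// A small sketch of the arithmetic (illustrative):
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    ///
    /// // Four spaces grown by two spaces yields six spaces; shrunk by two yields two.
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(2));
    /// let shrunk = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2));
    /// ```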
5323 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5324 match direction {
5325 Ordering::Less => {
5326 if self.kind == size.kind && self.len >= size.len {
5327 self.len -= size.len;
5328 }
5329 }
5330 Ordering::Equal => {}
5331 Ordering::Greater => {
5332 if self.len == 0 {
5333 self = size;
5334 } else if self.kind == size.kind {
5335 self.len += size.len;
5336 }
5337 }
5338 }
5339 self
5340 }
5341
5342 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5343 match self.kind {
5344 IndentKind::Space => self.len as usize,
5345 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5346 }
5347 }
5348}
5349
#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

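/// Coalesces consecutive values from the iterator into contiguous ranges,
/// starting a new range whenever a value does not extend the previous one or
/// the current range has reached `max_len`.
///
/// A minimal usage sketch (illustrative only, not compiled as a doc-test):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```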
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

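/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally consulting a language scope's configured character sets.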
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

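    /// Classifies a character, optionally treating punctuation as part of a
    /// word. Alphanumerics and `_` always count as word characters; the
    /// language scope may add more, depending on the active context.
    ///
    /// A minimal usage sketch (illustrative only, not compiled as a doc-test):
    ///
    /// ```ignore
    /// let classifier = CharClassifier::new(None);
    /// assert_eq!(classifier.kind_with('a', false), CharKind::Word);
    /// assert_eq!(classifier.kind_with('-', false), CharKind::Punctuation);
    /// assert_eq!(classifier.kind_with('-', true), CharKind::Word);
    /// ```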
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
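///
/// A minimal usage sketch (illustrative only, not compiled as a doc-test):
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n    let x = 1;\t\n}\n");
/// // Trailing spaces on the first line and the trailing tab on the second.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..13, 28..29]);
/// ```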
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

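            // A chunk boundary can split a run of trailing whitespace: if this
            // chunk's first line is entirely whitespace, extend the range that
            // began at the end of the previous chunk.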
            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

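        // The last segment of a chunk is not followed by a newline within this
        // chunk, so undo the final `+ 1`.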
        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}