1use crate::{
2 DebugVariableCapture, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result, anyhow};
21use async_watch as watch;
22use clock::Lamport;
23pub use clock::ReplicaId;
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
29 Task, TaskLabel, TextStyle,
30};
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 ffi::OsStr,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, Range},
51 path::{Path, PathBuf},
52 rc,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
63 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
64 Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{RangeExt, debug_panic, maybe};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76#[derive(Debug)]
77pub struct DebugVariableRanges {
78 pub buffer_id: BufferId,
79 pub range: Range<usize>,
80}
81
82/// A label for the background task spawned by the buffer to compute
83/// a diff against the contents of its file.
84pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
85
/// Indicates whether a [`Buffer`] has permission to edit.
87#[derive(PartialEq, Clone, Copy, Debug)]
88pub enum Capability {
89 /// The buffer is a mutable replica.
90 ReadWrite,
91 /// The buffer is a read-only replica.
92 ReadOnly,
93}
94
95pub type BufferRow = u32;
96
97/// An in-memory representation of a source code file, including its text,
98/// syntax trees, git status, and diagnostics.
99pub struct Buffer {
100 text: TextBuffer,
101 branch_state: Option<BufferBranchState>,
102 /// Filesystem state, `None` when there is no path.
103 file: Option<Arc<dyn File>>,
104 /// The mtime of the file when this buffer was last loaded from
105 /// or saved to disk.
106 saved_mtime: Option<MTime>,
107 /// The version vector when this buffer was last loaded from
108 /// or saved to disk.
109 saved_version: clock::Global,
110 preview_version: clock::Global,
111 transaction_depth: usize,
112 was_dirty_before_starting_transaction: Option<bool>,
113 reload_task: Option<Task<Result<()>>>,
114 language: Option<Arc<Language>>,
115 autoindent_requests: Vec<Arc<AutoindentRequest>>,
116 pending_autoindent: Option<Task<()>>,
117 sync_parse_timeout: Duration,
118 syntax_map: Mutex<SyntaxMap>,
119 reparse: Option<Task<()>>,
120 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
121 non_text_state_update_count: usize,
122 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
123 remote_selections: TreeMap<ReplicaId, SelectionSet>,
124 diagnostics_timestamp: clock::Lamport,
125 completion_triggers: BTreeSet<String>,
126 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
127 completion_triggers_timestamp: clock::Lamport,
128 deferred_ops: OperationQueue<Operation>,
129 capability: Capability,
130 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
133 has_unsaved_edits: Cell<(clock::Global, bool)>,
134 change_bits: Vec<rc::Weak<Cell<bool>>>,
135 _subscriptions: Vec<gpui::Subscription>,
136}
137
138#[derive(Copy, Clone, Debug, PartialEq, Eq)]
139pub enum ParseStatus {
140 Idle,
141 Parsing,
142}
143
144struct BufferBranchState {
145 base_buffer: Entity<Buffer>,
146 merged_operations: Vec<Lamport>,
147}
148
149/// An immutable, cheaply cloneable representation of a fixed
150/// state of a buffer.
151pub struct BufferSnapshot {
152 pub text: text::BufferSnapshot,
153 pub(crate) syntax: SyntaxSnapshot,
154 file: Option<Arc<dyn File>>,
155 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
156 remote_selections: TreeMap<ReplicaId, SelectionSet>,
157 language: Option<Arc<Language>>,
158 non_text_state_update_count: usize,
159}
160
161/// The kind and amount of indentation in a particular line. For now,
162/// assumes that indentation is all the same character.
163#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
164pub struct IndentSize {
165 /// The number of bytes that comprise the indentation.
166 pub len: u32,
167 /// The kind of whitespace used for indentation.
168 pub kind: IndentKind,
169}
170
171/// A whitespace character that's used for indentation.
172#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
173pub enum IndentKind {
174 /// An ASCII space character.
175 #[default]
176 Space,
177 /// An ASCII tab character.
178 Tab,
179}
180
181/// The shape of a selection cursor.
182#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
183#[serde(rename_all = "snake_case")]
184pub enum CursorShape {
185 /// A vertical bar
186 #[default]
187 Bar,
188 /// A block that surrounds the following character
189 Block,
190 /// An underline that runs along the following character
191 Underline,
192 /// A box drawn around the following character
193 Hollow,
194}
195
196#[derive(Clone, Debug)]
197struct SelectionSet {
198 line_mode: bool,
199 cursor_shape: CursorShape,
200 selections: Arc<[Selection<Anchor>]>,
201 lamport_timestamp: clock::Lamport,
202}
203
204/// A diagnostic associated with a certain range of a buffer.
205#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
206pub struct Diagnostic {
207 /// The name of the service that produced this diagnostic.
208 pub source: Option<String>,
209 /// A machine-readable code that identifies this diagnostic.
210 pub code: Option<NumberOrString>,
211 /// Whether this diagnostic is a hint, warning, or error.
212 pub severity: DiagnosticSeverity,
213 /// The human-readable message associated with this diagnostic.
214 pub message: String,
215 /// An id that identifies the group to which this diagnostic belongs.
216 ///
217 /// When a language server produces a diagnostic with
218 /// one or more associated diagnostics, those diagnostics are all
219 /// assigned a single group ID.
220 pub group_id: usize,
221 /// Whether this diagnostic is the primary diagnostic for its group.
222 ///
223 /// In a given group, the primary diagnostic is the top-level diagnostic
224 /// returned by the language server. The non-primary diagnostics are the
225 /// associated diagnostics.
226 pub is_primary: bool,
227 /// Whether this diagnostic is considered to originate from an analysis of
228 /// files on disk, as opposed to any unsaved buffer contents. This is a
    /// property of a given diagnostic source, and is configured for a given
    /// language server via the
    /// [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method.
232 pub is_disk_based: bool,
233 /// Whether this diagnostic marks unnecessary code.
234 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic, passed back to the
    /// server when code actions are requested for this diagnostic.
236 pub data: Option<Value>,
237}
238
239/// An operation used to synchronize this buffer with its other replicas.
240#[derive(Clone, Debug, PartialEq)]
241pub enum Operation {
242 /// A text operation.
243 Buffer(text::Operation),
244
245 /// An update to the buffer's diagnostics.
246 UpdateDiagnostics {
247 /// The id of the language server that produced the new diagnostics.
248 server_id: LanguageServerId,
249 /// The diagnostics.
250 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
251 /// The buffer's lamport timestamp.
252 lamport_timestamp: clock::Lamport,
253 },
254
255 /// An update to the most recent selections in this buffer.
256 UpdateSelections {
257 /// The selections.
258 selections: Arc<[Selection<Anchor>]>,
259 /// The buffer's lamport timestamp.
260 lamport_timestamp: clock::Lamport,
261 /// Whether the selections are in 'line mode'.
262 line_mode: bool,
263 /// The [`CursorShape`] associated with these selections.
264 cursor_shape: CursorShape,
265 },
266
267 /// An update to the characters that should trigger autocompletion
268 /// for this buffer.
269 UpdateCompletionTriggers {
270 /// The characters that trigger autocompletion.
271 triggers: Vec<String>,
272 /// The buffer's lamport timestamp.
273 lamport_timestamp: clock::Lamport,
274 /// The language server ID.
275 server_id: LanguageServerId,
276 },
277}
278
279/// An event that occurs in a buffer.
280#[derive(Clone, Debug, PartialEq)]
281pub enum BufferEvent {
282 /// The buffer was changed in a way that must be
283 /// propagated to its other replicas.
284 Operation {
285 operation: Operation,
286 is_local: bool,
287 },
288 /// The buffer was edited.
289 Edited,
290 /// The buffer's `dirty` bit changed.
291 DirtyChanged,
292 /// The buffer was saved.
293 Saved,
294 /// The buffer's file was changed on disk.
295 FileHandleChanged,
296 /// The buffer was reloaded.
297 Reloaded,
    /// The buffer needs to be reloaded.
299 ReloadNeeded,
300 /// The buffer's language was changed.
301 LanguageChanged,
302 /// The buffer's syntax trees were updated.
303 Reparsed,
304 /// The buffer's diagnostics were updated.
305 DiagnosticsUpdated,
306 /// The buffer gained or lost editing capabilities.
307 CapabilityChanged,
308 /// The buffer was explicitly requested to close.
309 Closed,
310 /// The buffer was discarded when closing.
311 Discarded,
312}
313
314/// The file associated with a buffer.
315pub trait File: Send + Sync + Any {
316 /// Returns the [`LocalFile`] associated with this file, if the
317 /// file is local.
318 fn as_local(&self) -> Option<&dyn LocalFile>;
319
320 /// Returns whether this file is local.
321 fn is_local(&self) -> bool {
322 self.as_local().is_some()
323 }
324
325 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
326 /// only available in some states, such as modification time.
327 fn disk_state(&self) -> DiskState;
328
329 /// Returns the path of this file relative to the worktree's root directory.
330 fn path(&self) -> &Arc<Path>;
331
332 /// Returns the path of this file relative to the worktree's parent directory (this means it
333 /// includes the name of the worktree's root folder).
334 fn full_path(&self, cx: &App) -> PathBuf;
335
336 /// Returns the last component of this handle's absolute path. If this handle refers to the root
337 /// of its worktree, then this method will return the name of the worktree itself.
338 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
339
340 /// Returns the id of the worktree to which this file belongs.
341 ///
342 /// This is needed for looking up project-specific settings.
343 fn worktree_id(&self, cx: &App) -> WorktreeId;
344
345 /// Converts this file into a protobuf message.
346 fn to_proto(&self, cx: &App) -> rpc::proto::File;
347
    /// Returns whether Zed considers this to be a private file.
349 fn is_private(&self) -> bool;
350}
351
352/// The file's storage status - whether it's stored (`Present`), and if so when it was last
353/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
354/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
355/// indicator for new files.
356#[derive(Copy, Clone, Debug, PartialEq)]
357pub enum DiskState {
358 /// File created in Zed that has not been saved.
359 New,
360 /// File present on the filesystem.
361 Present { mtime: MTime },
362 /// Deleted file that was previously present.
363 Deleted,
364}
365
366impl DiskState {
367 /// Returns the file's last known modification time on disk.
368 pub fn mtime(self) -> Option<MTime> {
369 match self {
370 DiskState::New => None,
371 DiskState::Present { mtime } => Some(mtime),
372 DiskState::Deleted => None,
373 }
374 }
375
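    /// Returns whether the file currently exists in storage.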
376 pub fn exists(&self) -> bool {
377 match self {
378 DiskState::New => false,
379 DiskState::Present { .. } => true,
380 DiskState::Deleted => false,
381 }
382 }
383}
384
385/// The file associated with a buffer, in the case where the file is on the local disk.
386pub trait LocalFile: File {
387 /// Returns the absolute path of this file
388 fn abs_path(&self, cx: &App) -> PathBuf;
389
390 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
391 fn load(&self, cx: &App) -> Task<Result<String>>;
392
393 /// Loads the file's contents from disk.
394 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
395}
396
397/// The auto-indent behavior associated with an editing operation.
398/// For some editing operations, each affected line of text has its
399/// indentation recomputed. For other operations, the entire block
400/// of edited text is adjusted uniformly.
401#[derive(Clone, Debug)]
402pub enum AutoindentMode {
403 /// Indent each line of inserted text.
404 EachLine,
405 /// Apply the same indentation adjustment to all of the lines
406 /// in a given insertion.
407 Block {
408 /// The original indentation column of the first line of each
409 /// insertion, if it has been copied.
410 ///
411 /// Knowing this makes it possible to preserve the relative indentation
412 /// of every line in the insertion from when it was copied.
413 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is auto-indented to column `b`, then every other line of the insertion
        /// will have its indentation adjusted by the same delta, `b - a`.
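        ///
        /// For example (illustrative numbers only): if a copied block's first line
        /// was originally at column 4 (`a = 4`) and is auto-indented to column 8
        /// (`b = 8`), a line that was originally at column 6 ends up at column
        /// 6 + (8 - 4) = 10.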
417 original_indent_columns: Vec<Option<u32>>,
418 },
419}
420
421#[derive(Clone)]
422struct AutoindentRequest {
423 before_edit: BufferSnapshot,
424 entries: Vec<AutoindentRequestEntry>,
425 is_block_mode: bool,
426 ignore_empty_lines: bool,
427}
428
429#[derive(Debug, Clone)]
430struct AutoindentRequestEntry {
431 /// A range of the buffer whose indentation should be adjusted.
432 range: Range<Anchor>,
433 /// Whether or not these lines should be considered brand new, for the
434 /// purpose of auto-indent. When text is not new, its indentation will
435 /// only be adjusted if the suggested indentation level has *changed*
436 /// since the edit was made.
437 first_line_is_new: bool,
438 indent_size: IndentSize,
439 original_indent_column: Option<u32>,
440}
441
442#[derive(Debug)]
443struct IndentSuggestion {
444 basis_row: u32,
445 delta: Ordering,
446 within_error: bool,
447}
448
449struct BufferChunkHighlights<'a> {
450 captures: SyntaxMapCaptures<'a>,
451 next_capture: Option<SyntaxMapCapture<'a>>,
452 stack: Vec<(usize, HighlightId)>,
453 highlight_maps: Vec<HighlightMap>,
454}
455
456/// An iterator that yields chunks of a buffer's text, along with their
457/// syntax highlights and diagnostic status.
458pub struct BufferChunks<'a> {
459 buffer_snapshot: Option<&'a BufferSnapshot>,
460 range: Range<usize>,
461 chunks: text::Chunks<'a>,
462 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
463 error_depth: usize,
464 warning_depth: usize,
465 information_depth: usize,
466 hint_depth: usize,
467 unnecessary_depth: usize,
468 highlights: Option<BufferChunkHighlights<'a>>,
469}
470
471/// A chunk of a buffer's text, along with its syntax highlight and
472/// diagnostic status.
473#[derive(Clone, Debug, Default)]
474pub struct Chunk<'a> {
475 /// The text of the chunk.
476 pub text: &'a str,
477 /// The syntax highlighting style of the chunk.
478 pub syntax_highlight_id: Option<HighlightId>,
479 /// The highlight style that has been applied to this chunk in
480 /// the editor.
481 pub highlight_style: Option<HighlightStyle>,
482 /// The severity of diagnostic associated with this chunk, if any.
483 pub diagnostic_severity: Option<DiagnosticSeverity>,
484 /// Whether this chunk of text is marked as unnecessary.
485 pub is_unnecessary: bool,
486 /// Whether this chunk of text was originally a tab character.
487 pub is_tab: bool,
488}
489
490/// A set of edits to a given version of a buffer, computed asynchronously.
491#[derive(Debug)]
492pub struct Diff {
493 pub base_version: clock::Global,
494 pub line_ending: LineEnding,
495 pub edits: Vec<(Range<usize>, Arc<str>)>,
496}
497
498#[derive(Clone, Copy)]
499pub(crate) struct DiagnosticEndpoint {
500 offset: usize,
501 is_start: bool,
502 severity: DiagnosticSeverity,
503 is_unnecessary: bool,
504}
505
506/// A class of characters, used for characterizing a run of text.
507#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
508pub enum CharKind {
509 /// Whitespace.
510 Whitespace,
511 /// Punctuation.
512 Punctuation,
513 /// Word.
514 Word,
515}
516
517/// A runnable is a set of data about a region that could be resolved into a task
518pub struct Runnable {
519 pub tags: SmallVec<[RunnableTag; 1]>,
520 pub language: Arc<Language>,
521 pub buffer: BufferId,
522}
523
524#[derive(Default, Clone, Debug)]
525pub struct HighlightedText {
526 pub text: SharedString,
527 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
528}
529
530#[derive(Default, Debug)]
531struct HighlightedTextBuilder {
532 pub text: String,
533 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
534}
535
536impl HighlightedText {
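    /// Builds a [`HighlightedText`] for the given buffer range, using the syntax
    /// highlighting captured in `syntax_snapshot` and, if provided, layering
    /// `override_style` on top of (or in place of) the syntax styles.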
537 pub fn from_buffer_range<T: ToOffset>(
538 range: Range<T>,
539 snapshot: &text::BufferSnapshot,
540 syntax_snapshot: &SyntaxSnapshot,
541 override_style: Option<HighlightStyle>,
542 syntax_theme: &SyntaxTheme,
543 ) -> Self {
544 let mut highlighted_text = HighlightedTextBuilder::default();
545 highlighted_text.add_text_from_buffer_range(
546 range,
547 snapshot,
548 syntax_snapshot,
549 override_style,
550 syntax_theme,
551 );
552 highlighted_text.build()
553 }
554
555 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
556 gpui::StyledText::new(self.text.clone())
557 .with_default_highlights(default_style, self.highlights.iter().cloned())
558 }
559
    /// Returns the first line of the text, trimming leading whitespace unless a
    /// highlight starts within it, along with a boolean indicating whether any
    /// lines follow it.
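    ///
    /// For example (illustrative): for the text `"  foo bar\nbaz"` with no
    /// highlights, this returns the preview text `"foo bar"` together with `true`,
    /// since a second line follows.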
562 pub fn first_line_preview(self) -> (Self, bool) {
563 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
564 let first_line = &self.text[..newline_ix];
565
566 // Trim leading whitespace, unless an edit starts prior to it.
567 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
568 if let Some((first_highlight_range, _)) = self.highlights.first() {
569 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
570 }
571
572 let preview_text = &first_line[preview_start_ix..];
573 let preview_highlights = self
574 .highlights
575 .into_iter()
576 .take_while(|(range, _)| range.start < newline_ix)
577 .filter_map(|(mut range, highlight)| {
578 range.start = range.start.saturating_sub(preview_start_ix);
579 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
580 if range.is_empty() {
581 None
582 } else {
583 Some((range, highlight))
584 }
585 });
586
587 let preview = Self {
588 text: SharedString::new(preview_text),
589 highlights: preview_highlights.collect(),
590 };
591
592 (preview, self.text.len() > newline_ix)
593 }
594}
595
596impl HighlightedTextBuilder {
597 pub fn build(self) -> HighlightedText {
598 HighlightedText {
599 text: self.text.into(),
600 highlights: self.highlights,
601 }
602 }
603
604 pub fn add_text_from_buffer_range<T: ToOffset>(
605 &mut self,
606 range: Range<T>,
607 snapshot: &text::BufferSnapshot,
608 syntax_snapshot: &SyntaxSnapshot,
609 override_style: Option<HighlightStyle>,
610 syntax_theme: &SyntaxTheme,
611 ) {
612 let range = range.to_offset(snapshot);
613 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
614 let start = self.text.len();
615 self.text.push_str(chunk.text);
616 let end = self.text.len();
617
618 if let Some(mut highlight_style) = chunk
619 .syntax_highlight_id
620 .and_then(|id| id.style(syntax_theme))
621 {
622 if let Some(override_style) = override_style {
623 highlight_style.highlight(override_style);
624 }
625 self.highlights.push((start..end, highlight_style));
626 } else if let Some(override_style) = override_style {
627 self.highlights.push((start..end, override_style));
628 }
629 }
630 }
631
632 fn highlighted_chunks<'a>(
633 range: Range<usize>,
634 snapshot: &'a text::BufferSnapshot,
635 syntax_snapshot: &'a SyntaxSnapshot,
636 ) -> BufferChunks<'a> {
637 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
638 grammar.highlights_query.as_ref()
639 });
640
641 let highlight_maps = captures
642 .grammars()
643 .iter()
644 .map(|grammar| grammar.highlight_map())
645 .collect();
646
647 BufferChunks::new(
648 snapshot.as_rope(),
649 range,
650 Some((captures, highlight_maps)),
651 false,
652 None,
653 )
654 }
655}
656
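/// A preview of a set of edits applied to a buffer: the text before the edits,
/// the text after the edits, and a syntax snapshot for highlighting the result.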
657#[derive(Clone)]
658pub struct EditPreview {
659 old_snapshot: text::BufferSnapshot,
660 applied_edits_snapshot: text::BufferSnapshot,
661 syntax_snapshot: SyntaxSnapshot,
662}
663
664impl EditPreview {
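    /// Produces a single [`HighlightedText`] showing the result of applying `edits`,
    /// highlighting inserted text with the theme's "created" background and, when
    /// `include_deletions` is true, also including the deleted text from
    /// `current_snapshot` with the "deleted" background. Returns an empty result
    /// when `edits` is empty.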
665 pub fn highlight_edits(
666 &self,
667 current_snapshot: &BufferSnapshot,
668 edits: &[(Range<Anchor>, String)],
669 include_deletions: bool,
670 cx: &App,
671 ) -> HighlightedText {
672 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
673 return HighlightedText::default();
674 };
675
676 let mut highlighted_text = HighlightedTextBuilder::default();
677
678 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
679
680 let insertion_highlight_style = HighlightStyle {
681 background_color: Some(cx.theme().status().created_background),
682 ..Default::default()
683 };
684 let deletion_highlight_style = HighlightStyle {
685 background_color: Some(cx.theme().status().deleted_background),
686 ..Default::default()
687 };
688 let syntax_theme = cx.theme().syntax();
689
690 for (range, edit_text) in edits {
691 let edit_new_end_in_preview_snapshot = range
692 .end
693 .bias_right(&self.old_snapshot)
694 .to_offset(&self.applied_edits_snapshot);
695 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
696
697 let unchanged_range_in_preview_snapshot =
698 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
699 if !unchanged_range_in_preview_snapshot.is_empty() {
700 highlighted_text.add_text_from_buffer_range(
701 unchanged_range_in_preview_snapshot,
702 &self.applied_edits_snapshot,
703 &self.syntax_snapshot,
704 None,
705 &syntax_theme,
706 );
707 }
708
709 let range_in_current_snapshot = range.to_offset(current_snapshot);
710 if include_deletions && !range_in_current_snapshot.is_empty() {
711 highlighted_text.add_text_from_buffer_range(
712 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
715 Some(deletion_highlight_style),
716 &syntax_theme,
717 );
718 }
719
720 if !edit_text.is_empty() {
721 highlighted_text.add_text_from_buffer_range(
722 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
723 &self.applied_edits_snapshot,
724 &self.syntax_snapshot,
725 Some(insertion_highlight_style),
726 &syntax_theme,
727 );
728 }
729
730 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
731 }
732
733 highlighted_text.add_text_from_buffer_range(
734 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
735 &self.applied_edits_snapshot,
736 &self.syntax_snapshot,
737 None,
738 &syntax_theme,
739 );
740
741 highlighted_text.build()
742 }
743
744 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
745 let (first, _) = edits.first()?;
746 let (last, _) = edits.last()?;
747
748 let start = first
749 .start
750 .bias_left(&self.old_snapshot)
751 .to_point(&self.applied_edits_snapshot);
752 let end = last
753 .end
754 .bias_right(&self.old_snapshot)
755 .to_point(&self.applied_edits_snapshot);
756
757 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
758 let range = Point::new(start.row, 0)
759 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
760
761 Some(range.to_offset(&self.applied_edits_snapshot))
762 }
763}
764
765#[derive(Clone, Debug, PartialEq, Eq)]
766pub struct BracketMatch {
767 pub open_range: Range<usize>,
768 pub close_range: Range<usize>,
769 pub newline_only: bool,
770}
771
772impl Buffer {
773 /// Create a new buffer with the given base text.
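    ///
    /// A minimal usage sketch (assuming a GPUI entity context is available;
    /// illustrative only):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```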
774 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
775 Self::build(
776 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
777 None,
778 Capability::ReadWrite,
779 )
780 }
781
    /// Create a new buffer from base text that has already had line endings and
    /// other normalization applied.
783 pub fn local_normalized(
784 base_text_normalized: Rope,
785 line_ending: LineEnding,
786 cx: &Context<Self>,
787 ) -> Self {
788 Self::build(
789 TextBuffer::new_normalized(
790 0,
791 cx.entity_id().as_non_zero_u64().into(),
792 line_ending,
793 base_text_normalized,
794 ),
795 None,
796 Capability::ReadWrite,
797 )
798 }
799
800 /// Create a new buffer that is a replica of a remote buffer.
801 pub fn remote(
802 remote_id: BufferId,
803 replica_id: ReplicaId,
804 capability: Capability,
805 base_text: impl Into<String>,
806 ) -> Self {
807 Self::build(
808 TextBuffer::new(replica_id, remote_id, base_text.into()),
809 None,
810 capability,
811 )
812 }
813
814 /// Create a new buffer that is a replica of a remote buffer, populating its
815 /// state from the given protobuf message.
816 pub fn from_proto(
817 replica_id: ReplicaId,
818 capability: Capability,
819 message: proto::BufferState,
820 file: Option<Arc<dyn File>>,
821 ) -> Result<Self> {
822 let buffer_id = BufferId::new(message.id)
823 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
824 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
825 let mut this = Self::build(buffer, file, capability);
826 this.text.set_line_ending(proto::deserialize_line_ending(
827 rpc::proto::LineEnding::from_i32(message.line_ending)
828 .ok_or_else(|| anyhow!("missing line_ending"))?,
829 ));
830 this.saved_version = proto::deserialize_version(&message.saved_version);
831 this.saved_mtime = message.saved_mtime.map(|time| time.into());
832 Ok(this)
833 }
834
835 /// Serialize the buffer's state to a protobuf message.
836 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
837 proto::BufferState {
838 id: self.remote_id().into(),
839 file: self.file.as_ref().map(|f| f.to_proto(cx)),
840 base_text: self.base_text().to_string(),
841 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
842 saved_version: proto::serialize_version(&self.saved_version),
843 saved_mtime: self.saved_mtime.map(|time| time.into()),
844 }
845 }
846
847 /// Serialize as protobufs all of the changes to the buffer since the given version.
848 pub fn serialize_ops(
849 &self,
850 since: Option<clock::Global>,
851 cx: &App,
852 ) -> Task<Vec<proto::Operation>> {
853 let mut operations = Vec::new();
854 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
855
856 operations.extend(self.remote_selections.iter().map(|(_, set)| {
857 proto::serialize_operation(&Operation::UpdateSelections {
858 selections: set.selections.clone(),
859 lamport_timestamp: set.lamport_timestamp,
860 line_mode: set.line_mode,
861 cursor_shape: set.cursor_shape,
862 })
863 }));
864
865 for (server_id, diagnostics) in &self.diagnostics {
866 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
867 lamport_timestamp: self.diagnostics_timestamp,
868 server_id: *server_id,
869 diagnostics: diagnostics.iter().cloned().collect(),
870 }));
871 }
872
873 for (server_id, completions) in &self.completion_triggers_per_language_server {
874 operations.push(proto::serialize_operation(
875 &Operation::UpdateCompletionTriggers {
876 triggers: completions.iter().cloned().collect(),
877 lamport_timestamp: self.completion_triggers_timestamp,
878 server_id: *server_id,
879 },
880 ));
881 }
882
883 let text_operations = self.text.operations().clone();
884 cx.background_spawn(async move {
885 let since = since.unwrap_or_default();
886 operations.extend(
887 text_operations
888 .iter()
889 .filter(|(_, op)| !since.observed(op.timestamp()))
890 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
891 );
892 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
893 operations
894 })
895 }
896
897 /// Assign a language to the buffer, returning the buffer.
898 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
899 self.set_language(Some(language), cx);
900 self
901 }
902
903 /// Returns the [`Capability`] of this buffer.
904 pub fn capability(&self) -> Capability {
905 self.capability
906 }
907
908 /// Whether this buffer can only be read.
909 pub fn read_only(&self) -> bool {
910 self.capability == Capability::ReadOnly
911 }
912
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
914 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
915 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
916 let snapshot = buffer.snapshot();
917 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
918 Self {
919 saved_mtime,
920 saved_version: buffer.version(),
921 preview_version: buffer.version(),
922 reload_task: None,
923 transaction_depth: 0,
924 was_dirty_before_starting_transaction: None,
925 has_unsaved_edits: Cell::new((buffer.version(), false)),
926 text: buffer,
927 branch_state: None,
928 file,
929 capability,
930 syntax_map,
931 reparse: None,
932 non_text_state_update_count: 0,
933 sync_parse_timeout: Duration::from_millis(1),
934 parse_status: async_watch::channel(ParseStatus::Idle),
935 autoindent_requests: Default::default(),
936 pending_autoindent: Default::default(),
937 language: None,
938 remote_selections: Default::default(),
939 diagnostics: Default::default(),
940 diagnostics_timestamp: Default::default(),
941 completion_triggers: Default::default(),
942 completion_triggers_per_language_server: Default::default(),
943 completion_triggers_timestamp: Default::default(),
944 deferred_ops: OperationQueue::new(),
945 has_conflict: false,
946 change_bits: Default::default(),
947 _subscriptions: Vec::new(),
948 }
949 }
950
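    /// Builds a [`BufferSnapshot`] directly from the given text, without constructing
    /// a full [`Buffer`]. If a language is provided, the returned future reparses the
    /// text so that the snapshot carries syntax information.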
951 pub fn build_snapshot(
952 text: Rope,
953 language: Option<Arc<Language>>,
954 language_registry: Option<Arc<LanguageRegistry>>,
955 cx: &mut App,
956 ) -> impl Future<Output = BufferSnapshot> + use<> {
957 let entity_id = cx.reserve_entity::<Self>().entity_id();
958 let buffer_id = entity_id.as_non_zero_u64().into();
959 async move {
960 let text =
961 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
962 let mut syntax = SyntaxMap::new(&text).snapshot();
963 if let Some(language) = language.clone() {
964 let text = text.clone();
965 let language = language.clone();
966 let language_registry = language_registry.clone();
967 syntax.reparse(&text, language_registry, language);
968 }
969 BufferSnapshot {
970 text,
971 syntax,
972 file: None,
973 diagnostics: Default::default(),
974 remote_selections: Default::default(),
975 language,
976 non_text_state_update_count: 0,
977 }
978 }
979 }
980
981 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
982 let entity_id = cx.reserve_entity::<Self>().entity_id();
983 let buffer_id = entity_id.as_non_zero_u64().into();
984 let text =
985 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
986 let syntax = SyntaxMap::new(&text).snapshot();
987 BufferSnapshot {
988 text,
989 syntax,
990 file: None,
991 diagnostics: Default::default(),
992 remote_selections: Default::default(),
993 language: None,
994 non_text_state_update_count: 0,
995 }
996 }
997
998 #[cfg(any(test, feature = "test-support"))]
999 pub fn build_snapshot_sync(
1000 text: Rope,
1001 language: Option<Arc<Language>>,
1002 language_registry: Option<Arc<LanguageRegistry>>,
1003 cx: &mut App,
1004 ) -> BufferSnapshot {
1005 let entity_id = cx.reserve_entity::<Self>().entity_id();
1006 let buffer_id = entity_id.as_non_zero_u64().into();
1007 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1008 let mut syntax = SyntaxMap::new(&text).snapshot();
1009 if let Some(language) = language.clone() {
1010 let text = text.clone();
1011 let language = language.clone();
1012 let language_registry = language_registry.clone();
1013 syntax.reparse(&text, language_registry, language);
1014 }
1015 BufferSnapshot {
1016 text,
1017 syntax,
1018 file: None,
1019 diagnostics: Default::default(),
1020 remote_selections: Default::default(),
1021 language,
1022 non_text_state_update_count: 0,
1023 }
1024 }
1025
1026 /// Retrieve a snapshot of the buffer's current state. This is computationally
1027 /// cheap, and allows reading from the buffer on a background thread.
1028 pub fn snapshot(&self) -> BufferSnapshot {
1029 let text = self.text.snapshot();
1030 let mut syntax_map = self.syntax_map.lock();
1031 syntax_map.interpolate(&text);
1032 let syntax = syntax_map.snapshot();
1033
1034 BufferSnapshot {
1035 text,
1036 syntax,
1037 file: self.file.clone(),
1038 remote_selections: self.remote_selections.clone(),
1039 diagnostics: self.diagnostics.clone(),
1040 language: self.language.clone(),
1041 non_text_state_update_count: self.non_text_state_update_count,
1042 }
1043 }
1044
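    /// Creates a branch of this buffer: a new buffer that starts from this buffer's
    /// current text and tracks it as its base. Edits made in the branch can later be
    /// merged back with [`Buffer::merge_into_base`].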
1045 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1046 let this = cx.entity();
1047 cx.new(|cx| {
1048 let mut branch = Self {
1049 branch_state: Some(BufferBranchState {
1050 base_buffer: this.clone(),
1051 merged_operations: Default::default(),
1052 }),
1053 language: self.language.clone(),
1054 has_conflict: self.has_conflict,
1055 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1056 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1057 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1058 };
1059 if let Some(language_registry) = self.language_registry() {
1060 branch.set_language_registry(language_registry);
1061 }
1062
1063 // Reparse the branch buffer so that we get syntax highlighting immediately.
1064 branch.reparse(cx);
1065
1066 branch
1067 })
1068 }
1069
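    /// Spawns a background task that applies `edits` to a temporary branch of this
    /// buffer's text and returns an [`EditPreview`] that can be used to render a
    /// syntax-highlighted preview of the result.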
1070 pub fn preview_edits(
1071 &self,
1072 edits: Arc<[(Range<Anchor>, String)]>,
1073 cx: &App,
1074 ) -> Task<EditPreview> {
1075 let registry = self.language_registry();
1076 let language = self.language().cloned();
1077 let old_snapshot = self.text.snapshot();
1078 let mut branch_buffer = self.text.branch();
1079 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1080 cx.background_spawn(async move {
1081 if !edits.is_empty() {
1082 if let Some(language) = language.clone() {
1083 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1084 }
1085
1086 branch_buffer.edit(edits.iter().cloned());
1087 let snapshot = branch_buffer.snapshot();
1088 syntax_snapshot.interpolate(&snapshot);
1089
1090 if let Some(language) = language {
1091 syntax_snapshot.reparse(&snapshot, registry, language);
1092 }
1093 }
1094 EditPreview {
1095 old_snapshot,
1096 applied_edits_snapshot: branch_buffer.snapshot(),
1097 syntax_snapshot,
1098 }
1099 })
1100 }
1101
1102 /// Applies all of the changes in this buffer that intersect any of the
1103 /// given `ranges` to its base buffer.
1104 ///
1105 /// If `ranges` is empty, then all changes will be applied. This buffer must
1106 /// be a branch buffer to call this method.
1107 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1108 let Some(base_buffer) = self.base_buffer() else {
1109 debug_panic!("not a branch buffer");
1110 return;
1111 };
1112
1113 let mut ranges = if ranges.is_empty() {
1114 &[0..usize::MAX]
1115 } else {
1116 ranges.as_slice()
1117 }
1118 .into_iter()
1119 .peekable();
1120
1121 let mut edits = Vec::new();
1122 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1123 let mut is_included = false;
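            // Advance past ranges that end before this edit. Any remaining range that
            // starts at or before the edit's end overlaps it, so the edit is included.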
1124 while let Some(range) = ranges.peek() {
1125 if range.end < edit.new.start {
1126 ranges.next().unwrap();
1127 } else {
1128 if range.start <= edit.new.end {
1129 is_included = true;
1130 }
1131 break;
1132 }
1133 }
1134
1135 if is_included {
1136 edits.push((
1137 edit.old.clone(),
1138 self.text_for_range(edit.new.clone()).collect::<String>(),
1139 ));
1140 }
1141 }
1142
1143 let operation = base_buffer.update(cx, |base_buffer, cx| {
1144 // cx.emit(BufferEvent::DiffBaseChanged);
1145 base_buffer.edit(edits, None, cx)
1146 });
1147
1148 if let Some(operation) = operation {
1149 if let Some(BufferBranchState {
1150 merged_operations, ..
1151 }) = &mut self.branch_state
1152 {
1153 merged_operations.push(operation);
1154 }
1155 }
1156 }
1157
1158 fn on_base_buffer_event(
1159 &mut self,
1160 _: Entity<Buffer>,
1161 event: &BufferEvent,
1162 cx: &mut Context<Self>,
1163 ) {
1164 let BufferEvent::Operation { operation, .. } = event else {
1165 return;
1166 };
1167 let Some(BufferBranchState {
1168 merged_operations, ..
1169 }) = &mut self.branch_state
1170 else {
1171 return;
1172 };
1173
1174 let mut operation_to_undo = None;
1175 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1176 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1177 merged_operations.remove(ix);
1178 operation_to_undo = Some(operation.timestamp);
1179 }
1180 }
1181
1182 self.apply_ops([operation.clone()], cx);
1183
1184 if let Some(timestamp) = operation_to_undo {
1185 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1186 self.undo_operations(counts, cx);
1187 }
1188 }
1189
1190 #[cfg(test)]
1191 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1192 &self.text
1193 }
1194
1195 /// Retrieve a snapshot of the buffer's raw text, without any
1196 /// language-related state like the syntax tree or diagnostics.
1197 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1198 self.text.snapshot()
1199 }
1200
1201 /// The file associated with the buffer, if any.
1202 pub fn file(&self) -> Option<&Arc<dyn File>> {
1203 self.file.as_ref()
1204 }
1205
1206 /// The version of the buffer that was last saved or reloaded from disk.
1207 pub fn saved_version(&self) -> &clock::Global {
1208 &self.saved_version
1209 }
1210
1211 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1212 pub fn saved_mtime(&self) -> Option<MTime> {
1213 self.saved_mtime
1214 }
1215
1216 /// Assign a language to the buffer.
1217 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1218 self.non_text_state_update_count += 1;
1219 self.syntax_map.lock().clear(&self.text);
1220 self.language = language;
1221 self.was_changed();
1222 self.reparse(cx);
1223 cx.emit(BufferEvent::LanguageChanged);
1224 }
1225
1226 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1227 /// other languages if parts of the buffer are written in different languages.
1228 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1229 self.syntax_map
1230 .lock()
1231 .set_language_registry(language_registry);
1232 }
1233
1234 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1235 self.syntax_map.lock().language_registry()
1236 }
1237
1238 /// Assign the buffer a new [`Capability`].
1239 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1240 self.capability = capability;
1241 cx.emit(BufferEvent::CapabilityChanged)
1242 }
1243
1244 /// This method is called to signal that the buffer has been saved.
1245 pub fn did_save(
1246 &mut self,
1247 version: clock::Global,
1248 mtime: Option<MTime>,
1249 cx: &mut Context<Self>,
1250 ) {
1251 self.saved_version = version;
1252 self.has_unsaved_edits
1253 .set((self.saved_version().clone(), false));
1254 self.has_conflict = false;
1255 self.saved_mtime = mtime;
1256 self.was_changed();
1257 cx.emit(BufferEvent::Saved);
1258 cx.notify();
1259 }
1260
1261 /// This method is called to signal that the buffer has been discarded.
1262 pub fn discarded(&self, cx: &mut Context<Self>) {
1263 cx.emit(BufferEvent::Discarded);
1264 cx.notify();
1265 }
1266
1267 /// Reloads the contents of the buffer from disk.
1268 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1269 let (tx, rx) = futures::channel::oneshot::channel();
1270 let prev_version = self.text.version();
1271 self.reload_task = Some(cx.spawn(async move |this, cx| {
1272 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1273 let file = this.file.as_ref()?.as_local()?;
1274
1275 Some((file.disk_state().mtime(), file.load(cx)))
1276 })?
1277 else {
1278 return Ok(());
1279 };
1280
1281 let new_text = new_text.await?;
1282 let diff = this
1283 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1284 .await;
1285 this.update(cx, |this, cx| {
1286 if this.version() == diff.base_version {
1287 this.finalize_last_transaction();
1288 this.apply_diff(diff, cx);
1289 tx.send(this.finalize_last_transaction().cloned()).ok();
1290 this.has_conflict = false;
1291 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1292 } else {
1293 if !diff.edits.is_empty()
1294 || this
1295 .edits_since::<usize>(&diff.base_version)
1296 .next()
1297 .is_some()
1298 {
1299 this.has_conflict = true;
1300 }
1301
1302 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1303 }
1304
1305 this.reload_task.take();
1306 })
1307 }));
1308 rx
1309 }
1310
1311 /// This method is called to signal that the buffer has been reloaded.
1312 pub fn did_reload(
1313 &mut self,
1314 version: clock::Global,
1315 line_ending: LineEnding,
1316 mtime: Option<MTime>,
1317 cx: &mut Context<Self>,
1318 ) {
1319 self.saved_version = version;
1320 self.has_unsaved_edits
1321 .set((self.saved_version.clone(), false));
1322 self.text.set_line_ending(line_ending);
1323 self.saved_mtime = mtime;
1324 cx.emit(BufferEvent::Reloaded);
1325 cx.notify();
1326 }
1327
1328 /// Updates the [`File`] backing this buffer. This should be called when
1329 /// the file has changed or has been deleted.
1330 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1331 let was_dirty = self.is_dirty();
1332 let mut file_changed = false;
1333
1334 if let Some(old_file) = self.file.as_ref() {
1335 if new_file.path() != old_file.path() {
1336 file_changed = true;
1337 }
1338
1339 let old_state = old_file.disk_state();
1340 let new_state = new_file.disk_state();
1341 if old_state != new_state {
1342 file_changed = true;
1343 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1344 cx.emit(BufferEvent::ReloadNeeded)
1345 }
1346 }
1347 } else {
1348 file_changed = true;
1349 };
1350
1351 self.file = Some(new_file);
1352 if file_changed {
1353 self.was_changed();
1354 self.non_text_state_update_count += 1;
1355 if was_dirty != self.is_dirty() {
1356 cx.emit(BufferEvent::DirtyChanged);
1357 }
1358 cx.emit(BufferEvent::FileHandleChanged);
1359 cx.notify();
1360 }
1361 }
1362
1363 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1364 Some(self.branch_state.as_ref()?.base_buffer.clone())
1365 }
1366
1367 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1368 pub fn language(&self) -> Option<&Arc<Language>> {
1369 self.language.as_ref()
1370 }
1371
1372 /// Returns the [`Language`] at the given location.
1373 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1374 let offset = position.to_offset(self);
1375 self.syntax_map
1376 .lock()
1377 .layers_for_range(offset..offset, &self.text, false)
1378 .last()
1379 .map(|info| info.language.clone())
1380 .or_else(|| self.language.clone())
1381 }
1382
1383 /// Returns each [`Language`] for the active syntax layers at the given location.
1384 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1385 let offset = position.to_offset(self);
1386 let mut languages: Vec<Arc<Language>> = self
1387 .syntax_map
1388 .lock()
1389 .layers_for_range(offset..offset, &self.text, false)
1390 .map(|info| info.language.clone())
1391 .collect();
1392
1393 if languages.is_empty() {
1394 if let Some(buffer_language) = self.language() {
1395 languages.push(buffer_language.clone());
1396 }
1397 }
1398
1399 languages
1400 }
1401
1402 /// An integer version number that accounts for all updates besides
1403 /// the buffer's text itself (which is versioned via a version vector).
1404 pub fn non_text_state_update_count(&self) -> usize {
1405 self.non_text_state_update_count
1406 }
1407
1408 /// Whether the buffer is being parsed in the background.
1409 #[cfg(any(test, feature = "test-support"))]
1410 pub fn is_parsing(&self) -> bool {
1411 self.reparse.is_some()
1412 }
1413
1414 /// Indicates whether the buffer contains any regions that may be
1415 /// written in a language that hasn't been loaded yet.
1416 pub fn contains_unknown_injections(&self) -> bool {
1417 self.syntax_map.lock().contains_unknown_injections()
1418 }
1419
1420 #[cfg(test)]
1421 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1422 self.sync_parse_timeout = timeout;
1423 }
1424
1425 /// Called after an edit to synchronize the buffer's main parse tree with
1426 /// the buffer's new underlying state.
1427 ///
1428 /// Locks the syntax map and interpolates the edits since the last reparse
1429 /// into the foreground syntax tree.
1430 ///
1431 /// Then takes a stable snapshot of the syntax map before unlocking it.
1432 /// The snapshot with the interpolated edits is sent to a background thread,
1433 /// where we ask Tree-sitter to perform an incremental parse.
1434 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete. If it finishes within that window,
    /// we proceed synchronously.
    ///
    /// If the parse times out, we spawn a second task that waits for it to
    /// complete, and we return with the interpolated tree still in the
    /// foreground. When the background parse completes, we call back into the
    /// main thread and replace the foreground parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we recursively initiate another reparse. To avoid concurrent parses for
    /// the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
1448 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1449 if self.reparse.is_some() {
1450 return;
1451 }
1452 let language = if let Some(language) = self.language.clone() {
1453 language
1454 } else {
1455 return;
1456 };
1457
1458 let text = self.text_snapshot();
1459 let parsed_version = self.version();
1460
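        // Interpolate the edits made since the last parse into the existing syntax
        // map, then take a snapshot of it to hand to the background parse task.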
1461 let mut syntax_map = self.syntax_map.lock();
1462 syntax_map.interpolate(&text);
1463 let language_registry = syntax_map.language_registry();
1464 let mut syntax_snapshot = syntax_map.snapshot();
1465 drop(syntax_map);
1466
1467 let parse_task = cx.background_spawn({
1468 let language = language.clone();
1469 let language_registry = language_registry.clone();
1470 async move {
1471 syntax_snapshot.reparse(&text, language_registry, language);
1472 syntax_snapshot
1473 }
1474 });
1475
1476 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
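        // Give the background parse up to `sync_parse_timeout` to finish, so that
        // quick parses can be applied synchronously.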
1477 match cx
1478 .background_executor()
1479 .block_with_timeout(self.sync_parse_timeout, parse_task)
1480 {
1481 Ok(new_syntax_snapshot) => {
1482 self.did_finish_parsing(new_syntax_snapshot, cx);
1483 self.reparse = None;
1484 }
1485 Err(parse_task) => {
1486 self.reparse = Some(cx.spawn(async move |this, cx| {
1487 let new_syntax_map = parse_task.await;
1488 this.update(cx, move |this, cx| {
1489 let grammar_changed =
1490 this.language.as_ref().map_or(true, |current_language| {
1491 !Arc::ptr_eq(&language, current_language)
1492 });
1493 let language_registry_changed = new_syntax_map
1494 .contains_unknown_injections()
1495 && language_registry.map_or(false, |registry| {
1496 registry.version() != new_syntax_map.language_registry_version()
1497 });
1498 let parse_again = language_registry_changed
1499 || grammar_changed
1500 || this.version.changed_since(&parsed_version);
1501 this.did_finish_parsing(new_syntax_map, cx);
1502 this.reparse = None;
1503 if parse_again {
1504 this.reparse(cx);
1505 }
1506 })
1507 .ok();
1508 }));
1509 }
1510 }
1511 }
1512
1513 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1514 self.was_changed();
1515 self.non_text_state_update_count += 1;
1516 self.syntax_map.lock().did_parse(syntax_snapshot);
1517 self.request_autoindent(cx);
1518 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1519 cx.emit(BufferEvent::Reparsed);
1520 cx.notify();
1521 }
1522
1523 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1524 self.parse_status.1.clone()
1525 }
1526
1527 /// Assign to the buffer a set of diagnostics created by a given language server.
1528 pub fn update_diagnostics(
1529 &mut self,
1530 server_id: LanguageServerId,
1531 diagnostics: DiagnosticSet,
1532 cx: &mut Context<Self>,
1533 ) {
1534 let lamport_timestamp = self.text.lamport_clock.tick();
1535 let op = Operation::UpdateDiagnostics {
1536 server_id,
1537 diagnostics: diagnostics.iter().cloned().collect(),
1538 lamport_timestamp,
1539 };
1540 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1541 self.send_operation(op, true, cx);
1542 }
1543
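    /// Returns the diagnostics assigned to this buffer by the given language server, if any.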
1544 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1545 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1546 return None;
1547 };
1548 Some(&self.diagnostics[idx].1)
1549 }
1550
1551 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1552 if let Some(indent_sizes) = self.compute_autoindents() {
1553 let indent_sizes = cx.background_spawn(indent_sizes);
1554 match cx
1555 .background_executor()
1556 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1557 {
1558 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1559 Err(indent_sizes) => {
1560 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1561 let indent_sizes = indent_sizes.await;
1562 this.update(cx, |this, cx| {
1563 this.apply_autoindents(indent_sizes, cx);
1564 })
1565 .ok();
1566 }));
1567 }
1568 }
1569 } else {
1570 self.autoindent_requests.clear();
1571 }
1572 }
1573
1574 fn compute_autoindents(
1575 &self,
1576 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1577 let max_rows_between_yields = 100;
1578 let snapshot = self.snapshot();
1579 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1580 return None;
1581 }
1582
1583 let autoindent_requests = self.autoindent_requests.clone();
1584 Some(async move {
1585 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1586 for request in autoindent_requests {
1587 // Resolve each edited range to its row in the current buffer and in the
1588 // buffer before this batch of edits.
1589 let mut row_ranges = Vec::new();
1590 let mut old_to_new_rows = BTreeMap::new();
1591 let mut language_indent_sizes_by_new_row = Vec::new();
1592 for entry in &request.entries {
1593 let position = entry.range.start;
1594 let new_row = position.to_point(&snapshot).row;
1595 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1596 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1597
1598 if !entry.first_line_is_new {
1599 let old_row = position.to_point(&request.before_edit).row;
1600 old_to_new_rows.insert(old_row, new_row);
1601 }
1602 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1603 }
1604
1605 // Build a map containing the suggested indentation for each of the edited lines
1606 // with respect to the state of the buffer before these edits. This map is keyed
1607 // by the rows for these lines in the current state of the buffer.
1608 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1609 let old_edited_ranges =
1610 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1611 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1612 let mut language_indent_size = IndentSize::default();
1613 for old_edited_range in old_edited_ranges {
1614 let suggestions = request
1615 .before_edit
1616 .suggest_autoindents(old_edited_range.clone())
1617 .into_iter()
1618 .flatten();
1619 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1620 if let Some(suggestion) = suggestion {
1621 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1622
1623 // Find the indent size based on the language for this row.
1624 while let Some((row, size)) = language_indent_sizes.peek() {
1625 if *row > new_row {
1626 break;
1627 }
1628 language_indent_size = *size;
1629 language_indent_sizes.next();
1630 }
1631
1632 let suggested_indent = old_to_new_rows
1633 .get(&suggestion.basis_row)
1634 .and_then(|from_row| {
1635 Some(old_suggestions.get(from_row).copied()?.0)
1636 })
1637 .unwrap_or_else(|| {
1638 request
1639 .before_edit
1640 .indent_size_for_line(suggestion.basis_row)
1641 })
1642 .with_delta(suggestion.delta, language_indent_size);
1643 old_suggestions
1644 .insert(new_row, (suggested_indent, suggestion.within_error));
1645 }
1646 }
1647 yield_now().await;
1648 }
1649
1650 // Compute new suggestions for each line, but only include them in the result
1651 // if they differ from the old suggestion for that line.
1652 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1653 let mut language_indent_size = IndentSize::default();
1654 for (row_range, original_indent_column) in row_ranges {
1655 let new_edited_row_range = if request.is_block_mode {
1656 row_range.start..row_range.start + 1
1657 } else {
1658 row_range.clone()
1659 };
1660
1661 let suggestions = snapshot
1662 .suggest_autoindents(new_edited_row_range.clone())
1663 .into_iter()
1664 .flatten();
1665 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1666 if let Some(suggestion) = suggestion {
1667 // Find the indent size based on the language for this row.
1668 while let Some((row, size)) = language_indent_sizes.peek() {
1669 if *row > new_row {
1670 break;
1671 }
1672 language_indent_size = *size;
1673 language_indent_sizes.next();
1674 }
1675
1676 let suggested_indent = indent_sizes
1677 .get(&suggestion.basis_row)
1678 .copied()
1679 .map(|e| e.0)
1680 .unwrap_or_else(|| {
1681 snapshot.indent_size_for_line(suggestion.basis_row)
1682 })
1683 .with_delta(suggestion.delta, language_indent_size);
1684
1685 if old_suggestions.get(&new_row).map_or(
1686 true,
1687 |(old_indentation, was_within_error)| {
1688 suggested_indent != *old_indentation
1689 && (!suggestion.within_error || *was_within_error)
1690 },
1691 ) {
1692 indent_sizes.insert(
1693 new_row,
1694 (suggested_indent, request.ignore_empty_lines),
1695 );
1696 }
1697 }
1698 }
1699
1700 if let (true, Some(original_indent_column)) =
1701 (request.is_block_mode, original_indent_column)
1702 {
1703 let new_indent =
1704 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1705 *indent
1706 } else {
1707 snapshot.indent_size_for_line(row_range.start)
1708 };
1709 let delta = new_indent.len as i64 - original_indent_column as i64;
1710 if delta != 0 {
1711 for row in row_range.skip(1) {
1712 indent_sizes.entry(row).or_insert_with(|| {
1713 let mut size = snapshot.indent_size_for_line(row);
1714 if size.kind == new_indent.kind {
1715 match delta.cmp(&0) {
1716 Ordering::Greater => size.len += delta as u32,
1717 Ordering::Less => {
1718 size.len = size.len.saturating_sub(-delta as u32)
1719 }
1720 Ordering::Equal => {}
1721 }
1722 }
1723 (size, request.ignore_empty_lines)
1724 });
1725 }
1726 }
1727 }
1728
1729 yield_now().await;
1730 }
1731 }
1732
1733 indent_sizes
1734 .into_iter()
1735 .filter_map(|(row, (indent, ignore_empty_lines))| {
1736 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1737 None
1738 } else {
1739 Some((row, indent))
1740 }
1741 })
1742 .collect()
1743 })
1744 }
1745
1746 fn apply_autoindents(
1747 &mut self,
1748 indent_sizes: BTreeMap<u32, IndentSize>,
1749 cx: &mut Context<Self>,
1750 ) {
1751 self.autoindent_requests.clear();
1752
1753 let edits: Vec<_> = indent_sizes
1754 .into_iter()
1755 .filter_map(|(row, indent_size)| {
1756 let current_size = indent_size_for_line(self, row);
1757 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1758 })
1759 .collect();
1760
1761 let preserve_preview = self.preserve_preview();
1762 self.edit(edits, None, cx);
1763 if preserve_preview {
1764 self.refresh_preview();
1765 }
1766 }
1767
1768 /// Create a minimal edit that will cause the given row to be indented
1769 /// with the given size. After applying this edit, the length of the line
1770 /// will always be at least `new_size.len`.
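///
/// A minimal sketch (illustrative, not a doctest; assumes `IndentSize::spaces`
/// builds a space-based indent whose `char()` is a space, as used elsewhere in
/// this file):
///
/// ```ignore
/// // Growing row 3 from a 2-space indent to a 4-space indent inserts two
/// // spaces at column 0 of that row.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(
///     edit,
///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
/// );
/// ```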
1771 pub fn edit_for_indent_size_adjustment(
1772 row: u32,
1773 current_size: IndentSize,
1774 new_size: IndentSize,
1775 ) -> Option<(Range<Point>, String)> {
1776 if new_size.kind == current_size.kind {
1777 match new_size.len.cmp(&current_size.len) {
1778 Ordering::Greater => {
1779 let point = Point::new(row, 0);
1780 Some((
1781 point..point,
1782 iter::repeat(new_size.char())
1783 .take((new_size.len - current_size.len) as usize)
1784 .collect::<String>(),
1785 ))
1786 }
1787
1788 Ordering::Less => Some((
1789 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1790 String::new(),
1791 )),
1792
1793 Ordering::Equal => None,
1794 }
1795 } else {
1796 Some((
1797 Point::new(row, 0)..Point::new(row, current_size.len),
1798 iter::repeat(new_size.char())
1799 .take(new_size.len as usize)
1800 .collect::<String>(),
1801 ))
1802 }
1803 }
1804
1805 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1806 /// and the given new text.
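///
/// A minimal sketch (illustrative, not a doctest; assumes `cx` is an [`App`]
/// reference and that the returned [`Task`] is awaited in an async context
/// before the diff is handed to [`Buffer::apply_diff`]):
///
/// ```ignore
/// let task = buffer.diff("new contents\n".to_string(), cx);
/// // ...later, in an async context:
/// let diff = task.await;
/// ```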
1807 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1808 let old_text = self.as_rope().clone();
1809 let base_version = self.version();
1810 cx.background_executor()
1811 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1812 let old_text = old_text.to_string();
1813 let line_ending = LineEnding::detect(&new_text);
1814 LineEnding::normalize(&mut new_text);
1815 let edits = text_diff(&old_text, &new_text);
1816 Diff {
1817 base_version,
1818 line_ending,
1819 edits,
1820 }
1821 })
1822 }
1823
1824 /// Spawns a background task that searches the buffer for any whitespace
1825 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
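///
/// A minimal sketch of the usual save-time flow (illustrative, not a doctest;
/// assumes an async context in which the returned [`Task`] can be awaited):
///
/// ```ignore
/// let task = buffer.remove_trailing_whitespace(cx);
/// // ...later, in an async context:
/// let diff = task.await;
/// buffer.apply_diff(diff, cx);
/// ```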
1826 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1827 let old_text = self.as_rope().clone();
1828 let line_ending = self.line_ending();
1829 let base_version = self.version();
1830 cx.background_spawn(async move {
1831 let ranges = trailing_whitespace_ranges(&old_text);
1832 let empty = Arc::<str>::from("");
1833 Diff {
1834 base_version,
1835 line_ending,
1836 edits: ranges
1837 .into_iter()
1838 .map(|range| (range, empty.clone()))
1839 .collect(),
1840 }
1841 })
1842 }
1843
1844 /// Ensures that the buffer ends with a single newline character, and
1845 /// no other whitespace.
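///
/// A minimal sketch (illustrative, not a doctest; assumes `buffer` is a
/// `&mut Buffer` and `cx` a `&mut Context<Buffer>`):
///
/// ```ignore
/// buffer.set_text("fn main() {}  \n\n", cx);
/// buffer.ensure_final_newline(cx);
/// assert_eq!(buffer.text(), "fn main() {}\n");
/// ```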
1846 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1847 let len = self.len();
1848 let mut offset = len;
1849 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1850 let non_whitespace_len = chunk
1851 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1852 .len();
1853 offset -= chunk.len();
1854 offset += non_whitespace_len;
1855 if non_whitespace_len != 0 {
1856 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1857 return;
1858 }
1859 break;
1860 }
1861 }
1862 self.edit([(offset..len, "\n")], None, cx);
1863 }
1864
1865 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1866 /// calculated, then adjust the diff to account for those changes, and discard any
1867 /// parts of the diff that conflict with those changes.
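///
/// A minimal sketch (illustrative, not a doctest; assumes `diff` was produced
/// earlier by [`Buffer::diff`] or [`Buffer::remove_trailing_whitespace`]):
///
/// ```ignore
/// // Hunks that conflict with edits made since `diff` was computed are
/// // silently dropped; the remaining hunks are applied in one transaction.
/// if let Some(transaction_id) = buffer.apply_diff(diff, cx) {
///     // The transaction can later be undone as a unit.
///     let _ = transaction_id;
/// }
/// ```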
1868 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1869 let snapshot = self.snapshot();
1870 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1871 let mut delta = 0;
1872 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1873 while let Some(edit_since) = edits_since.peek() {
1874 // If the edit occurs after a diff hunk, then it does not
1875 // affect that hunk.
1876 if edit_since.old.start > range.end {
1877 break;
1878 }
1879 // If the edit precedes the diff hunk, then adjust the hunk
1880 // to reflect the edit.
1881 else if edit_since.old.end < range.start {
1882 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1883 edits_since.next();
1884 }
1885 // If the edit intersects a diff hunk, then discard that hunk.
1886 else {
1887 return None;
1888 }
1889 }
1890
1891 let start = (range.start as i64 + delta) as usize;
1892 let end = (range.end as i64 + delta) as usize;
1893 Some((start..end, new_text))
1894 });
1895
1896 self.start_transaction();
1897 self.text.set_line_ending(diff.line_ending);
1898 self.edit(adjusted_edits, None, cx);
1899 self.end_transaction(cx)
1900 }
1901
1902 fn has_unsaved_edits(&self) -> bool {
1903 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1904
1905 if last_version == self.version {
1906 self.has_unsaved_edits
1907 .set((last_version, has_unsaved_edits));
1908 return has_unsaved_edits;
1909 }
1910
1911 let has_edits = self.has_edits_since(&self.saved_version);
1912 self.has_unsaved_edits
1913 .set((self.version.clone(), has_edits));
1914 has_edits
1915 }
1916
1917 /// Checks if the buffer has unsaved changes.
1918 pub fn is_dirty(&self) -> bool {
1919 if self.capability == Capability::ReadOnly {
1920 return false;
1921 }
1922 if self.has_conflict {
1923 return true;
1924 }
1925 match self.file.as_ref().map(|f| f.disk_state()) {
1926 Some(DiskState::New) | Some(DiskState::Deleted) => {
1927 !self.is_empty() && self.has_unsaved_edits()
1928 }
1929 _ => self.has_unsaved_edits(),
1930 }
1931 }
1932
1933 /// Checks if the buffer and its file have both changed since the buffer
1934 /// was last saved or reloaded.
1935 pub fn has_conflict(&self) -> bool {
1936 if self.has_conflict {
1937 return true;
1938 }
1939 let Some(file) = self.file.as_ref() else {
1940 return false;
1941 };
1942 match file.disk_state() {
1943 DiskState::New => false,
1944 DiskState::Present { mtime } => match self.saved_mtime {
1945 Some(saved_mtime) => {
1946 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1947 }
1948 None => true,
1949 },
1950 DiskState::Deleted => false,
1951 }
1952 }
1953
1954 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1955 pub fn subscribe(&mut self) -> Subscription {
1956 self.text.subscribe()
1957 }
1958
1959 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1960 ///
1961 /// This allows downstream code to check if the buffer's text has changed without
1962 /// waiting for an effect cycle, which would be required if using events.
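///
/// A minimal sketch (illustrative, not a doctest; the caller must keep the
/// strong `Rc` alive for as long as it wants to observe changes):
///
/// ```ignore
/// use std::{cell::Cell, rc::Rc};
///
/// let changed = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&changed));
/// buffer.edit([(0..0, "x")], None, cx);
/// assert!(changed.get());
/// ```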
1963 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
1964 if let Err(ix) = self
1965 .change_bits
1966 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
1967 {
1968 self.change_bits.insert(ix, bit);
1969 }
1970 }
1971
1972 fn was_changed(&mut self) {
1973 self.change_bits.retain(|change_bit| {
1974 change_bit.upgrade().map_or(false, |bit| {
1975 bit.replace(true);
1976 true
1977 })
1978 });
1979 }
1980
1981 /// Starts a transaction, if one is not already in-progress. When undoing or
1982 /// redoing edits, all of the edits performed within a transaction are undone
1983 /// or redone together.
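///
/// A minimal sketch of grouping edits so that they undo together (illustrative,
/// not a doctest; assumes `buffer` is a `&mut Buffer` and `cx` a
/// `&mut Context<Buffer>`):
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "// header\n")], None, cx);
/// buffer.edit([(buffer.len()..buffer.len(), "// footer\n")], None, cx);
/// buffer.end_transaction(cx);
/// // A single undo now reverts both edits.
/// buffer.undo(cx);
/// ```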
1984 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1985 self.start_transaction_at(Instant::now())
1986 }
1987
1988 /// Starts a transaction, providing the current time. Subsequent transactions
1989 /// that occur within a short period of time will be grouped together. This
1990 /// is controlled by the buffer's undo grouping duration.
1991 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1992 self.transaction_depth += 1;
1993 if self.was_dirty_before_starting_transaction.is_none() {
1994 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1995 }
1996 self.text.start_transaction_at(now)
1997 }
1998
1999 /// Terminates the current transaction, if this is the outermost transaction.
2000 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2001 self.end_transaction_at(Instant::now(), cx)
2002 }
2003
2004 /// Terminates the current transaction, providing the current time. Subsequent transactions
2005 /// that occur within a short period of time will be grouped together. This
2006 /// is controlled by the buffer's undo grouping duration.
2007 pub fn end_transaction_at(
2008 &mut self,
2009 now: Instant,
2010 cx: &mut Context<Self>,
2011 ) -> Option<TransactionId> {
2012 assert!(self.transaction_depth > 0);
2013 self.transaction_depth -= 1;
2014 let was_dirty = if self.transaction_depth == 0 {
2015 self.was_dirty_before_starting_transaction.take().unwrap()
2016 } else {
2017 false
2018 };
2019 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2020 self.did_edit(&start_version, was_dirty, cx);
2021 Some(transaction_id)
2022 } else {
2023 None
2024 }
2025 }
2026
2027 /// Manually add a transaction to the buffer's undo history.
2028 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2029 self.text.push_transaction(transaction, now);
2030 }
2031
2032 /// Prevent the last transaction from being grouped with any subsequent transactions,
2033 /// even if they occur within the buffer's undo grouping duration.
2034 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2035 self.text.finalize_last_transaction()
2036 }
2037
2038 /// Manually group all changes since a given transaction.
2039 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2040 self.text.group_until_transaction(transaction_id);
2041 }
2042
2043 /// Manually remove a transaction from the buffer's undo history.
2044 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2045 self.text.forget_transaction(transaction_id)
2046 }
2047
2048 /// Retrieve a transaction from the buffer's undo history.
2049 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2050 self.text.get_transaction(transaction_id)
2051 }
2052
2053 /// Manually merge two transactions in the buffer's undo history.
2054 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2055 self.text.merge_transactions(transaction, destination);
2056 }
2057
2058 /// Waits for the buffer to receive operations with the given timestamps.
2059 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2060 &mut self,
2061 edit_ids: It,
2062 ) -> impl Future<Output = Result<()>> + use<It> {
2063 self.text.wait_for_edits(edit_ids)
2064 }
2065
2066 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2067 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2068 &mut self,
2069 anchors: It,
2070 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2071 self.text.wait_for_anchors(anchors)
2072 }
2073
2074 /// Waits for the buffer to receive operations up to the given version.
2075 pub fn wait_for_version(
2076 &mut self,
2077 version: clock::Global,
2078 ) -> impl Future<Output = Result<()>> + use<> {
2079 self.text.wait_for_version(version)
2080 }
2081
2082 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2083 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2084 pub fn give_up_waiting(&mut self) {
2085 self.text.give_up_waiting();
2086 }
2087
2088 /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2089 pub fn set_active_selections(
2090 &mut self,
2091 selections: Arc<[Selection<Anchor>]>,
2092 line_mode: bool,
2093 cursor_shape: CursorShape,
2094 cx: &mut Context<Self>,
2095 ) {
2096 let lamport_timestamp = self.text.lamport_clock.tick();
2097 self.remote_selections.insert(
2098 self.text.replica_id(),
2099 SelectionSet {
2100 selections: selections.clone(),
2101 lamport_timestamp,
2102 line_mode,
2103 cursor_shape,
2104 },
2105 );
2106 self.send_operation(
2107 Operation::UpdateSelections {
2108 selections,
2109 line_mode,
2110 lamport_timestamp,
2111 cursor_shape,
2112 },
2113 true,
2114 cx,
2115 );
2116 self.non_text_state_update_count += 1;
2117 cx.notify();
2118 }
2119
2120 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2121 /// this replica.
2122 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2123 if self
2124 .remote_selections
2125 .get(&self.text.replica_id())
2126 .map_or(true, |set| !set.selections.is_empty())
2127 {
2128 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2129 }
2130 }
2131
2132 /// Replaces the buffer's entire text.
2133 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2134 where
2135 T: Into<Arc<str>>,
2136 {
2137 self.autoindent_requests.clear();
2138 self.edit([(0..self.len(), text)], None, cx)
2139 }
2140
2141 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2142 /// delete, and a string of text to insert at that location.
2143 ///
2144 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2145 /// request for the edited ranges, which will be processed when the buffer finishes
2146 /// parsing.
2147 ///
2148 /// Parsing takes place at the end of a transaction, and may compute synchronously
2149 /// or asynchronously, depending on the changes.
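///
/// A minimal sketch (illustrative, not a doctest; the offsets refer to a buffer
/// containing `"let x = 1;"`):
///
/// ```ignore
/// // Replace `1` with `42` and append a newline, auto-indenting each edited
/// // line according to the buffer's language.
/// buffer.edit(
///     [(8..9, "42"), (10..10, "\n")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```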
2150 pub fn edit<I, S, T>(
2151 &mut self,
2152 edits_iter: I,
2153 autoindent_mode: Option<AutoindentMode>,
2154 cx: &mut Context<Self>,
2155 ) -> Option<clock::Lamport>
2156 where
2157 I: IntoIterator<Item = (Range<S>, T)>,
2158 S: ToOffset,
2159 T: Into<Arc<str>>,
2160 {
2161 // Skip invalid edits and coalesce contiguous ones.
2162 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2163
2164 for (range, new_text) in edits_iter {
2165 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2166
2167 if range.start > range.end {
2168 mem::swap(&mut range.start, &mut range.end);
2169 }
2170 let new_text = new_text.into();
2171 if !new_text.is_empty() || !range.is_empty() {
2172 if let Some((prev_range, prev_text)) = edits.last_mut() {
2173 if prev_range.end >= range.start {
2174 prev_range.end = cmp::max(prev_range.end, range.end);
2175 *prev_text = format!("{prev_text}{new_text}").into();
2176 } else {
2177 edits.push((range, new_text));
2178 }
2179 } else {
2180 edits.push((range, new_text));
2181 }
2182 }
2183 }
2184 if edits.is_empty() {
2185 return None;
2186 }
2187
2188 self.start_transaction();
2189 self.pending_autoindent.take();
2190 let autoindent_request = autoindent_mode
2191 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2192
2193 let edit_operation = self.text.edit(edits.iter().cloned());
2194 let edit_id = edit_operation.timestamp();
2195
2196 if let Some((before_edit, mode)) = autoindent_request {
2197 let mut delta = 0isize;
2198 let entries = edits
2199 .into_iter()
2200 .enumerate()
2201 .zip(&edit_operation.as_edit().unwrap().new_text)
2202 .map(|((ix, (range, _)), new_text)| {
2203 let new_text_length = new_text.len();
2204 let old_start = range.start.to_point(&before_edit);
2205 let new_start = (delta + range.start as isize) as usize;
2206 let range_len = range.end - range.start;
2207 delta += new_text_length as isize - range_len as isize;
2208
2209 // Decide what range of the insertion to auto-indent, and whether
2210 // the first line of the insertion should be considered a newly-inserted line
2211 // or an edit to an existing line.
2212 let mut range_of_insertion_to_indent = 0..new_text_length;
2213 let mut first_line_is_new = true;
2214
2215 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2216 let old_line_end = before_edit.line_len(old_start.row);
2217
2218 if old_start.column > old_line_start {
2219 first_line_is_new = false;
2220 }
2221
2222 if !new_text.contains('\n')
2223 && (old_start.column + (range_len as u32) < old_line_end
2224 || old_line_end == old_line_start)
2225 {
2226 first_line_is_new = false;
2227 }
2228
2229 // When inserting text starting with a newline, avoid auto-indenting the
2230 // previous line.
2231 if new_text.starts_with('\n') {
2232 range_of_insertion_to_indent.start += 1;
2233 first_line_is_new = true;
2234 }
2235
2236 let mut original_indent_column = None;
2237 if let AutoindentMode::Block {
2238 original_indent_columns,
2239 } = &mode
2240 {
2241 original_indent_column = Some(if new_text.starts_with('\n') {
2242 indent_size_for_text(
2243 new_text[range_of_insertion_to_indent.clone()].chars(),
2244 )
2245 .len
2246 } else {
2247 original_indent_columns
2248 .get(ix)
2249 .copied()
2250 .flatten()
2251 .unwrap_or_else(|| {
2252 indent_size_for_text(
2253 new_text[range_of_insertion_to_indent.clone()].chars(),
2254 )
2255 .len
2256 })
2257 });
2258
2259 // Avoid auto-indenting the line after the edit.
2260 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2261 range_of_insertion_to_indent.end -= 1;
2262 }
2263 }
2264
2265 AutoindentRequestEntry {
2266 first_line_is_new,
2267 original_indent_column,
2268 indent_size: before_edit.language_indent_size_at(range.start, cx),
2269 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2270 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2271 }
2272 })
2273 .collect();
2274
2275 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2276 before_edit,
2277 entries,
2278 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2279 ignore_empty_lines: false,
2280 }));
2281 }
2282
2283 self.end_transaction(cx);
2284 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2285 Some(edit_id)
2286 }
2287
2288 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2289 self.was_changed();
2290
2291 if self.edits_since::<usize>(old_version).next().is_none() {
2292 return;
2293 }
2294
2295 self.reparse(cx);
2296 cx.emit(BufferEvent::Edited);
2297 if was_dirty != self.is_dirty() {
2298 cx.emit(BufferEvent::DirtyChanged);
2299 }
2300 cx.notify();
2301 }
2302
2303 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2304 where
2305 I: IntoIterator<Item = Range<T>>,
2306 T: ToOffset + Copy,
2307 {
2308 let before_edit = self.snapshot();
2309 let entries = ranges
2310 .into_iter()
2311 .map(|range| AutoindentRequestEntry {
2312 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2313 first_line_is_new: true,
2314 indent_size: before_edit.language_indent_size_at(range.start, cx),
2315 original_indent_column: None,
2316 })
2317 .collect();
2318 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2319 before_edit,
2320 entries,
2321 is_block_mode: false,
2322 ignore_empty_lines: true,
2323 }));
2324 self.request_autoindent(cx);
2325 }
2326
2327 /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2328 /// You can also request the insertion of empty lines above and below the line starting at the returned point.
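///
/// A minimal sketch (illustrative, not a doctest; assumes `buffer` is a
/// `&mut Buffer` and `cx` a `&mut Context<Buffer>`):
///
/// ```ignore
/// // Open a blank line near row 2, padded with blank lines above and below,
/// // and get back the position where the cursor should land.
/// let cursor = buffer.insert_empty_line(Point::new(2, 4), true, true, cx);
/// ```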
2329 pub fn insert_empty_line(
2330 &mut self,
2331 position: impl ToPoint,
2332 space_above: bool,
2333 space_below: bool,
2334 cx: &mut Context<Self>,
2335 ) -> Point {
2336 let mut position = position.to_point(self);
2337
2338 self.start_transaction();
2339
2340 self.edit(
2341 [(position..position, "\n")],
2342 Some(AutoindentMode::EachLine),
2343 cx,
2344 );
2345
2346 if position.column > 0 {
2347 position += Point::new(1, 0);
2348 }
2349
2350 if !self.is_line_blank(position.row) {
2351 self.edit(
2352 [(position..position, "\n")],
2353 Some(AutoindentMode::EachLine),
2354 cx,
2355 );
2356 }
2357
2358 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2359 self.edit(
2360 [(position..position, "\n")],
2361 Some(AutoindentMode::EachLine),
2362 cx,
2363 );
2364 position.row += 1;
2365 }
2366
2367 if space_below
2368 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2369 {
2370 self.edit(
2371 [(position..position, "\n")],
2372 Some(AutoindentMode::EachLine),
2373 cx,
2374 );
2375 }
2376
2377 self.end_transaction(cx);
2378
2379 position
2380 }
2381
2382 /// Applies the given remote operations to the buffer.
2383 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2384 self.pending_autoindent.take();
2385 let was_dirty = self.is_dirty();
2386 let old_version = self.version.clone();
2387 let mut deferred_ops = Vec::new();
2388 let buffer_ops = ops
2389 .into_iter()
2390 .filter_map(|op| match op {
2391 Operation::Buffer(op) => Some(op),
2392 _ => {
2393 if self.can_apply_op(&op) {
2394 self.apply_op(op, cx);
2395 } else {
2396 deferred_ops.push(op);
2397 }
2398 None
2399 }
2400 })
2401 .collect::<Vec<_>>();
2402 for operation in buffer_ops.iter() {
2403 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2404 }
2405 self.text.apply_ops(buffer_ops);
2406 self.deferred_ops.insert(deferred_ops);
2407 self.flush_deferred_ops(cx);
2408 self.did_edit(&old_version, was_dirty, cx);
2409 // Notify independently of whether the buffer was edited as the operations could include a
2410 // selection update.
2411 cx.notify();
2412 }
2413
2414 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2415 let mut deferred_ops = Vec::new();
2416 for op in self.deferred_ops.drain().iter().cloned() {
2417 if self.can_apply_op(&op) {
2418 self.apply_op(op, cx);
2419 } else {
2420 deferred_ops.push(op);
2421 }
2422 }
2423 self.deferred_ops.insert(deferred_ops);
2424 }
2425
2426 pub fn has_deferred_ops(&self) -> bool {
2427 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2428 }
2429
2430 fn can_apply_op(&self, operation: &Operation) -> bool {
2431 match operation {
2432 Operation::Buffer(_) => {
2433 unreachable!("buffer operations should never be applied at this layer")
2434 }
2435 Operation::UpdateDiagnostics {
2436 diagnostics: diagnostic_set,
2437 ..
2438 } => diagnostic_set.iter().all(|diagnostic| {
2439 self.text.can_resolve(&diagnostic.range.start)
2440 && self.text.can_resolve(&diagnostic.range.end)
2441 }),
2442 Operation::UpdateSelections { selections, .. } => selections
2443 .iter()
2444 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2445 Operation::UpdateCompletionTriggers { .. } => true,
2446 }
2447 }
2448
2449 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2450 match operation {
2451 Operation::Buffer(_) => {
2452 unreachable!("buffer operations should never be applied at this layer")
2453 }
2454 Operation::UpdateDiagnostics {
2455 server_id,
2456 diagnostics: diagnostic_set,
2457 lamport_timestamp,
2458 } => {
2459 let snapshot = self.snapshot();
2460 self.apply_diagnostic_update(
2461 server_id,
2462 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2463 lamport_timestamp,
2464 cx,
2465 );
2466 }
2467 Operation::UpdateSelections {
2468 selections,
2469 lamport_timestamp,
2470 line_mode,
2471 cursor_shape,
2472 } => {
2473 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2474 if set.lamport_timestamp > lamport_timestamp {
2475 return;
2476 }
2477 }
2478
2479 self.remote_selections.insert(
2480 lamport_timestamp.replica_id,
2481 SelectionSet {
2482 selections,
2483 lamport_timestamp,
2484 line_mode,
2485 cursor_shape,
2486 },
2487 );
2488 self.text.lamport_clock.observe(lamport_timestamp);
2489 self.non_text_state_update_count += 1;
2490 }
2491 Operation::UpdateCompletionTriggers {
2492 triggers,
2493 lamport_timestamp,
2494 server_id,
2495 } => {
2496 if triggers.is_empty() {
2497 self.completion_triggers_per_language_server
2498 .remove(&server_id);
2499 self.completion_triggers = self
2500 .completion_triggers_per_language_server
2501 .values()
2502 .flat_map(|triggers| triggers.into_iter().cloned())
2503 .collect();
2504 } else {
2505 self.completion_triggers_per_language_server
2506 .insert(server_id, triggers.iter().cloned().collect());
2507 self.completion_triggers.extend(triggers);
2508 }
2509 self.text.lamport_clock.observe(lamport_timestamp);
2510 }
2511 }
2512 }
2513
2514 fn apply_diagnostic_update(
2515 &mut self,
2516 server_id: LanguageServerId,
2517 diagnostics: DiagnosticSet,
2518 lamport_timestamp: clock::Lamport,
2519 cx: &mut Context<Self>,
2520 ) {
2521 if lamport_timestamp > self.diagnostics_timestamp {
2522 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2523 if diagnostics.is_empty() {
2524 if let Ok(ix) = ix {
2525 self.diagnostics.remove(ix);
2526 }
2527 } else {
2528 match ix {
2529 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2530 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2531 };
2532 }
2533 self.diagnostics_timestamp = lamport_timestamp;
2534 self.non_text_state_update_count += 1;
2535 self.text.lamport_clock.observe(lamport_timestamp);
2536 cx.notify();
2537 cx.emit(BufferEvent::DiagnosticsUpdated);
2538 }
2539 }
2540
2541 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2542 self.was_changed();
2543 cx.emit(BufferEvent::Operation {
2544 operation,
2545 is_local,
2546 });
2547 }
2548
2549 /// Removes the selections for a given peer.
2550 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2551 self.remote_selections.remove(&replica_id);
2552 cx.notify();
2553 }
2554
2555 /// Undoes the most recent transaction.
2556 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2557 let was_dirty = self.is_dirty();
2558 let old_version = self.version.clone();
2559
2560 if let Some((transaction_id, operation)) = self.text.undo() {
2561 self.send_operation(Operation::Buffer(operation), true, cx);
2562 self.did_edit(&old_version, was_dirty, cx);
2563 Some(transaction_id)
2564 } else {
2565 None
2566 }
2567 }
2568
2569 /// Manually undoes a specific transaction in the buffer's undo history.
2570 pub fn undo_transaction(
2571 &mut self,
2572 transaction_id: TransactionId,
2573 cx: &mut Context<Self>,
2574 ) -> bool {
2575 let was_dirty = self.is_dirty();
2576 let old_version = self.version.clone();
2577 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2578 self.send_operation(Operation::Buffer(operation), true, cx);
2579 self.did_edit(&old_version, was_dirty, cx);
2580 true
2581 } else {
2582 false
2583 }
2584 }
2585
2586 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2587 pub fn undo_to_transaction(
2588 &mut self,
2589 transaction_id: TransactionId,
2590 cx: &mut Context<Self>,
2591 ) -> bool {
2592 let was_dirty = self.is_dirty();
2593 let old_version = self.version.clone();
2594
2595 let operations = self.text.undo_to_transaction(transaction_id);
2596 let undone = !operations.is_empty();
2597 for operation in operations {
2598 self.send_operation(Operation::Buffer(operation), true, cx);
2599 }
2600 if undone {
2601 self.did_edit(&old_version, was_dirty, cx)
2602 }
2603 undone
2604 }
2605
2606 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2607 let was_dirty = self.is_dirty();
2608 let operation = self.text.undo_operations(counts);
2609 let old_version = self.version.clone();
2610 self.send_operation(Operation::Buffer(operation), true, cx);
2611 self.did_edit(&old_version, was_dirty, cx);
2612 }
2613
2614 /// Redoes the most recent transaction.
2615 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2616 let was_dirty = self.is_dirty();
2617 let old_version = self.version.clone();
2618
2619 if let Some((transaction_id, operation)) = self.text.redo() {
2620 self.send_operation(Operation::Buffer(operation), true, cx);
2621 self.did_edit(&old_version, was_dirty, cx);
2622 Some(transaction_id)
2623 } else {
2624 None
2625 }
2626 }
2627
2628 /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2629 pub fn redo_to_transaction(
2630 &mut self,
2631 transaction_id: TransactionId,
2632 cx: &mut Context<Self>,
2633 ) -> bool {
2634 let was_dirty = self.is_dirty();
2635 let old_version = self.version.clone();
2636
2637 let operations = self.text.redo_to_transaction(transaction_id);
2638 let redone = !operations.is_empty();
2639 for operation in operations {
2640 self.send_operation(Operation::Buffer(operation), true, cx);
2641 }
2642 if redone {
2643 self.did_edit(&old_version, was_dirty, cx)
2644 }
2645 redone
2646 }
2647
2648 /// Override current completion triggers with the user-provided completion triggers.
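///
/// A minimal sketch (illustrative, not a doctest; assumes `server_id` is the
/// [`LanguageServerId`] of the server that reported the triggers):
///
/// ```ignore
/// use std::collections::BTreeSet;
///
/// let triggers = BTreeSet::from([".".to_string(), "::".to_string()]);
/// buffer.set_completion_triggers(server_id, triggers, cx);
/// ```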
2649 pub fn set_completion_triggers(
2650 &mut self,
2651 server_id: LanguageServerId,
2652 triggers: BTreeSet<String>,
2653 cx: &mut Context<Self>,
2654 ) {
2655 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2656 if triggers.is_empty() {
2657 self.completion_triggers_per_language_server
2658 .remove(&server_id);
2659 self.completion_triggers = self
2660 .completion_triggers_per_language_server
2661 .values()
2662 .flat_map(|triggers| triggers.into_iter().cloned())
2663 .collect();
2664 } else {
2665 self.completion_triggers_per_language_server
2666 .insert(server_id, triggers.clone());
2667 self.completion_triggers.extend(triggers.iter().cloned());
2668 }
2669 self.send_operation(
2670 Operation::UpdateCompletionTriggers {
2671 triggers: triggers.iter().cloned().collect(),
2672 lamport_timestamp: self.completion_triggers_timestamp,
2673 server_id,
2674 },
2675 true,
2676 cx,
2677 );
2678 cx.notify();
2679 }
2680
2681 /// Returns a list of strings which trigger a completion menu for this language.
2682 /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2683 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2684 &self.completion_triggers
2685 }
2686
2687 /// Call this directly after performing edits to prevent the preview tab
2688 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2689 /// to return false until there are additional edits.
2690 pub fn refresh_preview(&mut self) {
2691 self.preview_version = self.version.clone();
2692 }
2693
2694 /// Whether we should preserve the preview status of a tab containing this buffer.
2695 pub fn preserve_preview(&self) -> bool {
2696 !self.has_edits_since(&self.preview_version)
2697 }
2698}
2699
2700#[doc(hidden)]
2701#[cfg(any(test, feature = "test-support"))]
2702impl Buffer {
2703 pub fn edit_via_marked_text(
2704 &mut self,
2705 marked_string: &str,
2706 autoindent_mode: Option<AutoindentMode>,
2707 cx: &mut Context<Self>,
2708 ) {
2709 let edits = self.edits_for_marked_text(marked_string);
2710 self.edit(edits, autoindent_mode, cx);
2711 }
2712
2713 pub fn set_group_interval(&mut self, group_interval: Duration) {
2714 self.text.set_group_interval(group_interval);
2715 }
2716
2717 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2718 where
2719 T: rand::Rng,
2720 {
2721 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2722 let mut last_end = None;
2723 for _ in 0..old_range_count {
2724 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2725 break;
2726 }
2727
2728 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2729 let mut range = self.random_byte_range(new_start, rng);
2730 if rng.gen_bool(0.2) {
2731 mem::swap(&mut range.start, &mut range.end);
2732 }
2733 last_end = Some(range.end);
2734
2735 let new_text_len = rng.gen_range(0..10);
2736 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2737 new_text = new_text.to_uppercase();
2738
2739 edits.push((range, new_text));
2740 }
2741 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2742 self.edit(edits, None, cx);
2743 }
2744
2745 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2746 let was_dirty = self.is_dirty();
2747 let old_version = self.version.clone();
2748
2749 let ops = self.text.randomly_undo_redo(rng);
2750 if !ops.is_empty() {
2751 for op in ops {
2752 self.send_operation(Operation::Buffer(op), true, cx);
2753 self.did_edit(&old_version, was_dirty, cx);
2754 }
2755 }
2756 }
2757}
2758
2759impl EventEmitter<BufferEvent> for Buffer {}
2760
2761impl Deref for Buffer {
2762 type Target = TextBuffer;
2763
2764 fn deref(&self) -> &Self::Target {
2765 &self.text
2766 }
2767}
2768
2769impl BufferSnapshot {
2770 /// Returns [`IndentSize`] for a given line that respects user settings and
2771 /// language preferences.
2772 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2773 indent_size_for_line(self, row)
2774 }
2775
2776 /// Returns [`IndentSize`] for a given position that respects user settings
2777 /// and language preferences.
2778 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2779 let settings = language_settings(
2780 self.language_at(position).map(|l| l.name()),
2781 self.file(),
2782 cx,
2783 );
2784 if settings.hard_tabs {
2785 IndentSize::tab()
2786 } else {
2787 IndentSize::spaces(settings.tab_size.get())
2788 }
2789 }
2790
2791 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2792 /// is passed in as `single_indent_size`.
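///
/// A minimal sketch (illustrative, not a doctest; assumes `snapshot` is a
/// parsed [`BufferSnapshot`]):
///
/// ```ignore
/// // Suggest indents for rows 0 through 4, using four spaces per indent level.
/// let indents = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
/// for (row, indent) in indents {
///     println!("row {row}: {} columns", indent.len);
/// }
/// ```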
2793 pub fn suggested_indents(
2794 &self,
2795 rows: impl Iterator<Item = u32>,
2796 single_indent_size: IndentSize,
2797 ) -> BTreeMap<u32, IndentSize> {
2798 let mut result = BTreeMap::new();
2799
2800 for row_range in contiguous_ranges(rows, 10) {
2801 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2802 Some(suggestions) => suggestions,
2803 _ => break,
2804 };
2805
2806 for (row, suggestion) in row_range.zip(suggestions) {
2807 let indent_size = if let Some(suggestion) = suggestion {
2808 result
2809 .get(&suggestion.basis_row)
2810 .copied()
2811 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2812 .with_delta(suggestion.delta, single_indent_size)
2813 } else {
2814 self.indent_size_for_line(row)
2815 };
2816
2817 result.insert(row, indent_size);
2818 }
2819 }
2820
2821 result
2822 }
2823
2824 fn suggest_autoindents(
2825 &self,
2826 row_range: Range<u32>,
2827 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2828 let config = &self.language.as_ref()?.config;
2829 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2830
2831 // Find the suggested indentation ranges based on the syntax tree.
2832 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2833 let end = Point::new(row_range.end, 0);
2834 let range = (start..end).to_offset(&self.text);
2835 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2836 Some(&grammar.indents_config.as_ref()?.query)
2837 });
2838 let indent_configs = matches
2839 .grammars()
2840 .iter()
2841 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2842 .collect::<Vec<_>>();
2843
2844 let mut indent_ranges = Vec::<Range<Point>>::new();
2845 let mut outdent_positions = Vec::<Point>::new();
2846 while let Some(mat) = matches.peek() {
2847 let mut start: Option<Point> = None;
2848 let mut end: Option<Point> = None;
2849
2850 let config = &indent_configs[mat.grammar_index];
2851 for capture in mat.captures {
2852 if capture.index == config.indent_capture_ix {
2853 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2854 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2855 } else if Some(capture.index) == config.start_capture_ix {
2856 start = Some(Point::from_ts_point(capture.node.end_position()));
2857 } else if Some(capture.index) == config.end_capture_ix {
2858 end = Some(Point::from_ts_point(capture.node.start_position()));
2859 } else if Some(capture.index) == config.outdent_capture_ix {
2860 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2861 }
2862 }
2863
2864 matches.advance();
2865 if let Some((start, end)) = start.zip(end) {
2866 if start.row == end.row {
2867 continue;
2868 }
2869
2870 let range = start..end;
2871 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2872 Err(ix) => indent_ranges.insert(ix, range),
2873 Ok(ix) => {
2874 let prev_range = &mut indent_ranges[ix];
2875 prev_range.end = prev_range.end.max(range.end);
2876 }
2877 }
2878 }
2879 }
2880
2881 let mut error_ranges = Vec::<Range<Point>>::new();
2882 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2883 grammar.error_query.as_ref()
2884 });
2885 while let Some(mat) = matches.peek() {
2886 let node = mat.captures[0].node;
2887 let start = Point::from_ts_point(node.start_position());
2888 let end = Point::from_ts_point(node.end_position());
2889 let range = start..end;
2890 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2891 Ok(ix) | Err(ix) => ix,
2892 };
2893 let mut end_ix = ix;
2894 while let Some(existing_range) = error_ranges.get(end_ix) {
2895 if existing_range.end < end {
2896 end_ix += 1;
2897 } else {
2898 break;
2899 }
2900 }
2901 error_ranges.splice(ix..end_ix, [range]);
2902 matches.advance();
2903 }
2904
2905 outdent_positions.sort();
2906 for outdent_position in outdent_positions {
2907 // find the innermost indent range containing this outdent_position
2908 // set its end to the outdent position
2909 if let Some(range_to_truncate) = indent_ranges
2910 .iter_mut()
2911 .filter(|indent_range| indent_range.contains(&outdent_position))
2912 .next_back()
2913 {
2914 range_to_truncate.end = outdent_position;
2915 }
2916 }
2917
2918 // Find the suggested indentation increases and decreases based on regexes.
2919 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2920 self.for_each_line(
2921 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2922 ..Point::new(row_range.end, 0),
2923 |row, line| {
2924 if config
2925 .decrease_indent_pattern
2926 .as_ref()
2927 .map_or(false, |regex| regex.is_match(line))
2928 {
2929 indent_change_rows.push((row, Ordering::Less));
2930 }
2931 if config
2932 .increase_indent_pattern
2933 .as_ref()
2934 .map_or(false, |regex| regex.is_match(line))
2935 {
2936 indent_change_rows.push((row + 1, Ordering::Greater));
2937 }
2938 },
2939 );
2940
2941 let mut indent_changes = indent_change_rows.into_iter().peekable();
2942 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2943 prev_non_blank_row.unwrap_or(0)
2944 } else {
2945 row_range.start.saturating_sub(1)
2946 };
2947 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2948 Some(row_range.map(move |row| {
2949 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2950
2951 let mut indent_from_prev_row = false;
2952 let mut outdent_from_prev_row = false;
2953 let mut outdent_to_row = u32::MAX;
2954 let mut from_regex = false;
2955
2956 while let Some((indent_row, delta)) = indent_changes.peek() {
2957 match indent_row.cmp(&row) {
2958 Ordering::Equal => match delta {
2959 Ordering::Less => {
2960 from_regex = true;
2961 outdent_from_prev_row = true
2962 }
2963 Ordering::Greater => {
2964 indent_from_prev_row = true;
2965 from_regex = true
2966 }
2967 _ => {}
2968 },
2969
2970 Ordering::Greater => break,
2971 Ordering::Less => {}
2972 }
2973
2974 indent_changes.next();
2975 }
2976
2977 for range in &indent_ranges {
2978 if range.start.row >= row {
2979 break;
2980 }
2981 if range.start.row == prev_row && range.end > row_start {
2982 indent_from_prev_row = true;
2983 }
2984 if range.end > prev_row_start && range.end <= row_start {
2985 outdent_to_row = outdent_to_row.min(range.start.row);
2986 }
2987 }
2988
2989 let within_error = error_ranges
2990 .iter()
2991 .any(|e| e.start.row < row && e.end > row_start);
2992
2993 let suggestion = if outdent_to_row == prev_row
2994 || (outdent_from_prev_row && indent_from_prev_row)
2995 {
2996 Some(IndentSuggestion {
2997 basis_row: prev_row,
2998 delta: Ordering::Equal,
2999 within_error: within_error && !from_regex,
3000 })
3001 } else if indent_from_prev_row {
3002 Some(IndentSuggestion {
3003 basis_row: prev_row,
3004 delta: Ordering::Greater,
3005 within_error: within_error && !from_regex,
3006 })
3007 } else if outdent_to_row < prev_row {
3008 Some(IndentSuggestion {
3009 basis_row: outdent_to_row,
3010 delta: Ordering::Equal,
3011 within_error: within_error && !from_regex,
3012 })
3013 } else if outdent_from_prev_row {
3014 Some(IndentSuggestion {
3015 basis_row: prev_row,
3016 delta: Ordering::Less,
3017 within_error: within_error && !from_regex,
3018 })
3019 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3020 {
3021 Some(IndentSuggestion {
3022 basis_row: prev_row,
3023 delta: Ordering::Equal,
3024 within_error: within_error && !from_regex,
3025 })
3026 } else {
3027 None
3028 };
3029
3030 prev_row = row;
3031 prev_row_start = row_start;
3032 suggestion
3033 }))
3034 }
3035
3036 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3037 while row > 0 {
3038 row -= 1;
3039 if !self.is_line_blank(row) {
3040 return Some(row);
3041 }
3042 }
3043 None
3044 }
3045
3046 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3047 let captures = self.syntax.captures(range, &self.text, |grammar| {
3048 grammar.highlights_query.as_ref()
3049 });
3050 let highlight_maps = captures
3051 .grammars()
3052 .iter()
3053 .map(|grammar| grammar.highlight_map())
3054 .collect();
3055 (captures, highlight_maps)
3056 }
3057
3058 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3059 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3060 /// returned in chunks where each chunk has a single syntax highlighting style and
3061 /// diagnostic status.
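///
/// A minimal sketch that reassembles a range of text while inspecting each
/// chunk's highlight (illustrative, not a doctest; assumes `snapshot` is a
/// [`BufferSnapshot`]):
///
/// ```ignore
/// let mut text = String::new();
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     if let Some(_highlight_id) = chunk.syntax_highlight_id {
///         // Resolve the id against a `SyntaxTheme` to get a style, if needed.
///     }
///     text.push_str(chunk.text);
/// }
/// ```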
3062 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3063 let range = range.start.to_offset(self)..range.end.to_offset(self);
3064
3065 let mut syntax = None;
3066 if language_aware {
3067 syntax = Some(self.get_highlights(range.clone()));
3068 }
3069 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3070 let diagnostics = language_aware;
3071 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3072 }
3073
3074 pub fn highlighted_text_for_range<T: ToOffset>(
3075 &self,
3076 range: Range<T>,
3077 override_style: Option<HighlightStyle>,
3078 syntax_theme: &SyntaxTheme,
3079 ) -> HighlightedText {
3080 HighlightedText::from_buffer_range(
3081 range,
3082 &self.text,
3083 &self.syntax,
3084 override_style,
3085 syntax_theme,
3086 )
3087 }
3088
3089 /// Invokes the given callback for each line of text in the given range of the buffer.
3090 /// Uses a callback to avoid allocating a new string for each line.
3091 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3092 let mut line = String::new();
3093 let mut row = range.start.row;
3094 for chunk in self
3095 .as_rope()
3096 .chunks_in_range(range.to_offset(self))
3097 .chain(["\n"])
3098 {
3099 for (newline_ix, text) in chunk.split('\n').enumerate() {
3100 if newline_ix > 0 {
3101 callback(row, &line);
3102 row += 1;
3103 line.clear();
3104 }
3105 line.push_str(text);
3106 }
3107 }
3108 }
3109
3110 /// Iterates over every [`SyntaxLayer`] in the buffer.
3111 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3112 self.syntax
3113 .layers_for_range(0..self.len(), &self.text, true)
3114 }
3115
3116 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3117 let offset = position.to_offset(self);
3118 self.syntax
3119 .layers_for_range(offset..offset, &self.text, false)
3120 .filter(|l| l.node().end_byte() > offset)
3121 .last()
3122 }
3123
3124 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3125 &self,
3126 range: Range<D>,
3127 ) -> Option<SyntaxLayer> {
3128 let range = range.to_offset(self);
3129 return self
3130 .syntax
3131 .layers_for_range(range, &self.text, false)
3132 .max_by(|a, b| {
3133 if a.depth != b.depth {
3134 a.depth.cmp(&b.depth)
3135 } else if a.offset.0 != b.offset.0 {
3136 a.offset.0.cmp(&b.offset.0)
3137 } else {
3138 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3139 }
3140 });
3141 }
3142
3143 /// Returns the main [`Language`].
3144 pub fn language(&self) -> Option<&Arc<Language>> {
3145 self.language.as_ref()
3146 }
3147
3148 /// Returns the [`Language`] at the given location.
3149 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3150 self.syntax_layer_at(position)
3151 .map(|info| info.language)
3152 .or(self.language.as_ref())
3153 }
3154
3155 /// Returns the settings for the language at the given location.
3156 pub fn settings_at<'a, D: ToOffset>(
3157 &'a self,
3158 position: D,
3159 cx: &'a App,
3160 ) -> Cow<'a, LanguageSettings> {
3161 language_settings(
3162 self.language_at(position).map(|l| l.name()),
3163 self.file.as_ref(),
3164 cx,
3165 )
3166 }
3167
3168 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3169 CharClassifier::new(self.language_scope_at(point))
3170 }
3171
3172 /// Returns the [`LanguageScope`] at the given location.
3173 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3174 let offset = position.to_offset(self);
3175 let mut scope = None;
3176 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3177
3178 // Use the layer that has the smallest node intersecting the given point.
3179 for layer in self
3180 .syntax
3181 .layers_for_range(offset..offset, &self.text, false)
3182 {
3183 let mut cursor = layer.node().walk();
3184
3185 let mut range = None;
3186 loop {
3187 let child_range = cursor.node().byte_range();
3188 if !child_range.contains(&offset) {
3189 break;
3190 }
3191
3192 range = Some(child_range);
3193 if cursor.goto_first_child_for_byte(offset).is_none() {
3194 break;
3195 }
3196 }
3197
3198 if let Some(range) = range {
3199 if smallest_range_and_depth.as_ref().map_or(
3200 true,
3201 |(smallest_range, smallest_range_depth)| {
3202 if layer.depth > *smallest_range_depth {
3203 true
3204 } else if layer.depth == *smallest_range_depth {
3205 range.len() < smallest_range.len()
3206 } else {
3207 false
3208 }
3209 },
3210 ) {
3211 smallest_range_and_depth = Some((range, layer.depth));
3212 scope = Some(LanguageScope {
3213 language: layer.language.clone(),
3214 override_id: layer.override_id(offset, &self.text),
3215 });
3216 }
3217 }
3218 }
3219
3220 scope.or_else(|| {
3221 self.language.clone().map(|language| LanguageScope {
3222 language,
3223 override_id: None,
3224 })
3225 })
3226 }
3227
3228 /// Returns a tuple of the range and character kind of the word
3229 /// surrounding the given position.
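///
/// A minimal sketch (illustrative, not a doctest; assumes the snapshot contains
/// the text `"let foo_bar = 1;"` and that the default classifier treats `_` as
/// a word character):
///
/// ```ignore
/// // With the position inside `foo_bar`, the whole identifier is returned.
/// let (range, kind) = snapshot.surrounding_word(6);
/// assert_eq!(range, 4..11);
/// assert_eq!(kind, Some(CharKind::Word));
/// ```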
3230 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3231 let mut start = start.to_offset(self);
3232 let mut end = start;
3233 let mut next_chars = self.chars_at(start).peekable();
3234 let mut prev_chars = self.reversed_chars_at(start).peekable();
3235
3236 let classifier = self.char_classifier_at(start);
3237 let word_kind = cmp::max(
3238 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3239 next_chars.peek().copied().map(|c| classifier.kind(c)),
3240 );
3241
3242 for ch in prev_chars {
3243 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3244 start -= ch.len_utf8();
3245 } else {
3246 break;
3247 }
3248 }
3249
3250 for ch in next_chars {
3251 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3252 end += ch.len_utf8();
3253 } else {
3254 break;
3255 }
3256 }
3257
3258 (start..end, word_kind)
3259 }
3260
3261 /// Returns the closest syntax node enclosing the given range.
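///
/// A minimal sketch (illustrative, not a doctest; `selection` is a hypothetical
/// byte range within a parsed [`BufferSnapshot`]):
///
/// ```ignore
/// if let Some(node) = snapshot.syntax_ancestor(selection) {
///     println!("{} spans {:?}", node.kind(), node.byte_range());
/// }
/// ```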
3262 pub fn syntax_ancestor<'a, T: ToOffset>(
3263 &'a self,
3264 range: Range<T>,
3265 ) -> Option<tree_sitter::Node<'a>> {
3266 let range = range.start.to_offset(self)..range.end.to_offset(self);
3267 let mut result: Option<tree_sitter::Node<'a>> = None;
3268 'outer: for layer in self
3269 .syntax
3270 .layers_for_range(range.clone(), &self.text, true)
3271 {
3272 let mut cursor = layer.node().walk();
3273
3274 // Descend to the first leaf that touches the start of the range,
3275 // and if the range is non-empty, extends beyond the start.
3276 while cursor.goto_first_child_for_byte(range.start).is_some() {
3277 if !range.is_empty() && cursor.node().end_byte() == range.start {
3278 cursor.goto_next_sibling();
3279 }
3280 }
3281
3282 // Ascend to the smallest ancestor that strictly contains the range.
3283 loop {
3284 let node_range = cursor.node().byte_range();
3285 if node_range.start <= range.start
3286 && node_range.end >= range.end
3287 && node_range.len() > range.len()
3288 {
3289 break;
3290 }
3291 if !cursor.goto_parent() {
3292 continue 'outer;
3293 }
3294 }
3295
3296 let left_node = cursor.node();
3297 let mut layer_result = left_node;
3298
3299 // For an empty range, try to find another node immediately to the right of the range.
3300 if left_node.end_byte() == range.start {
3301 let mut right_node = None;
3302 while !cursor.goto_next_sibling() {
3303 if !cursor.goto_parent() {
3304 break;
3305 }
3306 }
3307
3308 while cursor.node().start_byte() == range.start {
3309 right_node = Some(cursor.node());
3310 if !cursor.goto_first_child() {
3311 break;
3312 }
3313 }
3314
3315 // If there is a candidate node on both sides of the (empty) range, then
3316 // decide between the two by favoring a named node over an anonymous token.
3317 // If both nodes are the same in that regard, favor the right one.
3318 if let Some(right_node) = right_node {
3319 if right_node.is_named() || !left_node.is_named() {
3320 layer_result = right_node;
3321 }
3322 }
3323 }
3324
3325 if let Some(previous_result) = &result {
3326 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3327 continue;
3328 }
3329 }
3330 result = Some(layer_result);
3331 }
3332
3333 result
3334 }
3335
3336 /// Returns the outline for the buffer.
3337 ///
3338 /// This method allows passing an optional [`SyntaxTheme`] to
3339 /// syntax-highlight the returned symbols.
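///
/// A minimal sketch (illustrative, not a doctest; assumes [`Outline`] exposes
/// its collected items and that no highlighting theme is needed):
///
/// ```ignore
/// if let Some(outline) = snapshot.outline(None) {
///     for item in &outline.items {
///         println!("{}{}", "  ".repeat(item.depth), item.text);
///     }
/// }
/// ```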
3340 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3341 self.outline_items_containing(0..self.len(), true, theme)
3342 .map(Outline::new)
3343 }
3344
3345 /// Returns all the symbols that contain the given position.
3346 ///
3347 /// This method allows passing an optional [`SyntaxTheme`] to
3348 /// syntax-highlight the returned symbols.
3349 pub fn symbols_containing<T: ToOffset>(
3350 &self,
3351 position: T,
3352 theme: Option<&SyntaxTheme>,
3353 ) -> Option<Vec<OutlineItem<Anchor>>> {
3354 let position = position.to_offset(self);
3355 let mut items = self.outline_items_containing(
3356 position.saturating_sub(1)..self.len().min(position + 1),
3357 false,
3358 theme,
3359 )?;
3360 let mut prev_depth = None;
3361 items.retain(|item| {
3362 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3363 prev_depth = Some(item.depth);
3364 result
3365 });
3366 Some(items)
3367 }
3368
3369 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3370 let range = range.to_offset(self);
3371 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3372 grammar.outline_config.as_ref().map(|c| &c.query)
3373 });
3374 let configs = matches
3375 .grammars()
3376 .iter()
3377 .map(|g| g.outline_config.as_ref().unwrap())
3378 .collect::<Vec<_>>();
3379
3380 while let Some(mat) = matches.peek() {
3381 let config = &configs[mat.grammar_index];
3382 let containing_item_node = maybe!({
3383 let item_node = mat.captures.iter().find_map(|cap| {
3384 if cap.index == config.item_capture_ix {
3385 Some(cap.node)
3386 } else {
3387 None
3388 }
3389 })?;
3390
3391 let item_byte_range = item_node.byte_range();
3392 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3393 None
3394 } else {
3395 Some(item_node)
3396 }
3397 });
3398
3399 if let Some(item_node) = containing_item_node {
3400 return Some(
3401 Point::from_ts_point(item_node.start_position())
3402 ..Point::from_ts_point(item_node.end_position()),
3403 );
3404 }
3405
3406 matches.advance();
3407 }
3408 None
3409 }
3410
3411 pub fn outline_items_containing<T: ToOffset>(
3412 &self,
3413 range: Range<T>,
3414 include_extra_context: bool,
3415 theme: Option<&SyntaxTheme>,
3416 ) -> Option<Vec<OutlineItem<Anchor>>> {
3417 let range = range.to_offset(self);
3418 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3419 grammar.outline_config.as_ref().map(|c| &c.query)
3420 });
3421 let configs = matches
3422 .grammars()
3423 .iter()
3424 .map(|g| g.outline_config.as_ref().unwrap())
3425 .collect::<Vec<_>>();
3426
3427 let mut items = Vec::new();
3428 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3429 while let Some(mat) = matches.peek() {
3430 let config = &configs[mat.grammar_index];
3431 if let Some(item) =
3432 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3433 {
3434 items.push(item);
3435 } else if let Some(capture) = mat
3436 .captures
3437 .iter()
3438 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3439 {
3440 let capture_range = capture.node.start_position()..capture.node.end_position();
3441 let mut capture_row_range =
3442 capture_range.start.row as u32..capture_range.end.row as u32;
3443 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3444 {
3445 capture_row_range.end -= 1;
3446 }
3447 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3448 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3449 last_row_range.end = capture_row_range.end;
3450 } else {
3451 annotation_row_ranges.push(capture_row_range);
3452 }
3453 } else {
3454 annotation_row_ranges.push(capture_row_range);
3455 }
3456 }
3457 matches.advance();
3458 }
3459
3460 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3461
3462 // Assign depths based on containment relationships and convert to anchors.
3463 let mut item_ends_stack = Vec::<Point>::new();
3464 let mut anchor_items = Vec::new();
3465 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3466 for item in items {
3467 while let Some(last_end) = item_ends_stack.last().copied() {
3468 if last_end < item.range.end {
3469 item_ends_stack.pop();
3470 } else {
3471 break;
3472 }
3473 }
3474
3475 let mut annotation_row_range = None;
3476 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3477 let row_preceding_item = item.range.start.row.saturating_sub(1);
3478 if next_annotation_row_range.end < row_preceding_item {
3479 annotation_row_ranges.next();
3480 } else {
3481 if next_annotation_row_range.end == row_preceding_item {
3482 annotation_row_range = Some(next_annotation_row_range.clone());
3483 annotation_row_ranges.next();
3484 }
3485 break;
3486 }
3487 }
3488
3489 anchor_items.push(OutlineItem {
3490 depth: item_ends_stack.len(),
3491 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3492 text: item.text,
3493 highlight_ranges: item.highlight_ranges,
3494 name_ranges: item.name_ranges,
3495 body_range: item.body_range.map(|body_range| {
3496 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3497 }),
3498 annotation_range: annotation_row_range.map(|annotation_range| {
3499 self.anchor_after(Point::new(annotation_range.start, 0))
3500 ..self.anchor_before(Point::new(
3501 annotation_range.end,
3502 self.line_len(annotation_range.end),
3503 ))
3504 }),
3505 });
3506 item_ends_stack.push(item.range.end);
3507 }
3508
3509 Some(anchor_items)
3510 }
3511
3512 fn next_outline_item(
3513 &self,
3514 config: &OutlineConfig,
3515 mat: &SyntaxMapMatch,
3516 range: &Range<usize>,
3517 include_extra_context: bool,
3518 theme: Option<&SyntaxTheme>,
3519 ) -> Option<OutlineItem<Point>> {
3520 let item_node = mat.captures.iter().find_map(|cap| {
3521 if cap.index == config.item_capture_ix {
3522 Some(cap.node)
3523 } else {
3524 None
3525 }
3526 })?;
3527
3528 let item_byte_range = item_node.byte_range();
3529 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3530 return None;
3531 }
3532 let item_point_range = Point::from_ts_point(item_node.start_position())
3533 ..Point::from_ts_point(item_node.end_position());
3534
3535 let mut open_point = None;
3536 let mut close_point = None;
3537 let mut buffer_ranges = Vec::new();
3538 for capture in mat.captures {
3539 let node_is_name;
3540 if capture.index == config.name_capture_ix {
3541 node_is_name = true;
3542 } else if Some(capture.index) == config.context_capture_ix
3543 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3544 {
3545 node_is_name = false;
3546 } else {
3547 if Some(capture.index) == config.open_capture_ix {
3548 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3549 } else if Some(capture.index) == config.close_capture_ix {
3550 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3551 }
3552
3553 continue;
3554 }
3555
3556 let mut range = capture.node.start_byte()..capture.node.end_byte();
3557 let start = capture.node.start_position();
3558 if capture.node.end_position().row > start.row {
3559 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3560 }
3561
3562 if !range.is_empty() {
3563 buffer_ranges.push((range, node_is_name));
3564 }
3565 }
3566 if buffer_ranges.is_empty() {
3567 return None;
3568 }
3569 let mut text = String::new();
3570 let mut highlight_ranges = Vec::new();
3571 let mut name_ranges = Vec::new();
3572 let mut chunks = self.chunks(
3573 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3574 true,
3575 );
3576 let mut last_buffer_range_end = 0;
3577
3578 for (buffer_range, is_name) in buffer_ranges {
3579 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3580 if space_added {
3581 text.push(' ');
3582 }
3583 let before_append_len = text.len();
3584 let mut offset = buffer_range.start;
3585 chunks.seek(buffer_range.clone());
3586 for mut chunk in chunks.by_ref() {
3587 if chunk.text.len() > buffer_range.end - offset {
3588 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3589 offset = buffer_range.end;
3590 } else {
3591 offset += chunk.text.len();
3592 }
3593 let style = chunk
3594 .syntax_highlight_id
3595 .zip(theme)
3596 .and_then(|(highlight, theme)| highlight.style(theme));
3597 if let Some(style) = style {
3598 let start = text.len();
3599 let end = start + chunk.text.len();
3600 highlight_ranges.push((start..end, style));
3601 }
3602 text.push_str(chunk.text);
3603 if offset >= buffer_range.end {
3604 break;
3605 }
3606 }
3607 if is_name {
3608 let after_append_len = text.len();
3609 let start = if space_added && !name_ranges.is_empty() {
3610 before_append_len - 1
3611 } else {
3612 before_append_len
3613 };
3614 name_ranges.push(start..after_append_len);
3615 }
3616 last_buffer_range_end = buffer_range.end;
3617 }
3618
3619 Some(OutlineItem {
3620 depth: 0, // We'll calculate the depth later
3621 range: item_point_range,
3622 text,
3623 highlight_ranges,
3624 name_ranges,
3625 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3626 annotation_range: None,
3627 })
3628 }
3629
3630 pub fn function_body_fold_ranges<T: ToOffset>(
3631 &self,
3632 within: Range<T>,
3633 ) -> impl Iterator<Item = Range<usize>> + '_ {
3634 self.text_object_ranges(within, TreeSitterOptions::default())
3635 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3636 }
3637
3638 /// For each grammar in the language, runs the provided
3639 /// [`tree_sitter::Query`] against the given range.
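    ///
    /// A minimal sketch of a caller (the closure shown mirrors the usages in this module and is
    /// illustrative only):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // ... inspect `mat.captures` ...
    ///     matches.advance();
    /// }
    /// ```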
3640 pub fn matches(
3641 &self,
3642 range: Range<usize>,
3643 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3644 ) -> SyntaxMapMatches {
3645 self.syntax.matches(range, self, query)
3646 }
3647
3648 pub fn all_bracket_ranges(
3649 &self,
3650 range: Range<usize>,
3651 ) -> impl Iterator<Item = BracketMatch> + '_ {
3652 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3653 grammar.brackets_config.as_ref().map(|c| &c.query)
3654 });
3655 let configs = matches
3656 .grammars()
3657 .iter()
3658 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3659 .collect::<Vec<_>>();
3660
3661 iter::from_fn(move || {
3662 while let Some(mat) = matches.peek() {
3663 let mut open = None;
3664 let mut close = None;
3665 let config = &configs[mat.grammar_index];
3666 let pattern = &config.patterns[mat.pattern_index];
3667 for capture in mat.captures {
3668 if capture.index == config.open_capture_ix {
3669 open = Some(capture.node.byte_range());
3670 } else if capture.index == config.close_capture_ix {
3671 close = Some(capture.node.byte_range());
3672 }
3673 }
3674
3675 matches.advance();
3676
3677 let Some((open_range, close_range)) = open.zip(close) else {
3678 continue;
3679 };
3680
3681 let bracket_range = open_range.start..=close_range.end;
3682 if !bracket_range.overlaps(&range) {
3683 continue;
3684 }
3685
3686 return Some(BracketMatch {
3687 open_range,
3688 close_range,
3689 newline_only: pattern.newline_only,
3690 });
3691 }
3692 None
3693 })
3694 }
3695
3696     /// Returns bracket range pairs overlapping or adjacent to `range`.
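    ///
    /// A minimal usage sketch (assuming a `snapshot: &BufferSnapshot` and a cursor `offset`;
    /// the loop body is illustrative):
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(offset..offset) {
    ///     println!("open: {:?}, close: {:?}", pair.open_range, pair.close_range);
    /// }
    /// ```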
3697 pub fn bracket_ranges<T: ToOffset>(
3698 &self,
3699 range: Range<T>,
3700 ) -> impl Iterator<Item = BracketMatch> + '_ {
3701 // Find bracket pairs that *inclusively* contain the given range.
3702 let range = range.start.to_offset(self).saturating_sub(1)
3703 ..self.len().min(range.end.to_offset(self) + 1);
3704 self.all_bracket_ranges(range)
3705 .filter(|pair| !pair.newline_only)
3706 }
3707
3708 pub fn text_object_ranges<T: ToOffset>(
3709 &self,
3710 range: Range<T>,
3711 options: TreeSitterOptions,
3712 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3713 let range = range.start.to_offset(self).saturating_sub(1)
3714 ..self.len().min(range.end.to_offset(self) + 1);
3715
3716 let mut matches =
3717 self.syntax
3718 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3719 grammar.text_object_config.as_ref().map(|c| &c.query)
3720 });
3721
3722 let configs = matches
3723 .grammars()
3724 .iter()
3725 .map(|grammar| grammar.text_object_config.as_ref())
3726 .collect::<Vec<_>>();
3727
3728 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3729
3730 iter::from_fn(move || {
3731 loop {
3732 while let Some(capture) = captures.pop() {
3733 if capture.0.overlaps(&range) {
3734 return Some(capture);
3735 }
3736 }
3737
3738 let mat = matches.peek()?;
3739
3740 let Some(config) = configs[mat.grammar_index].as_ref() else {
3741 matches.advance();
3742 continue;
3743 };
3744
3745 for capture in mat.captures {
3746 let Some(ix) = config
3747 .text_objects_by_capture_ix
3748 .binary_search_by_key(&capture.index, |e| e.0)
3749 .ok()
3750 else {
3751 continue;
3752 };
3753 let text_object = config.text_objects_by_capture_ix[ix].1;
3754 let byte_range = capture.node.byte_range();
3755
3756 let mut found = false;
3757 for (range, existing) in captures.iter_mut() {
3758 if existing == &text_object {
3759 range.start = range.start.min(byte_range.start);
3760 range.end = range.end.max(byte_range.end);
3761 found = true;
3762 break;
3763 }
3764 }
3765
3766 if !found {
3767 captures.push((byte_range, text_object));
3768 }
3769 }
3770
3771 matches.advance();
3772 }
3773 })
3774 }
3775
3776     /// Returns enclosing bracket ranges containing the given range.
3777 pub fn enclosing_bracket_ranges<T: ToOffset>(
3778 &self,
3779 range: Range<T>,
3780 ) -> impl Iterator<Item = BracketMatch> + '_ {
3781 let range = range.start.to_offset(self)..range.end.to_offset(self);
3782
3783 self.bracket_ranges(range.clone()).filter(move |pair| {
3784 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3785 })
3786 }
3787
3788     /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
3789     ///
3790     /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
3791 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3792 &self,
3793 range: Range<T>,
3794 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3795 ) -> Option<(Range<usize>, Range<usize>)> {
3796 let range = range.start.to_offset(self)..range.end.to_offset(self);
3797
3798 // Get the ranges of the innermost pair of brackets.
3799 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3800
3801 for pair in self.enclosing_bracket_ranges(range.clone()) {
3802 if let Some(range_filter) = range_filter {
3803 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3804 continue;
3805 }
3806 }
3807
3808 let len = pair.close_range.end - pair.open_range.start;
3809
3810 if let Some((existing_open, existing_close)) = &result {
3811 let existing_len = existing_close.end - existing_open.start;
3812 if len > existing_len {
3813 continue;
3814 }
3815 }
3816
3817 result = Some((pair.open_range, pair.close_range));
3818 }
3819
3820 result
3821 }
3822
3823 /// Returns anchor ranges for any matches of the redaction query.
3824 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3825 /// will be run on the relevant section of the buffer.
3826 pub fn redacted_ranges<T: ToOffset>(
3827 &self,
3828 range: Range<T>,
3829 ) -> impl Iterator<Item = Range<usize>> + '_ {
3830 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3831 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3832 grammar
3833 .redactions_config
3834 .as_ref()
3835 .map(|config| &config.query)
3836 });
3837
3838 let configs = syntax_matches
3839 .grammars()
3840 .iter()
3841 .map(|grammar| grammar.redactions_config.as_ref())
3842 .collect::<Vec<_>>();
3843
3844 iter::from_fn(move || {
3845 let redacted_range = syntax_matches
3846 .peek()
3847 .and_then(|mat| {
3848 configs[mat.grammar_index].and_then(|config| {
3849 mat.captures
3850 .iter()
3851 .find(|capture| capture.index == config.redaction_capture_ix)
3852 })
3853 })
3854 .map(|mat| mat.node.byte_range());
3855 syntax_matches.advance();
3856 redacted_range
3857 })
3858 }
3859
3860 pub fn injections_intersecting_range<T: ToOffset>(
3861 &self,
3862 range: Range<T>,
3863 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3864 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3865
3866 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3867 grammar
3868 .injection_config
3869 .as_ref()
3870 .map(|config| &config.query)
3871 });
3872
3873 let configs = syntax_matches
3874 .grammars()
3875 .iter()
3876 .map(|grammar| grammar.injection_config.as_ref())
3877 .collect::<Vec<_>>();
3878
3879 iter::from_fn(move || {
3880 let ranges = syntax_matches.peek().and_then(|mat| {
3881 let config = &configs[mat.grammar_index]?;
3882 let content_capture_range = mat.captures.iter().find_map(|capture| {
3883 if capture.index == config.content_capture_ix {
3884 Some(capture.node.byte_range())
3885 } else {
3886 None
3887 }
3888 })?;
3889 let language = self.language_at(content_capture_range.start)?;
3890 Some((content_capture_range, language))
3891 });
3892 syntax_matches.advance();
3893 ranges
3894 })
3895 }
3896
3897 pub fn debug_variable_ranges(
3898 &self,
3899 offset_range: Range<usize>,
3900 ) -> impl Iterator<Item = DebugVariableRanges> + '_ {
3901 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3902 grammar
3903 .debug_variables_config
3904 .as_ref()
3905 .map(|config| &config.query)
3906 });
3907
3908 let configs = syntax_matches
3909 .grammars()
3910 .iter()
3911 .map(|grammar| grammar.debug_variables_config.as_ref())
3912 .collect::<Vec<_>>();
3913
3914 iter::from_fn(move || {
3915 loop {
3916 let mat = syntax_matches.peek()?;
3917
3918 let variable_ranges = configs[mat.grammar_index].and_then(|config| {
3919 let full_range = mat.captures.iter().fold(
3920 Range {
3921 start: usize::MAX,
3922 end: 0,
3923 },
3924 |mut acc, next| {
3925 let byte_range = next.node.byte_range();
3926 if acc.start > byte_range.start {
3927 acc.start = byte_range.start;
3928 }
3929 if acc.end < byte_range.end {
3930 acc.end = byte_range.end;
3931 }
3932 acc
3933 },
3934 );
3935 if full_range.start > full_range.end {
3936 // We did not find a full spanning range of this match.
3937 return None;
3938 }
3939
3940 let captures = mat.captures.iter().filter_map(|capture| {
3941 Some((
3942 capture,
3943 config.captures.get(capture.index as usize).cloned()?,
3944 ))
3945 });
3946
3947 let mut variable_range = None;
3948 for (query, capture) in captures {
3949 if let DebugVariableCapture::Variable = capture {
3950 let _ = variable_range.insert(query.node.byte_range());
3951 }
3952 }
3953
3954 Some(DebugVariableRanges {
3955 buffer_id: self.remote_id(),
3956 range: variable_range?,
3957 })
3958 });
3959
3960 syntax_matches.advance();
3961 if variable_ranges.is_some() {
3962                     // It's fine to short-circuit when `.peek()` returns `None`, but a match without
3963                     // a variable capture should not end this iterator, so loop around to the next match.
3964 return variable_ranges;
3965 }
3966 }
3967 })
3968 }
3969
3970 pub fn runnable_ranges(
3971 &self,
3972 offset_range: Range<usize>,
3973 ) -> impl Iterator<Item = RunnableRange> + '_ {
3974 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3975 grammar.runnable_config.as_ref().map(|config| &config.query)
3976 });
3977
3978 let test_configs = syntax_matches
3979 .grammars()
3980 .iter()
3981 .map(|grammar| grammar.runnable_config.as_ref())
3982 .collect::<Vec<_>>();
3983
3984 iter::from_fn(move || {
3985 loop {
3986 let mat = syntax_matches.peek()?;
3987
3988 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3989 let mut run_range = None;
3990 let full_range = mat.captures.iter().fold(
3991 Range {
3992 start: usize::MAX,
3993 end: 0,
3994 },
3995 |mut acc, next| {
3996 let byte_range = next.node.byte_range();
3997 if acc.start > byte_range.start {
3998 acc.start = byte_range.start;
3999 }
4000 if acc.end < byte_range.end {
4001 acc.end = byte_range.end;
4002 }
4003 acc
4004 },
4005 );
4006 if full_range.start > full_range.end {
4007 // We did not find a full spanning range of this match.
4008 return None;
4009 }
4010 let extra_captures: SmallVec<[_; 1]> =
4011 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4012 test_configs
4013 .extra_captures
4014 .get(capture.index as usize)
4015 .cloned()
4016 .and_then(|tag_name| match tag_name {
4017 RunnableCapture::Named(name) => {
4018 Some((capture.node.byte_range(), name))
4019 }
4020 RunnableCapture::Run => {
4021 let _ = run_range.insert(capture.node.byte_range());
4022 None
4023 }
4024 })
4025 }));
4026 let run_range = run_range?;
4027 let tags = test_configs
4028 .query
4029 .property_settings(mat.pattern_index)
4030 .iter()
4031 .filter_map(|property| {
4032 if *property.key == *"tag" {
4033 property
4034 .value
4035 .as_ref()
4036 .map(|value| RunnableTag(value.to_string().into()))
4037 } else {
4038 None
4039 }
4040 })
4041 .collect();
4042 let extra_captures = extra_captures
4043 .into_iter()
4044 .map(|(range, name)| {
4045 (
4046 name.to_string(),
4047 self.text_for_range(range.clone()).collect::<String>(),
4048 )
4049 })
4050 .collect();
4051 // All tags should have the same range.
4052 Some(RunnableRange {
4053 run_range,
4054 full_range,
4055 runnable: Runnable {
4056 tags,
4057 language: mat.language,
4058 buffer: self.remote_id(),
4059 },
4060 extra_captures,
4061 buffer_id: self.remote_id(),
4062 })
4063 });
4064
4065 syntax_matches.advance();
4066 if test_range.is_some() {
4067                     // It's fine to short-circuit when `.peek()` returns `None`, but a match without
4068                     // a run marker should not end this iterator, so loop around to the next match.
4069 return test_range;
4070 }
4071 }
4072 })
4073 }
4074
4075     /// Returns selections intersecting the given range for each remote peer, and for the local replica as well when `include_local` is true.
4076 #[allow(clippy::type_complexity)]
4077 pub fn selections_in_range(
4078 &self,
4079 range: Range<Anchor>,
4080 include_local: bool,
4081 ) -> impl Iterator<
4082 Item = (
4083 ReplicaId,
4084 bool,
4085 CursorShape,
4086 impl Iterator<Item = &Selection<Anchor>> + '_,
4087 ),
4088 > + '_ {
4089 self.remote_selections
4090 .iter()
4091 .filter(move |(replica_id, set)| {
4092 (include_local || **replica_id != self.text.replica_id())
4093 && !set.selections.is_empty()
4094 })
4095 .map(move |(replica_id, set)| {
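                // The comparators below never return `Equal`, so `binary_search_by` always yields
                // the insertion point: `start_ix` is the first selection ending at or after
                // `range.start`, and `end_ix` is one past the last selection starting at or before
                // `range.end`.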
4096 let start_ix = match set.selections.binary_search_by(|probe| {
4097 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4098 }) {
4099 Ok(ix) | Err(ix) => ix,
4100 };
4101 let end_ix = match set.selections.binary_search_by(|probe| {
4102 probe.start.cmp(&range.end, self).then(Ordering::Less)
4103 }) {
4104 Ok(ix) | Err(ix) => ix,
4105 };
4106
4107 (
4108 *replica_id,
4109 set.line_mode,
4110 set.cursor_shape,
4111 set.selections[start_ix..end_ix].iter(),
4112 )
4113 })
4114 }
4115
4116     /// Returns whether the buffer contains any diagnostics.
4117 pub fn has_diagnostics(&self) -> bool {
4118 !self.diagnostics.is_empty()
4119 }
4120
4121 /// Returns all the diagnostics intersecting the given range.
4122 pub fn diagnostics_in_range<'a, T, O>(
4123 &'a self,
4124 search_range: Range<T>,
4125 reversed: bool,
4126 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4127 where
4128 T: 'a + Clone + ToOffset,
4129 O: 'a + FromAnchor,
4130 {
4131 let mut iterators: Vec<_> = self
4132 .diagnostics
4133 .iter()
4134 .map(|(_, collection)| {
4135 collection
4136 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4137 .peekable()
4138 })
4139 .collect();
4140
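        // Merge the per-server iterators, yielding entries ordered by range start, then by
        // severity, then by group id (or the reverse of that order when `reversed` is set).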
4141 std::iter::from_fn(move || {
4142 let (next_ix, _) = iterators
4143 .iter_mut()
4144 .enumerate()
4145 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4146 .min_by(|(_, a), (_, b)| {
4147 let cmp = a
4148 .range
4149 .start
4150 .cmp(&b.range.start, self)
4151 // when range is equal, sort by diagnostic severity
4152 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4153 // and stabilize order with group_id
4154 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4155 if reversed { cmp.reverse() } else { cmp }
4156 })?;
4157 iterators[next_ix]
4158 .next()
4159 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4160 diagnostic,
4161 range: FromAnchor::from_anchor(&range.start, self)
4162 ..FromAnchor::from_anchor(&range.end, self),
4163 })
4164 })
4165 }
4166
4167     /// Returns all the diagnostic groups associated with the given
4168     /// language server ID. If no language server ID is provided,
4169     /// all diagnostic groups are returned.
4170 pub fn diagnostic_groups(
4171 &self,
4172 language_server_id: Option<LanguageServerId>,
4173 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4174 let mut groups = Vec::new();
4175
4176 if let Some(language_server_id) = language_server_id {
4177 if let Ok(ix) = self
4178 .diagnostics
4179 .binary_search_by_key(&language_server_id, |e| e.0)
4180 {
4181 self.diagnostics[ix]
4182 .1
4183 .groups(language_server_id, &mut groups, self);
4184 }
4185 } else {
4186 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4187 diagnostics.groups(*language_server_id, &mut groups, self);
4188 }
4189 }
4190
4191 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4192 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4193 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4194 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4195 });
4196
4197 groups
4198 }
4199
4200 /// Returns an iterator over the diagnostics for the given group.
4201 pub fn diagnostic_group<O>(
4202 &self,
4203 group_id: usize,
4204 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4205 where
4206 O: FromAnchor + 'static,
4207 {
4208 self.diagnostics
4209 .iter()
4210 .flat_map(move |(_, set)| set.group(group_id, self))
4211 }
4212
4213 /// An integer version number that accounts for all updates besides
4214 /// the buffer's text itself (which is versioned via a version vector).
4215 pub fn non_text_state_update_count(&self) -> usize {
4216 self.non_text_state_update_count
4217 }
4218
4219     /// Returns a snapshot of the underlying file.
4220 pub fn file(&self) -> Option<&Arc<dyn File>> {
4221 self.file.as_ref()
4222 }
4223
4224 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4225 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4226 if let Some(file) = self.file() {
4227 if file.path().file_name().is_none() || include_root {
4228 Some(file.full_path(cx))
4229 } else {
4230 Some(file.path().to_path_buf())
4231 }
4232 } else {
4233 None
4234 }
4235 }
4236
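    /// Collects the distinct words within `query.range`, mapped to their anchor ranges.
    /// See [`WordsQuery`] for the available filters.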
4237 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4238 let query_str = query.fuzzy_contents;
4239 if query_str.map_or(false, |query| query.is_empty()) {
4240 return BTreeMap::default();
4241 }
4242
4243 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4244 language,
4245 override_id: None,
4246 }));
4247
4248 let mut query_ix = 0;
4249 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4250 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4251
4252 let mut words = BTreeMap::default();
4253 let mut current_word_start_ix = None;
4254 let mut chunk_ix = query.range.start;
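        // Scan the range character by character, tracking the current word and greedily matching
        // the fuzzy query's characters in order (case-insensitively) as word characters appear.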
4255 for chunk in self.chunks(query.range, false) {
4256 for (i, c) in chunk.text.char_indices() {
4257 let ix = chunk_ix + i;
4258 if classifier.is_word(c) {
4259 if current_word_start_ix.is_none() {
4260 current_word_start_ix = Some(ix);
4261 }
4262
4263 if let Some(query_chars) = &query_chars {
4264 if query_ix < query_len {
4265 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4266 query_ix += 1;
4267 }
4268 }
4269 }
4270 continue;
4271 } else if let Some(word_start) = current_word_start_ix.take() {
4272 if query_ix == query_len {
4273 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4274 let mut word_text = self.text_for_range(word_start..ix).peekable();
4275 let first_char = word_text
4276 .peek()
4277 .and_then(|first_chunk| first_chunk.chars().next());
4278                         // As a heuristic to reduce useless completions, skip empty "words" and words that start with a digit.
4279 if !query.skip_digits
4280 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4281 {
4282 words.insert(word_text.collect(), word_range);
4283 }
4284 }
4285 }
4286 query_ix = 0;
4287 }
4288 chunk_ix += chunk.text.len();
4289 }
4290
4291 words
4292 }
4293}
4294
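/// Options for [`BufferSnapshot::words_in_range`].
///
/// A minimal usage sketch (assuming a `snapshot: &BufferSnapshot`; the field values are illustrative):
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("buf"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```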
4295pub struct WordsQuery<'a> {
4296     /// Only return words that contain all of the characters of this string, in order (matched case-insensitively).
4297 pub fuzzy_contents: Option<&'a str>,
4298 /// Skips words that start with a digit.
4299 pub skip_digits: bool,
4300     /// The buffer offset range within which to look for words.
4301 pub range: Range<usize>,
4302}
4303
4304fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4305 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4306}
4307
4308fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4309 let mut result = IndentSize::spaces(0);
4310 for c in text {
4311 let kind = match c {
4312 ' ' => IndentKind::Space,
4313 '\t' => IndentKind::Tab,
4314 _ => break,
4315 };
4316 if result.len == 0 {
4317 result.kind = kind;
4318 }
4319 result.len += 1;
4320 }
4321 result
4322}
4323
4324impl Clone for BufferSnapshot {
4325 fn clone(&self) -> Self {
4326 Self {
4327 text: self.text.clone(),
4328 syntax: self.syntax.clone(),
4329 file: self.file.clone(),
4330 remote_selections: self.remote_selections.clone(),
4331 diagnostics: self.diagnostics.clone(),
4332 language: self.language.clone(),
4333 non_text_state_update_count: self.non_text_state_update_count,
4334 }
4335 }
4336}
4337
4338impl Deref for BufferSnapshot {
4339 type Target = text::BufferSnapshot;
4340
4341 fn deref(&self) -> &Self::Target {
4342 &self.text
4343 }
4344}
4345
4346unsafe impl Send for BufferChunks<'_> {}
4347
4348impl<'a> BufferChunks<'a> {
4349 pub(crate) fn new(
4350 text: &'a Rope,
4351 range: Range<usize>,
4352 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4353 diagnostics: bool,
4354 buffer_snapshot: Option<&'a BufferSnapshot>,
4355 ) -> Self {
4356 let mut highlights = None;
4357 if let Some((captures, highlight_maps)) = syntax {
4358 highlights = Some(BufferChunkHighlights {
4359 captures,
4360 next_capture: None,
4361 stack: Default::default(),
4362 highlight_maps,
4363 })
4364 }
4365
4366 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4367 let chunks = text.chunks_in_range(range.clone());
4368
4369 let mut this = BufferChunks {
4370 range,
4371 buffer_snapshot,
4372 chunks,
4373 diagnostic_endpoints,
4374 error_depth: 0,
4375 warning_depth: 0,
4376 information_depth: 0,
4377 hint_depth: 0,
4378 unnecessary_depth: 0,
4379 highlights,
4380 };
4381 this.initialize_diagnostic_endpoints();
4382 this
4383 }
4384
4385     /// Seeks to the given byte range in the buffer.
4386 pub fn seek(&mut self, range: Range<usize>) {
4387 let old_range = std::mem::replace(&mut self.range, range.clone());
4388 self.chunks.set_range(self.range.clone());
4389 if let Some(highlights) = self.highlights.as_mut() {
4390 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4391 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4392 highlights
4393 .stack
4394 .retain(|(end_offset, _)| *end_offset > range.start);
4395 if let Some(capture) = &highlights.next_capture {
4396 if range.start >= capture.node.start_byte() {
4397 let next_capture_end = capture.node.end_byte();
4398 if range.start < next_capture_end {
4399 highlights.stack.push((
4400 next_capture_end,
4401 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4402 ));
4403 }
4404 highlights.next_capture.take();
4405 }
4406 }
4407 } else if let Some(snapshot) = self.buffer_snapshot {
4408 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4409 *highlights = BufferChunkHighlights {
4410 captures,
4411 next_capture: None,
4412 stack: Default::default(),
4413 highlight_maps,
4414 };
4415 } else {
4416 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4417 // Seeking such BufferChunks is not supported.
4418 debug_assert!(
4419 false,
4420 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4421 );
4422 }
4423
4424 highlights.captures.set_byte_range(self.range.clone());
4425 self.initialize_diagnostic_endpoints();
4426 }
4427 }
4428
4429 fn initialize_diagnostic_endpoints(&mut self) {
4430 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4431 if let Some(buffer) = self.buffer_snapshot {
4432 let mut diagnostic_endpoints = Vec::new();
4433 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4434 diagnostic_endpoints.push(DiagnosticEndpoint {
4435 offset: entry.range.start,
4436 is_start: true,
4437 severity: entry.diagnostic.severity,
4438 is_unnecessary: entry.diagnostic.is_unnecessary,
4439 });
4440 diagnostic_endpoints.push(DiagnosticEndpoint {
4441 offset: entry.range.end,
4442 is_start: false,
4443 severity: entry.diagnostic.severity,
4444 is_unnecessary: entry.diagnostic.is_unnecessary,
4445 });
4446 }
4447 diagnostic_endpoints
4448 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4449 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4450 self.hint_depth = 0;
4451 self.error_depth = 0;
4452 self.warning_depth = 0;
4453 self.information_depth = 0;
4454 }
4455 }
4456 }
4457
4458 /// The current byte offset in the buffer.
4459 pub fn offset(&self) -> usize {
4460 self.range.start
4461 }
4462
4463 pub fn range(&self) -> Range<usize> {
4464 self.range.clone()
4465 }
4466
4467 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4468 let depth = match endpoint.severity {
4469 DiagnosticSeverity::ERROR => &mut self.error_depth,
4470 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4471 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4472 DiagnosticSeverity::HINT => &mut self.hint_depth,
4473 _ => return,
4474 };
4475 if endpoint.is_start {
4476 *depth += 1;
4477 } else {
4478 *depth -= 1;
4479 }
4480
4481 if endpoint.is_unnecessary {
4482 if endpoint.is_start {
4483 self.unnecessary_depth += 1;
4484 } else {
4485 self.unnecessary_depth -= 1;
4486 }
4487 }
4488 }
4489
4490 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4491 if self.error_depth > 0 {
4492 Some(DiagnosticSeverity::ERROR)
4493 } else if self.warning_depth > 0 {
4494 Some(DiagnosticSeverity::WARNING)
4495 } else if self.information_depth > 0 {
4496 Some(DiagnosticSeverity::INFORMATION)
4497 } else if self.hint_depth > 0 {
4498 Some(DiagnosticSeverity::HINT)
4499 } else {
4500 None
4501 }
4502 }
4503
4504 fn current_code_is_unnecessary(&self) -> bool {
4505 self.unnecessary_depth > 0
4506 }
4507}
4508
4509impl<'a> Iterator for BufferChunks<'a> {
4510 type Item = Chunk<'a>;
4511
4512 fn next(&mut self) -> Option<Self::Item> {
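        // A returned chunk must not cross a syntax highlight capture boundary or a diagnostic
        // boundary, so track the nearest upcoming boundary of each kind.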
4513 let mut next_capture_start = usize::MAX;
4514 let mut next_diagnostic_endpoint = usize::MAX;
4515
4516 if let Some(highlights) = self.highlights.as_mut() {
4517 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4518 if *parent_capture_end <= self.range.start {
4519 highlights.stack.pop();
4520 } else {
4521 break;
4522 }
4523 }
4524
4525 if highlights.next_capture.is_none() {
4526 highlights.next_capture = highlights.captures.next();
4527 }
4528
4529 while let Some(capture) = highlights.next_capture.as_ref() {
4530 if self.range.start < capture.node.start_byte() {
4531 next_capture_start = capture.node.start_byte();
4532 break;
4533 } else {
4534 let highlight_id =
4535 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4536 highlights
4537 .stack
4538 .push((capture.node.end_byte(), highlight_id));
4539 highlights.next_capture = highlights.captures.next();
4540 }
4541 }
4542 }
4543
4544 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4545 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4546 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4547 if endpoint.offset <= self.range.start {
4548 self.update_diagnostic_depths(endpoint);
4549 diagnostic_endpoints.next();
4550 } else {
4551 next_diagnostic_endpoint = endpoint.offset;
4552 break;
4553 }
4554 }
4555 }
4556 self.diagnostic_endpoints = diagnostic_endpoints;
4557
4558 if let Some(chunk) = self.chunks.peek() {
4559 let chunk_start = self.range.start;
4560 let mut chunk_end = (self.chunks.offset() + chunk.len())
4561 .min(next_capture_start)
4562 .min(next_diagnostic_endpoint);
4563 let mut highlight_id = None;
4564 if let Some(highlights) = self.highlights.as_ref() {
4565 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4566 chunk_end = chunk_end.min(*parent_capture_end);
4567 highlight_id = Some(*parent_highlight_id);
4568 }
4569 }
4570
4571 let slice =
4572 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4573 self.range.start = chunk_end;
4574 if self.range.start == self.chunks.offset() + chunk.len() {
4575 self.chunks.next().unwrap();
4576 }
4577
4578 Some(Chunk {
4579 text: slice,
4580 syntax_highlight_id: highlight_id,
4581 diagnostic_severity: self.current_diagnostic_severity(),
4582 is_unnecessary: self.current_code_is_unnecessary(),
4583 ..Default::default()
4584 })
4585 } else {
4586 None
4587 }
4588 }
4589}
4590
4591impl operation_queue::Operation for Operation {
4592 fn lamport_timestamp(&self) -> clock::Lamport {
4593 match self {
4594 Operation::Buffer(_) => {
4595 unreachable!("buffer operations should never be deferred at this layer")
4596 }
4597 Operation::UpdateDiagnostics {
4598 lamport_timestamp, ..
4599 }
4600 | Operation::UpdateSelections {
4601 lamport_timestamp, ..
4602 }
4603 | Operation::UpdateCompletionTriggers {
4604 lamport_timestamp, ..
4605 } => *lamport_timestamp,
4606 }
4607 }
4608}
4609
4610impl Default for Diagnostic {
4611 fn default() -> Self {
4612 Self {
4613 source: Default::default(),
4614 code: None,
4615 severity: DiagnosticSeverity::ERROR,
4616 message: Default::default(),
4617 group_id: 0,
4618 is_primary: false,
4619 is_disk_based: false,
4620 is_unnecessary: false,
4621 data: None,
4622 }
4623 }
4624}
4625
4626impl IndentSize {
4627 /// Returns an [`IndentSize`] representing the given spaces.
4628 pub fn spaces(len: u32) -> Self {
4629 Self {
4630 len,
4631 kind: IndentKind::Space,
4632 }
4633 }
4634
4635 /// Returns an [`IndentSize`] representing a tab.
4636 pub fn tab() -> Self {
4637 Self {
4638 len: 1,
4639 kind: IndentKind::Tab,
4640 }
4641 }
4642
4643 /// An iterator over the characters represented by this [`IndentSize`].
4644 pub fn chars(&self) -> impl Iterator<Item = char> {
4645 iter::repeat(self.char()).take(self.len as usize)
4646 }
4647
4648 /// The character representation of this [`IndentSize`].
4649 pub fn char(&self) -> char {
4650 match self.kind {
4651 IndentKind::Space => ' ',
4652 IndentKind::Tab => '\t',
4653 }
4654 }
4655
4656 /// Consumes the current [`IndentSize`] and returns a new one that has
4657 /// been shrunk or enlarged by the given size along the given direction.
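    ///
    /// For example, growing an indent of four spaces by four more spaces yields an
    /// eight-space indent: `IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4))`
    /// has `len == 8`.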
4658 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4659 match direction {
4660 Ordering::Less => {
4661 if self.kind == size.kind && self.len >= size.len {
4662 self.len -= size.len;
4663 }
4664 }
4665 Ordering::Equal => {}
4666 Ordering::Greater => {
4667 if self.len == 0 {
4668 self = size;
4669 } else if self.kind == size.kind {
4670 self.len += size.len;
4671 }
4672 }
4673 }
4674 self
4675 }
4676
4677 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4678 match self.kind {
4679 IndentKind::Space => self.len as usize,
4680 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4681 }
4682 }
4683}
4684
4685#[cfg(any(test, feature = "test-support"))]
4686pub struct TestFile {
4687 pub path: Arc<Path>,
4688 pub root_name: String,
4689 pub local_root: Option<PathBuf>,
4690}
4691
4692#[cfg(any(test, feature = "test-support"))]
4693impl File for TestFile {
4694 fn path(&self) -> &Arc<Path> {
4695 &self.path
4696 }
4697
4698 fn full_path(&self, _: &gpui::App) -> PathBuf {
4699 PathBuf::from(&self.root_name).join(self.path.as_ref())
4700 }
4701
4702 fn as_local(&self) -> Option<&dyn LocalFile> {
4703 if self.local_root.is_some() {
4704 Some(self)
4705 } else {
4706 None
4707 }
4708 }
4709
4710 fn disk_state(&self) -> DiskState {
4711 unimplemented!()
4712 }
4713
4714 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4715 self.path().file_name().unwrap_or(self.root_name.as_ref())
4716 }
4717
4718 fn worktree_id(&self, _: &App) -> WorktreeId {
4719 WorktreeId::from_usize(0)
4720 }
4721
4722 fn to_proto(&self, _: &App) -> rpc::proto::File {
4723 unimplemented!()
4724 }
4725
4726 fn is_private(&self) -> bool {
4727 false
4728 }
4729}
4730
4731#[cfg(any(test, feature = "test-support"))]
4732impl LocalFile for TestFile {
4733 fn abs_path(&self, _cx: &App) -> PathBuf {
4734 PathBuf::from(self.local_root.as_ref().unwrap())
4735 .join(&self.root_name)
4736 .join(self.path.as_ref())
4737 }
4738
4739 fn load(&self, _cx: &App) -> Task<Result<String>> {
4740 unimplemented!()
4741 }
4742
4743 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4744 unimplemented!()
4745 }
4746}
4747
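/// Groups consecutive values into contiguous ranges, starting a new range whenever a gap is
/// encountered or the current range reaches `max_len` values.
///
/// For example, `contiguous_ranges([1, 2, 3, 5].into_iter(), 100)` yields `1..4` followed by `5..6`.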
4748pub(crate) fn contiguous_ranges(
4749 values: impl Iterator<Item = u32>,
4750 max_len: usize,
4751) -> impl Iterator<Item = Range<u32>> {
4752 let mut values = values;
4753 let mut current_range: Option<Range<u32>> = None;
4754 std::iter::from_fn(move || {
4755 loop {
4756 if let Some(value) = values.next() {
4757 if let Some(range) = &mut current_range {
4758 if value == range.end && range.len() < max_len {
4759 range.end += 1;
4760 continue;
4761 }
4762 }
4763
4764 let prev_range = current_range.clone();
4765 current_range = Some(value..(value + 1));
4766 if prev_range.is_some() {
4767 return prev_range;
4768 }
4769 } else {
4770 return current_range.take();
4771 }
4772 }
4773 })
4774}
4775
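/// Classifies characters as word, whitespace, or punctuation characters, optionally taking the
/// word characters configured for a language scope into account.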
4776#[derive(Default, Debug)]
4777pub struct CharClassifier {
4778 scope: Option<LanguageScope>,
4779 for_completion: bool,
4780 ignore_punctuation: bool,
4781}
4782
4783impl CharClassifier {
4784 pub fn new(scope: Option<LanguageScope>) -> Self {
4785 Self {
4786 scope,
4787 for_completion: false,
4788 ignore_punctuation: false,
4789 }
4790 }
4791
4792 pub fn for_completion(self, for_completion: bool) -> Self {
4793 Self {
4794 for_completion,
4795 ..self
4796 }
4797 }
4798
4799 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4800 Self {
4801 ignore_punctuation,
4802 ..self
4803 }
4804 }
4805
4806 pub fn is_whitespace(&self, c: char) -> bool {
4807 self.kind(c) == CharKind::Whitespace
4808 }
4809
4810 pub fn is_word(&self, c: char) -> bool {
4811 self.kind(c) == CharKind::Word
4812 }
4813
4814 pub fn is_punctuation(&self, c: char) -> bool {
4815 self.kind(c) == CharKind::Punctuation
4816 }
4817
4818 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4819 if c.is_alphanumeric() || c == '_' {
4820 return CharKind::Word;
4821 }
4822
4823 if let Some(scope) = &self.scope {
4824 let characters = if self.for_completion {
4825 scope.completion_query_characters()
4826 } else {
4827 scope.word_characters()
4828 };
4829 if let Some(characters) = characters {
4830 if characters.contains(&c) {
4831 return CharKind::Word;
4832 }
4833 }
4834 }
4835
4836 if c.is_whitespace() {
4837 return CharKind::Whitespace;
4838 }
4839
4840 if ignore_punctuation {
4841 CharKind::Word
4842 } else {
4843 CharKind::Punctuation
4844 }
4845 }
4846
4847 pub fn kind(&self, c: char) -> CharKind {
4848 self.kind_with(c, self.ignore_punctuation)
4849 }
4850}
4851
4852/// Find all of the ranges of whitespace that occur at the ends of lines
4853/// in the given rope.
4854///
4855/// This could also be done with a regex search, but this implementation
4856/// avoids copying text.
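///
/// For example, for the text `"a \nb\t\t\nc"`, the returned ranges are `1..2` and `4..6`.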
4857pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4858 let mut ranges = Vec::new();
4859
4860 let mut offset = 0;
4861 let mut prev_chunk_trailing_whitespace_range = 0..0;
4862 for chunk in rope.chunks() {
4863 let mut prev_line_trailing_whitespace_range = 0..0;
4864 for (i, line) in chunk.split('\n').enumerate() {
4865 let line_end_offset = offset + line.len();
4866 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4867 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4868
4869 if i == 0 && trimmed_line_len == 0 {
4870 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4871 }
4872 if !prev_line_trailing_whitespace_range.is_empty() {
4873 ranges.push(prev_line_trailing_whitespace_range);
4874 }
4875
4876 offset = line_end_offset + 1;
4877 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4878 }
4879
4880 offset -= 1;
4881 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4882 }
4883
4884 if !prev_chunk_trailing_whitespace_range.is_empty() {
4885 ranges.push(prev_chunk_trailing_whitespace_range);
4886 }
4887
4888 ranges
4889}