1use crate::{
2 DebugVariableCapture, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result, anyhow};
21use async_watch as watch;
22use clock::Lamport;
23pub use clock::ReplicaId;
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
29 Task, TaskLabel, TextStyle,
30};
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 ffi::OsStr,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, Range},
51 path::{Path, PathBuf},
52 rc,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
63 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
64 ToPointUtf16, Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{RangeExt, debug_panic, maybe};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76#[derive(Debug)]
77pub struct DebugVariableRanges {
78 pub buffer_id: BufferId,
79 pub range: Range<usize>,
80}
81
82/// A label for the background task spawned by the buffer to compute
83/// a diff against the contents of its file.
84pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
85
/// Indicates whether a [`Buffer`] can be edited.
87#[derive(PartialEq, Clone, Copy, Debug)]
88pub enum Capability {
89 /// The buffer is a mutable replica.
90 ReadWrite,
91 /// The buffer is a read-only replica.
92 ReadOnly,
93}
94
95pub type BufferRow = u32;
96
97/// An in-memory representation of a source code file, including its text,
98/// syntax trees, git status, and diagnostics.
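///
/// A minimal construction sketch (illustrative; assumes a gpui entity context `cx`):
///
/// ```ignore
/// // Create a local buffer entity and read its contents back via a snapshot.
/// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
/// let snapshot = buffer.read(cx).snapshot();
/// assert_eq!(snapshot.text(), "fn main() {}");
/// ```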
99pub struct Buffer {
100 text: TextBuffer,
101 branch_state: Option<BufferBranchState>,
102 /// Filesystem state, `None` when there is no path.
103 file: Option<Arc<dyn File>>,
104 /// The mtime of the file when this buffer was last loaded from
105 /// or saved to disk.
106 saved_mtime: Option<MTime>,
107 /// The version vector when this buffer was last loaded from
108 /// or saved to disk.
109 saved_version: clock::Global,
110 preview_version: clock::Global,
111 transaction_depth: usize,
112 was_dirty_before_starting_transaction: Option<bool>,
113 reload_task: Option<Task<Result<()>>>,
114 language: Option<Arc<Language>>,
115 autoindent_requests: Vec<Arc<AutoindentRequest>>,
116 pending_autoindent: Option<Task<()>>,
117 sync_parse_timeout: Duration,
118 syntax_map: Mutex<SyntaxMap>,
119 reparse: Option<Task<()>>,
120 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
121 non_text_state_update_count: usize,
122 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
123 remote_selections: TreeMap<ReplicaId, SelectionSet>,
124 diagnostics_timestamp: clock::Lamport,
125 completion_triggers: BTreeSet<String>,
126 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
127 completion_triggers_timestamp: clock::Lamport,
128 deferred_ops: OperationQueue<Operation>,
129 capability: Capability,
130 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` as of the most recent call.
133 has_unsaved_edits: Cell<(clock::Global, bool)>,
134 change_bits: Vec<rc::Weak<Cell<bool>>>,
135 _subscriptions: Vec<gpui::Subscription>,
136}
137
138#[derive(Copy, Clone, Debug, PartialEq, Eq)]
139pub enum ParseStatus {
140 Idle,
141 Parsing,
142}
143
144struct BufferBranchState {
145 base_buffer: Entity<Buffer>,
146 merged_operations: Vec<Lamport>,
147}
148
149/// An immutable, cheaply cloneable representation of a fixed
150/// state of a buffer.
151pub struct BufferSnapshot {
152 pub text: text::BufferSnapshot,
153 pub(crate) syntax: SyntaxSnapshot,
154 file: Option<Arc<dyn File>>,
155 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
156 remote_selections: TreeMap<ReplicaId, SelectionSet>,
157 language: Option<Arc<Language>>,
158 non_text_state_update_count: usize,
159}
160
161/// The kind and amount of indentation in a particular line. For now,
162/// assumes that indentation is all the same character.
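///
/// For example (illustrative): a line indented with four spaces corresponds to
/// `len == 4` and `kind == IndentKind::Space`, while a single leading tab
/// corresponds to `len == 1` and `kind == IndentKind::Tab`.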
163#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
164pub struct IndentSize {
165 /// The number of bytes that comprise the indentation.
166 pub len: u32,
167 /// The kind of whitespace used for indentation.
168 pub kind: IndentKind,
169}
170
171/// A whitespace character that's used for indentation.
172#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
173pub enum IndentKind {
174 /// An ASCII space character.
175 #[default]
176 Space,
177 /// An ASCII tab character.
178 Tab,
179}
180
181/// The shape of a selection cursor.
182#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
183#[serde(rename_all = "snake_case")]
184pub enum CursorShape {
185 /// A vertical bar
186 #[default]
187 Bar,
188 /// A block that surrounds the following character
189 Block,
190 /// An underline that runs along the following character
191 Underline,
192 /// A box drawn around the following character
193 Hollow,
194}
195
196#[derive(Clone, Debug)]
197struct SelectionSet {
198 line_mode: bool,
199 cursor_shape: CursorShape,
200 selections: Arc<[Selection<Anchor>]>,
201 lamport_timestamp: clock::Lamport,
202}
203
204/// A diagnostic associated with a certain range of a buffer.
205#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
206pub struct Diagnostic {
207 /// The name of the service that produced this diagnostic.
208 pub source: Option<String>,
209 /// A machine-readable code that identifies this diagnostic.
210 pub code: Option<NumberOrString>,
211 pub code_description: Option<lsp::Url>,
212 /// Whether this diagnostic is a hint, warning, or error.
213 pub severity: DiagnosticSeverity,
214 /// The human-readable message associated with this diagnostic.
215 pub message: String,
    /// The human-readable message in Markdown format, if available.
217 pub markdown: Option<String>,
218 /// An id that identifies the group to which this diagnostic belongs.
219 ///
220 /// When a language server produces a diagnostic with
221 /// one or more associated diagnostics, those diagnostics are all
222 /// assigned a single group ID.
223 pub group_id: usize,
224 /// Whether this diagnostic is the primary diagnostic for its group.
225 ///
226 /// In a given group, the primary diagnostic is the top-level diagnostic
227 /// returned by the language server. The non-primary diagnostics are the
228 /// associated diagnostics.
229 pub is_primary: bool,
230 /// Whether this diagnostic is considered to originate from an analysis of
231 /// files on disk, as opposed to any unsaved buffer contents. This is a
232 /// property of a given diagnostic source, and is configured for a given
233 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
234 /// for the language server.
235 pub is_disk_based: bool,
236 /// Whether this diagnostic marks unnecessary code.
237 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic. Passed back to
    /// the language server when code actions are requested for this diagnostic.
239 pub data: Option<Value>,
240}
241
242/// An operation used to synchronize this buffer with its other replicas.
243#[derive(Clone, Debug, PartialEq)]
244pub enum Operation {
245 /// A text operation.
246 Buffer(text::Operation),
247
248 /// An update to the buffer's diagnostics.
249 UpdateDiagnostics {
250 /// The id of the language server that produced the new diagnostics.
251 server_id: LanguageServerId,
252 /// The diagnostics.
253 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
254 /// The buffer's lamport timestamp.
255 lamport_timestamp: clock::Lamport,
256 },
257
258 /// An update to the most recent selections in this buffer.
259 UpdateSelections {
260 /// The selections.
261 selections: Arc<[Selection<Anchor>]>,
262 /// The buffer's lamport timestamp.
263 lamport_timestamp: clock::Lamport,
264 /// Whether the selections are in 'line mode'.
265 line_mode: bool,
266 /// The [`CursorShape`] associated with these selections.
267 cursor_shape: CursorShape,
268 },
269
270 /// An update to the characters that should trigger autocompletion
271 /// for this buffer.
272 UpdateCompletionTriggers {
273 /// The characters that trigger autocompletion.
274 triggers: Vec<String>,
275 /// The buffer's lamport timestamp.
276 lamport_timestamp: clock::Lamport,
277 /// The language server ID.
278 server_id: LanguageServerId,
279 },
280}
281
282/// An event that occurs in a buffer.
283#[derive(Clone, Debug, PartialEq)]
284pub enum BufferEvent {
285 /// The buffer was changed in a way that must be
286 /// propagated to its other replicas.
287 Operation {
288 operation: Operation,
289 is_local: bool,
290 },
291 /// The buffer was edited.
292 Edited,
293 /// The buffer's `dirty` bit changed.
294 DirtyChanged,
295 /// The buffer was saved.
296 Saved,
297 /// The buffer's file was changed on disk.
298 FileHandleChanged,
299 /// The buffer was reloaded.
300 Reloaded,
    /// The buffer needs to be reloaded.
302 ReloadNeeded,
303 /// The buffer's language was changed.
304 LanguageChanged,
305 /// The buffer's syntax trees were updated.
306 Reparsed,
307 /// The buffer's diagnostics were updated.
308 DiagnosticsUpdated,
309 /// The buffer gained or lost editing capabilities.
310 CapabilityChanged,
311 /// The buffer was explicitly requested to close.
312 Closed,
313 /// The buffer was discarded when closing.
314 Discarded,
315}
316
317/// The file associated with a buffer.
318pub trait File: Send + Sync + Any {
319 /// Returns the [`LocalFile`] associated with this file, if the
320 /// file is local.
321 fn as_local(&self) -> Option<&dyn LocalFile>;
322
323 /// Returns whether this file is local.
324 fn is_local(&self) -> bool {
325 self.as_local().is_some()
326 }
327
328 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
329 /// only available in some states, such as modification time.
330 fn disk_state(&self) -> DiskState;
331
332 /// Returns the path of this file relative to the worktree's root directory.
333 fn path(&self) -> &Arc<Path>;
334
335 /// Returns the path of this file relative to the worktree's parent directory (this means it
336 /// includes the name of the worktree's root folder).
337 fn full_path(&self, cx: &App) -> PathBuf;
338
339 /// Returns the last component of this handle's absolute path. If this handle refers to the root
340 /// of its worktree, then this method will return the name of the worktree itself.
341 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
342
343 /// Returns the id of the worktree to which this file belongs.
344 ///
345 /// This is needed for looking up project-specific settings.
346 fn worktree_id(&self, cx: &App) -> WorktreeId;
347
348 /// Converts this file into a protobuf message.
349 fn to_proto(&self, cx: &App) -> rpc::proto::File;
350
    /// Returns whether Zed considers this to be a private file.
352 fn is_private(&self) -> bool;
353}
354
355/// The file's storage status - whether it's stored (`Present`), and if so when it was last
356/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
357/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
358/// indicator for new files.
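///
/// A quick sketch of the accessors defined below (illustrative):
///
/// ```ignore
/// assert!(!DiskState::New.exists());
/// assert_eq!(DiskState::Deleted.mtime(), None);
/// ```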
359#[derive(Copy, Clone, Debug, PartialEq)]
360pub enum DiskState {
361 /// File created in Zed that has not been saved.
362 New,
363 /// File present on the filesystem.
364 Present { mtime: MTime },
365 /// Deleted file that was previously present.
366 Deleted,
367}
368
369impl DiskState {
370 /// Returns the file's last known modification time on disk.
371 pub fn mtime(self) -> Option<MTime> {
372 match self {
373 DiskState::New => None,
374 DiskState::Present { mtime } => Some(mtime),
375 DiskState::Deleted => None,
376 }
377 }
378
379 pub fn exists(&self) -> bool {
380 match self {
381 DiskState::New => false,
382 DiskState::Present { .. } => true,
383 DiskState::Deleted => false,
384 }
385 }
386}
387
388/// The file associated with a buffer, in the case where the file is on the local disk.
389pub trait LocalFile: File {
    /// Returns the absolute path of this file.
391 fn abs_path(&self, cx: &App) -> PathBuf;
392
393 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
394 fn load(&self, cx: &App) -> Task<Result<String>>;
395
396 /// Loads the file's contents from disk.
397 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
398}
399
400/// The auto-indent behavior associated with an editing operation.
401/// For some editing operations, each affected line of text has its
402/// indentation recomputed. For other operations, the entire block
403/// of edited text is adjusted uniformly.
404#[derive(Clone, Debug)]
405pub enum AutoindentMode {
406 /// Indent each line of inserted text.
407 EachLine,
408 /// Apply the same indentation adjustment to all of the lines
409 /// in a given insertion.
410 Block {
411 /// The original indentation column of the first line of each
412 /// insertion, if it has been copied.
413 ///
414 /// Knowing this makes it possible to preserve the relative indentation
415 /// of every line in the insertion from when it was copied.
416 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion is shifted by `b - a` columns.
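        ///
        /// For example (illustrative numbers): if the copied block's first line was
        /// originally indented to column 4 (`a = 4`) and is auto-indented to column 8
        /// (`b = 8`), the remaining lines are shifted right by `b - a = 4` columns,
        /// preserving the block's internal relative indentation.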
420 original_indent_columns: Vec<Option<u32>>,
421 },
422}
423
424#[derive(Clone)]
425struct AutoindentRequest {
426 before_edit: BufferSnapshot,
427 entries: Vec<AutoindentRequestEntry>,
428 is_block_mode: bool,
429 ignore_empty_lines: bool,
430}
431
432#[derive(Debug, Clone)]
433struct AutoindentRequestEntry {
434 /// A range of the buffer whose indentation should be adjusted.
435 range: Range<Anchor>,
436 /// Whether or not these lines should be considered brand new, for the
437 /// purpose of auto-indent. When text is not new, its indentation will
438 /// only be adjusted if the suggested indentation level has *changed*
439 /// since the edit was made.
440 first_line_is_new: bool,
441 indent_size: IndentSize,
442 original_indent_column: Option<u32>,
443}
444
445#[derive(Debug)]
446struct IndentSuggestion {
447 basis_row: u32,
448 delta: Ordering,
449 within_error: bool,
450}
451
452struct BufferChunkHighlights<'a> {
453 captures: SyntaxMapCaptures<'a>,
454 next_capture: Option<SyntaxMapCapture<'a>>,
455 stack: Vec<(usize, HighlightId)>,
456 highlight_maps: Vec<HighlightMap>,
457}
458
459/// An iterator that yields chunks of a buffer's text, along with their
460/// syntax highlights and diagnostic status.
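///
/// A rough iteration sketch (illustrative; assumes a `BufferSnapshot` named `snapshot`):
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // `syntax_highlight_id` can be resolved to a style via a `SyntaxTheme`.
///     let _ = chunk.syntax_highlight_id;
///     print!("{}", chunk.text);
/// }
/// ```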
461pub struct BufferChunks<'a> {
462 buffer_snapshot: Option<&'a BufferSnapshot>,
463 range: Range<usize>,
464 chunks: text::Chunks<'a>,
465 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
466 error_depth: usize,
467 warning_depth: usize,
468 information_depth: usize,
469 hint_depth: usize,
470 unnecessary_depth: usize,
471 highlights: Option<BufferChunkHighlights<'a>>,
472}
473
474/// A chunk of a buffer's text, along with its syntax highlight and
475/// diagnostic status.
476#[derive(Clone, Debug, Default)]
477pub struct Chunk<'a> {
478 /// The text of the chunk.
479 pub text: &'a str,
480 /// The syntax highlighting style of the chunk.
481 pub syntax_highlight_id: Option<HighlightId>,
482 /// The highlight style that has been applied to this chunk in
483 /// the editor.
484 pub highlight_style: Option<HighlightStyle>,
485 /// The severity of diagnostic associated with this chunk, if any.
486 pub diagnostic_severity: Option<DiagnosticSeverity>,
487 /// Whether this chunk of text is marked as unnecessary.
488 pub is_unnecessary: bool,
489 /// Whether this chunk of text was originally a tab character.
490 pub is_tab: bool,
491}
492
493/// A set of edits to a given version of a buffer, computed asynchronously.
494#[derive(Debug)]
495pub struct Diff {
496 pub base_version: clock::Global,
497 pub line_ending: LineEnding,
498 pub edits: Vec<(Range<usize>, Arc<str>)>,
499}
500
501#[derive(Clone, Copy)]
502pub(crate) struct DiagnosticEndpoint {
503 offset: usize,
504 is_start: bool,
505 severity: DiagnosticSeverity,
506 is_unnecessary: bool,
507}
508
509/// A class of characters, used for characterizing a run of text.
510#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
511pub enum CharKind {
512 /// Whitespace.
513 Whitespace,
514 /// Punctuation.
515 Punctuation,
516 /// Word.
517 Word,
518}
519
/// A runnable is the set of data about a buffer region that can be resolved into a task.
521pub struct Runnable {
522 pub tags: SmallVec<[RunnableTag; 1]>,
523 pub language: Arc<Language>,
524 pub buffer: BufferId,
525}
526
527#[derive(Default, Clone, Debug)]
528pub struct HighlightedText {
529 pub text: SharedString,
530 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
531}
532
533#[derive(Default, Debug)]
534struct HighlightedTextBuilder {
535 pub text: String,
536 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
537}
538
539impl HighlightedText {
540 pub fn from_buffer_range<T: ToOffset>(
541 range: Range<T>,
542 snapshot: &text::BufferSnapshot,
543 syntax_snapshot: &SyntaxSnapshot,
544 override_style: Option<HighlightStyle>,
545 syntax_theme: &SyntaxTheme,
546 ) -> Self {
547 let mut highlighted_text = HighlightedTextBuilder::default();
548 highlighted_text.add_text_from_buffer_range(
549 range,
550 snapshot,
551 syntax_snapshot,
552 override_style,
553 syntax_theme,
554 );
555 highlighted_text.build()
556 }
557
558 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
559 gpui::StyledText::new(self.text.clone())
560 .with_default_highlights(default_style, self.highlights.iter().cloned())
561 }
562
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within it, along with a boolean indicating whether more lines follow.
565 pub fn first_line_preview(self) -> (Self, bool) {
566 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
567 let first_line = &self.text[..newline_ix];
568
569 // Trim leading whitespace, unless an edit starts prior to it.
570 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
571 if let Some((first_highlight_range, _)) = self.highlights.first() {
572 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
573 }
574
575 let preview_text = &first_line[preview_start_ix..];
576 let preview_highlights = self
577 .highlights
578 .into_iter()
579 .take_while(|(range, _)| range.start < newline_ix)
580 .filter_map(|(mut range, highlight)| {
581 range.start = range.start.saturating_sub(preview_start_ix);
582 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
583 if range.is_empty() {
584 None
585 } else {
586 Some((range, highlight))
587 }
588 });
589
590 let preview = Self {
591 text: SharedString::new(preview_text),
592 highlights: preview_highlights.collect(),
593 };
594
595 (preview, self.text.len() > newline_ix)
596 }
597}
598
599impl HighlightedTextBuilder {
600 pub fn build(self) -> HighlightedText {
601 HighlightedText {
602 text: self.text.into(),
603 highlights: self.highlights,
604 }
605 }
606
607 pub fn add_text_from_buffer_range<T: ToOffset>(
608 &mut self,
609 range: Range<T>,
610 snapshot: &text::BufferSnapshot,
611 syntax_snapshot: &SyntaxSnapshot,
612 override_style: Option<HighlightStyle>,
613 syntax_theme: &SyntaxTheme,
614 ) {
615 let range = range.to_offset(snapshot);
616 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
617 let start = self.text.len();
618 self.text.push_str(chunk.text);
619 let end = self.text.len();
620
621 if let Some(mut highlight_style) = chunk
622 .syntax_highlight_id
623 .and_then(|id| id.style(syntax_theme))
624 {
625 if let Some(override_style) = override_style {
626 highlight_style.highlight(override_style);
627 }
628 self.highlights.push((start..end, highlight_style));
629 } else if let Some(override_style) = override_style {
630 self.highlights.push((start..end, override_style));
631 }
632 }
633 }
634
635 fn highlighted_chunks<'a>(
636 range: Range<usize>,
637 snapshot: &'a text::BufferSnapshot,
638 syntax_snapshot: &'a SyntaxSnapshot,
639 ) -> BufferChunks<'a> {
640 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
641 grammar.highlights_query.as_ref()
642 });
643
644 let highlight_maps = captures
645 .grammars()
646 .iter()
647 .map(|grammar| grammar.highlight_map())
648 .collect();
649
650 BufferChunks::new(
651 snapshot.as_rope(),
652 range,
653 Some((captures, highlight_maps)),
654 false,
655 None,
656 )
657 }
658}
659
660#[derive(Clone)]
661pub struct EditPreview {
662 old_snapshot: text::BufferSnapshot,
663 applied_edits_snapshot: text::BufferSnapshot,
664 syntax_snapshot: SyntaxSnapshot,
665}
666
667impl EditPreview {
668 pub fn highlight_edits(
669 &self,
670 current_snapshot: &BufferSnapshot,
671 edits: &[(Range<Anchor>, String)],
672 include_deletions: bool,
673 cx: &App,
674 ) -> HighlightedText {
675 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
676 return HighlightedText::default();
677 };
678
679 let mut highlighted_text = HighlightedTextBuilder::default();
680
681 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
682
683 let insertion_highlight_style = HighlightStyle {
684 background_color: Some(cx.theme().status().created_background),
685 ..Default::default()
686 };
687 let deletion_highlight_style = HighlightStyle {
688 background_color: Some(cx.theme().status().deleted_background),
689 ..Default::default()
690 };
691 let syntax_theme = cx.theme().syntax();
692
693 for (range, edit_text) in edits {
694 let edit_new_end_in_preview_snapshot = range
695 .end
696 .bias_right(&self.old_snapshot)
697 .to_offset(&self.applied_edits_snapshot);
698 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
699
700 let unchanged_range_in_preview_snapshot =
701 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
702 if !unchanged_range_in_preview_snapshot.is_empty() {
703 highlighted_text.add_text_from_buffer_range(
704 unchanged_range_in_preview_snapshot,
705 &self.applied_edits_snapshot,
706 &self.syntax_snapshot,
707 None,
708 &syntax_theme,
709 );
710 }
711
712 let range_in_current_snapshot = range.to_offset(current_snapshot);
713 if include_deletions && !range_in_current_snapshot.is_empty() {
714 highlighted_text.add_text_from_buffer_range(
715 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
718 Some(deletion_highlight_style),
719 &syntax_theme,
720 );
721 }
722
723 if !edit_text.is_empty() {
724 highlighted_text.add_text_from_buffer_range(
725 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
726 &self.applied_edits_snapshot,
727 &self.syntax_snapshot,
728 Some(insertion_highlight_style),
729 &syntax_theme,
730 );
731 }
732
733 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
734 }
735
736 highlighted_text.add_text_from_buffer_range(
737 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
738 &self.applied_edits_snapshot,
739 &self.syntax_snapshot,
740 None,
741 &syntax_theme,
742 );
743
744 highlighted_text.build()
745 }
746
747 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
748 let (first, _) = edits.first()?;
749 let (last, _) = edits.last()?;
750
751 let start = first
752 .start
753 .bias_left(&self.old_snapshot)
754 .to_point(&self.applied_edits_snapshot);
755 let end = last
756 .end
757 .bias_right(&self.old_snapshot)
758 .to_point(&self.applied_edits_snapshot);
759
760 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
761 let range = Point::new(start.row, 0)
762 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
763
764 Some(range.to_offset(&self.applied_edits_snapshot))
765 }
766}
767
768#[derive(Clone, Debug, PartialEq, Eq)]
769pub struct BracketMatch {
770 pub open_range: Range<usize>,
771 pub close_range: Range<usize>,
772 pub newline_only: bool,
773}
774
775impl Buffer {
776 /// Create a new buffer with the given base text.
777 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
778 Self::build(
779 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
780 None,
781 Capability::ReadWrite,
782 )
783 }
784
785 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
786 pub fn local_normalized(
787 base_text_normalized: Rope,
788 line_ending: LineEnding,
789 cx: &Context<Self>,
790 ) -> Self {
791 Self::build(
792 TextBuffer::new_normalized(
793 0,
794 cx.entity_id().as_non_zero_u64().into(),
795 line_ending,
796 base_text_normalized,
797 ),
798 None,
799 Capability::ReadWrite,
800 )
801 }
802
803 /// Create a new buffer that is a replica of a remote buffer.
804 pub fn remote(
805 remote_id: BufferId,
806 replica_id: ReplicaId,
807 capability: Capability,
808 base_text: impl Into<String>,
809 ) -> Self {
810 Self::build(
811 TextBuffer::new(replica_id, remote_id, base_text.into()),
812 None,
813 capability,
814 )
815 }
816
817 /// Create a new buffer that is a replica of a remote buffer, populating its
818 /// state from the given protobuf message.
819 pub fn from_proto(
820 replica_id: ReplicaId,
821 capability: Capability,
822 message: proto::BufferState,
823 file: Option<Arc<dyn File>>,
824 ) -> Result<Self> {
825 let buffer_id = BufferId::new(message.id)
826 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
827 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
828 let mut this = Self::build(buffer, file, capability);
829 this.text.set_line_ending(proto::deserialize_line_ending(
830 rpc::proto::LineEnding::from_i32(message.line_ending)
831 .ok_or_else(|| anyhow!("missing line_ending"))?,
832 ));
833 this.saved_version = proto::deserialize_version(&message.saved_version);
834 this.saved_mtime = message.saved_mtime.map(|time| time.into());
835 Ok(this)
836 }
837
838 /// Serialize the buffer's state to a protobuf message.
839 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
840 proto::BufferState {
841 id: self.remote_id().into(),
842 file: self.file.as_ref().map(|f| f.to_proto(cx)),
843 base_text: self.base_text().to_string(),
844 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
845 saved_version: proto::serialize_version(&self.saved_version),
846 saved_mtime: self.saved_mtime.map(|time| time.into()),
847 }
848 }
849
850 /// Serialize as protobufs all of the changes to the buffer since the given version.
851 pub fn serialize_ops(
852 &self,
853 since: Option<clock::Global>,
854 cx: &App,
855 ) -> Task<Vec<proto::Operation>> {
856 let mut operations = Vec::new();
857 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
858
859 operations.extend(self.remote_selections.iter().map(|(_, set)| {
860 proto::serialize_operation(&Operation::UpdateSelections {
861 selections: set.selections.clone(),
862 lamport_timestamp: set.lamport_timestamp,
863 line_mode: set.line_mode,
864 cursor_shape: set.cursor_shape,
865 })
866 }));
867
868 for (server_id, diagnostics) in &self.diagnostics {
869 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
870 lamport_timestamp: self.diagnostics_timestamp,
871 server_id: *server_id,
872 diagnostics: diagnostics.iter().cloned().collect(),
873 }));
874 }
875
876 for (server_id, completions) in &self.completion_triggers_per_language_server {
877 operations.push(proto::serialize_operation(
878 &Operation::UpdateCompletionTriggers {
879 triggers: completions.iter().cloned().collect(),
880 lamport_timestamp: self.completion_triggers_timestamp,
881 server_id: *server_id,
882 },
883 ));
884 }
885
886 let text_operations = self.text.operations().clone();
887 cx.background_spawn(async move {
888 let since = since.unwrap_or_default();
889 operations.extend(
890 text_operations
891 .iter()
892 .filter(|(_, op)| !since.observed(op.timestamp()))
893 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
894 );
895 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
896 operations
897 })
898 }
899
900 /// Assign a language to the buffer, returning the buffer.
901 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
902 self.set_language(Some(language), cx);
903 self
904 }
905
906 /// Returns the [`Capability`] of this buffer.
907 pub fn capability(&self) -> Capability {
908 self.capability
909 }
910
911 /// Whether this buffer can only be read.
912 pub fn read_only(&self) -> bool {
913 self.capability == Capability::ReadOnly
914 }
915
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
917 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
918 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
919 let snapshot = buffer.snapshot();
920 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
921 Self {
922 saved_mtime,
923 saved_version: buffer.version(),
924 preview_version: buffer.version(),
925 reload_task: None,
926 transaction_depth: 0,
927 was_dirty_before_starting_transaction: None,
928 has_unsaved_edits: Cell::new((buffer.version(), false)),
929 text: buffer,
930 branch_state: None,
931 file,
932 capability,
933 syntax_map,
934 reparse: None,
935 non_text_state_update_count: 0,
936 sync_parse_timeout: Duration::from_millis(1),
937 parse_status: async_watch::channel(ParseStatus::Idle),
938 autoindent_requests: Default::default(),
939 pending_autoindent: Default::default(),
940 language: None,
941 remote_selections: Default::default(),
942 diagnostics: Default::default(),
943 diagnostics_timestamp: Default::default(),
944 completion_triggers: Default::default(),
945 completion_triggers_per_language_server: Default::default(),
946 completion_triggers_timestamp: Default::default(),
947 deferred_ops: OperationQueue::new(),
948 has_conflict: false,
949 change_bits: Default::default(),
950 _subscriptions: Vec::new(),
951 }
952 }
953
954 pub fn build_snapshot(
955 text: Rope,
956 language: Option<Arc<Language>>,
957 language_registry: Option<Arc<LanguageRegistry>>,
958 cx: &mut App,
959 ) -> impl Future<Output = BufferSnapshot> + use<> {
960 let entity_id = cx.reserve_entity::<Self>().entity_id();
961 let buffer_id = entity_id.as_non_zero_u64().into();
962 async move {
963 let text =
964 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
965 let mut syntax = SyntaxMap::new(&text).snapshot();
966 if let Some(language) = language.clone() {
967 let text = text.clone();
968 let language = language.clone();
969 let language_registry = language_registry.clone();
970 syntax.reparse(&text, language_registry, language);
971 }
972 BufferSnapshot {
973 text,
974 syntax,
975 file: None,
976 diagnostics: Default::default(),
977 remote_selections: Default::default(),
978 language,
979 non_text_state_update_count: 0,
980 }
981 }
982 }
983
984 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
985 let entity_id = cx.reserve_entity::<Self>().entity_id();
986 let buffer_id = entity_id.as_non_zero_u64().into();
987 let text =
988 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
989 let syntax = SyntaxMap::new(&text).snapshot();
990 BufferSnapshot {
991 text,
992 syntax,
993 file: None,
994 diagnostics: Default::default(),
995 remote_selections: Default::default(),
996 language: None,
997 non_text_state_update_count: 0,
998 }
999 }
1000
1001 #[cfg(any(test, feature = "test-support"))]
1002 pub fn build_snapshot_sync(
1003 text: Rope,
1004 language: Option<Arc<Language>>,
1005 language_registry: Option<Arc<LanguageRegistry>>,
1006 cx: &mut App,
1007 ) -> BufferSnapshot {
1008 let entity_id = cx.reserve_entity::<Self>().entity_id();
1009 let buffer_id = entity_id.as_non_zero_u64().into();
1010 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1011 let mut syntax = SyntaxMap::new(&text).snapshot();
1012 if let Some(language) = language.clone() {
1013 let text = text.clone();
1014 let language = language.clone();
1015 let language_registry = language_registry.clone();
1016 syntax.reparse(&text, language_registry, language);
1017 }
1018 BufferSnapshot {
1019 text,
1020 syntax,
1021 file: None,
1022 diagnostics: Default::default(),
1023 remote_selections: Default::default(),
1024 language,
1025 non_text_state_update_count: 0,
1026 }
1027 }
1028
1029 /// Retrieve a snapshot of the buffer's current state. This is computationally
1030 /// cheap, and allows reading from the buffer on a background thread.
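    ///
    /// A rough sketch of background use (illustrative; assumes an `App` context `cx`):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is immutable, so it can be read off the main thread.
    ///     let _line_count = snapshot.max_point().row + 1;
    /// })
    /// .detach();
    /// ```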
1031 pub fn snapshot(&self) -> BufferSnapshot {
1032 let text = self.text.snapshot();
1033 let mut syntax_map = self.syntax_map.lock();
1034 syntax_map.interpolate(&text);
1035 let syntax = syntax_map.snapshot();
1036
1037 BufferSnapshot {
1038 text,
1039 syntax,
1040 file: self.file.clone(),
1041 remote_selections: self.remote_selections.clone(),
1042 diagnostics: self.diagnostics.clone(),
1043 language: self.language.clone(),
1044 non_text_state_update_count: self.non_text_state_update_count,
1045 }
1046 }
1047
1048 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1049 let this = cx.entity();
1050 cx.new(|cx| {
1051 let mut branch = Self {
1052 branch_state: Some(BufferBranchState {
1053 base_buffer: this.clone(),
1054 merged_operations: Default::default(),
1055 }),
1056 language: self.language.clone(),
1057 has_conflict: self.has_conflict,
1058 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1059 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1060 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1061 };
1062 if let Some(language_registry) = self.language_registry() {
1063 branch.set_language_registry(language_registry);
1064 }
1065
1066 // Reparse the branch buffer so that we get syntax highlighting immediately.
1067 branch.reparse(cx);
1068
1069 branch
1070 })
1071 }
1072
1073 pub fn preview_edits(
1074 &self,
1075 edits: Arc<[(Range<Anchor>, String)]>,
1076 cx: &App,
1077 ) -> Task<EditPreview> {
1078 let registry = self.language_registry();
1079 let language = self.language().cloned();
1080 let old_snapshot = self.text.snapshot();
1081 let mut branch_buffer = self.text.branch();
1082 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1083 cx.background_spawn(async move {
1084 if !edits.is_empty() {
1085 if let Some(language) = language.clone() {
1086 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1087 }
1088
1089 branch_buffer.edit(edits.iter().cloned());
1090 let snapshot = branch_buffer.snapshot();
1091 syntax_snapshot.interpolate(&snapshot);
1092
1093 if let Some(language) = language {
1094 syntax_snapshot.reparse(&snapshot, registry, language);
1095 }
1096 }
1097 EditPreview {
1098 old_snapshot,
1099 applied_edits_snapshot: branch_buffer.snapshot(),
1100 syntax_snapshot,
1101 }
1102 })
1103 }
1104
1105 /// Applies all of the changes in this buffer that intersect any of the
1106 /// given `ranges` to its base buffer.
1107 ///
1108 /// If `ranges` is empty, then all changes will be applied. This buffer must
1109 /// be a branch buffer to call this method.
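    ///
    /// A minimal usage sketch (illustrative; assumes gpui entities for the base and branch):
    ///
    /// ```ignore
    /// let branch = base.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |buffer, cx| {
    ///     buffer.edit([(0..0, "// new text\n")], None, cx);
    ///     // An empty range list applies every change back to the base buffer.
    ///     buffer.merge_into_base(Vec::new(), cx);
    /// });
    /// ```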
1110 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1111 let Some(base_buffer) = self.base_buffer() else {
1112 debug_panic!("not a branch buffer");
1113 return;
1114 };
1115
1116 let mut ranges = if ranges.is_empty() {
1117 &[0..usize::MAX]
1118 } else {
1119 ranges.as_slice()
1120 }
1121 .into_iter()
1122 .peekable();
1123
1124 let mut edits = Vec::new();
1125 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1126 let mut is_included = false;
1127 while let Some(range) = ranges.peek() {
1128 if range.end < edit.new.start {
1129 ranges.next().unwrap();
1130 } else {
1131 if range.start <= edit.new.end {
1132 is_included = true;
1133 }
1134 break;
1135 }
1136 }
1137
1138 if is_included {
1139 edits.push((
1140 edit.old.clone(),
1141 self.text_for_range(edit.new.clone()).collect::<String>(),
1142 ));
1143 }
1144 }
1145
1146 let operation = base_buffer.update(cx, |base_buffer, cx| {
1147 // cx.emit(BufferEvent::DiffBaseChanged);
1148 base_buffer.edit(edits, None, cx)
1149 });
1150
1151 if let Some(operation) = operation {
1152 if let Some(BufferBranchState {
1153 merged_operations, ..
1154 }) = &mut self.branch_state
1155 {
1156 merged_operations.push(operation);
1157 }
1158 }
1159 }
1160
1161 fn on_base_buffer_event(
1162 &mut self,
1163 _: Entity<Buffer>,
1164 event: &BufferEvent,
1165 cx: &mut Context<Self>,
1166 ) {
1167 let BufferEvent::Operation { operation, .. } = event else {
1168 return;
1169 };
1170 let Some(BufferBranchState {
1171 merged_operations, ..
1172 }) = &mut self.branch_state
1173 else {
1174 return;
1175 };
1176
1177 let mut operation_to_undo = None;
1178 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1179 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1180 merged_operations.remove(ix);
1181 operation_to_undo = Some(operation.timestamp);
1182 }
1183 }
1184
1185 self.apply_ops([operation.clone()], cx);
1186
1187 if let Some(timestamp) = operation_to_undo {
1188 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1189 self.undo_operations(counts, cx);
1190 }
1191 }
1192
1193 #[cfg(test)]
1194 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1195 &self.text
1196 }
1197
1198 /// Retrieve a snapshot of the buffer's raw text, without any
1199 /// language-related state like the syntax tree or diagnostics.
1200 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1201 self.text.snapshot()
1202 }
1203
1204 /// The file associated with the buffer, if any.
1205 pub fn file(&self) -> Option<&Arc<dyn File>> {
1206 self.file.as_ref()
1207 }
1208
1209 /// The version of the buffer that was last saved or reloaded from disk.
1210 pub fn saved_version(&self) -> &clock::Global {
1211 &self.saved_version
1212 }
1213
1214 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1215 pub fn saved_mtime(&self) -> Option<MTime> {
1216 self.saved_mtime
1217 }
1218
1219 /// Assign a language to the buffer.
1220 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1221 self.non_text_state_update_count += 1;
1222 self.syntax_map.lock().clear(&self.text);
1223 self.language = language;
1224 self.was_changed();
1225 self.reparse(cx);
1226 cx.emit(BufferEvent::LanguageChanged);
1227 }
1228
1229 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1230 /// other languages if parts of the buffer are written in different languages.
1231 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1232 self.syntax_map
1233 .lock()
1234 .set_language_registry(language_registry);
1235 }
1236
1237 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1238 self.syntax_map.lock().language_registry()
1239 }
1240
1241 /// Assign the buffer a new [`Capability`].
1242 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1243 self.capability = capability;
1244 cx.emit(BufferEvent::CapabilityChanged)
1245 }
1246
1247 /// This method is called to signal that the buffer has been saved.
1248 pub fn did_save(
1249 &mut self,
1250 version: clock::Global,
1251 mtime: Option<MTime>,
1252 cx: &mut Context<Self>,
1253 ) {
1254 self.saved_version = version;
1255 self.has_unsaved_edits
1256 .set((self.saved_version().clone(), false));
1257 self.has_conflict = false;
1258 self.saved_mtime = mtime;
1259 self.was_changed();
1260 cx.emit(BufferEvent::Saved);
1261 cx.notify();
1262 }
1263
1264 /// This method is called to signal that the buffer has been discarded.
1265 pub fn discarded(&self, cx: &mut Context<Self>) {
1266 cx.emit(BufferEvent::Discarded);
1267 cx.notify();
1268 }
1269
1270 /// Reloads the contents of the buffer from disk.
1271 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1272 let (tx, rx) = futures::channel::oneshot::channel();
1273 let prev_version = self.text.version();
1274 self.reload_task = Some(cx.spawn(async move |this, cx| {
1275 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1276 let file = this.file.as_ref()?.as_local()?;
1277
1278 Some((file.disk_state().mtime(), file.load(cx)))
1279 })?
1280 else {
1281 return Ok(());
1282 };
1283
1284 let new_text = new_text.await?;
1285 let diff = this
1286 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1287 .await;
1288 this.update(cx, |this, cx| {
1289 if this.version() == diff.base_version {
1290 this.finalize_last_transaction();
1291 this.apply_diff(diff, cx);
1292 tx.send(this.finalize_last_transaction().cloned()).ok();
1293 this.has_conflict = false;
1294 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1295 } else {
1296 if !diff.edits.is_empty()
1297 || this
1298 .edits_since::<usize>(&diff.base_version)
1299 .next()
1300 .is_some()
1301 {
1302 this.has_conflict = true;
1303 }
1304
1305 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1306 }
1307
1308 this.reload_task.take();
1309 })
1310 }));
1311 rx
1312 }
1313
1314 /// This method is called to signal that the buffer has been reloaded.
1315 pub fn did_reload(
1316 &mut self,
1317 version: clock::Global,
1318 line_ending: LineEnding,
1319 mtime: Option<MTime>,
1320 cx: &mut Context<Self>,
1321 ) {
1322 self.saved_version = version;
1323 self.has_unsaved_edits
1324 .set((self.saved_version.clone(), false));
1325 self.text.set_line_ending(line_ending);
1326 self.saved_mtime = mtime;
1327 cx.emit(BufferEvent::Reloaded);
1328 cx.notify();
1329 }
1330
1331 /// Updates the [`File`] backing this buffer. This should be called when
1332 /// the file has changed or has been deleted.
1333 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1334 let was_dirty = self.is_dirty();
1335 let mut file_changed = false;
1336
1337 if let Some(old_file) = self.file.as_ref() {
1338 if new_file.path() != old_file.path() {
1339 file_changed = true;
1340 }
1341
1342 let old_state = old_file.disk_state();
1343 let new_state = new_file.disk_state();
1344 if old_state != new_state {
1345 file_changed = true;
1346 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1347 cx.emit(BufferEvent::ReloadNeeded)
1348 }
1349 }
1350 } else {
1351 file_changed = true;
1352 };
1353
1354 self.file = Some(new_file);
1355 if file_changed {
1356 self.was_changed();
1357 self.non_text_state_update_count += 1;
1358 if was_dirty != self.is_dirty() {
1359 cx.emit(BufferEvent::DirtyChanged);
1360 }
1361 cx.emit(BufferEvent::FileHandleChanged);
1362 cx.notify();
1363 }
1364 }
1365
1366 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1367 Some(self.branch_state.as_ref()?.base_buffer.clone())
1368 }
1369
1370 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1371 pub fn language(&self) -> Option<&Arc<Language>> {
1372 self.language.as_ref()
1373 }
1374
1375 /// Returns the [`Language`] at the given location.
1376 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1377 let offset = position.to_offset(self);
1378 self.syntax_map
1379 .lock()
1380 .layers_for_range(offset..offset, &self.text, false)
1381 .last()
1382 .map(|info| info.language.clone())
1383 .or_else(|| self.language.clone())
1384 }
1385
1386 /// Returns each [`Language`] for the active syntax layers at the given location.
1387 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1388 let offset = position.to_offset(self);
1389 let mut languages: Vec<Arc<Language>> = self
1390 .syntax_map
1391 .lock()
1392 .layers_for_range(offset..offset, &self.text, false)
1393 .map(|info| info.language.clone())
1394 .collect();
1395
1396 if languages.is_empty() {
1397 if let Some(buffer_language) = self.language() {
1398 languages.push(buffer_language.clone());
1399 }
1400 }
1401
1402 languages
1403 }
1404
1405 /// An integer version number that accounts for all updates besides
1406 /// the buffer's text itself (which is versioned via a version vector).
1407 pub fn non_text_state_update_count(&self) -> usize {
1408 self.non_text_state_update_count
1409 }
1410
1411 /// Whether the buffer is being parsed in the background.
1412 #[cfg(any(test, feature = "test-support"))]
1413 pub fn is_parsing(&self) -> bool {
1414 self.reparse.is_some()
1415 }
1416
1417 /// Indicates whether the buffer contains any regions that may be
1418 /// written in a language that hasn't been loaded yet.
1419 pub fn contains_unknown_injections(&self) -> bool {
1420 self.syntax_map.lock().contains_unknown_injections()
1421 }
1422
1423 #[cfg(test)]
1424 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1425 self.sync_parse_timeout = timeout;
1426 }
1427
1428 /// Called after an edit to synchronize the buffer's main parse tree with
1429 /// the buffer's new underlying state.
1430 ///
1431 /// Locks the syntax map and interpolates the edits since the last reparse
1432 /// into the foreground syntax tree.
1433 ///
1434 /// Then takes a stable snapshot of the syntax map before unlocking it.
1435 /// The snapshot with the interpolated edits is sent to a background thread,
1436 /// where we ask Tree-sitter to perform an incremental parse.
1437 ///
1438 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1439 /// waiting on the parse to complete. As soon as it completes, we proceed
1440 /// synchronously, unless a 1ms timeout elapses.
1441 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and return with the interpolated tree still
    /// in the foreground. When the background parse completes, we call back into
    /// the main thread and apply the new parse state.
1446 ///
1447 /// If the buffer or grammar changed since the start of the background parse,
1448 /// initiate an additional reparse recursively. To avoid concurrent parses
1449 /// for the same buffer, we only initiate a new parse if we are not already
1450 /// parsing in the background.
1451 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1452 if self.reparse.is_some() {
1453 return;
1454 }
1455 let language = if let Some(language) = self.language.clone() {
1456 language
1457 } else {
1458 return;
1459 };
1460
1461 let text = self.text_snapshot();
1462 let parsed_version = self.version();
1463
1464 let mut syntax_map = self.syntax_map.lock();
1465 syntax_map.interpolate(&text);
1466 let language_registry = syntax_map.language_registry();
1467 let mut syntax_snapshot = syntax_map.snapshot();
1468 drop(syntax_map);
1469
1470 let parse_task = cx.background_spawn({
1471 let language = language.clone();
1472 let language_registry = language_registry.clone();
1473 async move {
1474 syntax_snapshot.reparse(&text, language_registry, language);
1475 syntax_snapshot
1476 }
1477 });
1478
1479 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1480 match cx
1481 .background_executor()
1482 .block_with_timeout(self.sync_parse_timeout, parse_task)
1483 {
1484 Ok(new_syntax_snapshot) => {
1485 self.did_finish_parsing(new_syntax_snapshot, cx);
1486 self.reparse = None;
1487 }
1488 Err(parse_task) => {
1489 self.reparse = Some(cx.spawn(async move |this, cx| {
1490 let new_syntax_map = parse_task.await;
1491 this.update(cx, move |this, cx| {
1492 let grammar_changed =
1493 this.language.as_ref().map_or(true, |current_language| {
1494 !Arc::ptr_eq(&language, current_language)
1495 });
1496 let language_registry_changed = new_syntax_map
1497 .contains_unknown_injections()
1498 && language_registry.map_or(false, |registry| {
1499 registry.version() != new_syntax_map.language_registry_version()
1500 });
1501 let parse_again = language_registry_changed
1502 || grammar_changed
1503 || this.version.changed_since(&parsed_version);
1504 this.did_finish_parsing(new_syntax_map, cx);
1505 this.reparse = None;
1506 if parse_again {
1507 this.reparse(cx);
1508 }
1509 })
1510 .ok();
1511 }));
1512 }
1513 }
1514 }
1515
1516 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1517 self.was_changed();
1518 self.non_text_state_update_count += 1;
1519 self.syntax_map.lock().did_parse(syntax_snapshot);
1520 self.request_autoindent(cx);
1521 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1522 cx.emit(BufferEvent::Reparsed);
1523 cx.notify();
1524 }
1525
1526 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1527 self.parse_status.1.clone()
1528 }
1529
1530 /// Assign to the buffer a set of diagnostics created by a given language server.
1531 pub fn update_diagnostics(
1532 &mut self,
1533 server_id: LanguageServerId,
1534 diagnostics: DiagnosticSet,
1535 cx: &mut Context<Self>,
1536 ) {
1537 let lamport_timestamp = self.text.lamport_clock.tick();
1538 let op = Operation::UpdateDiagnostics {
1539 server_id,
1540 diagnostics: diagnostics.iter().cloned().collect(),
1541 lamport_timestamp,
1542 };
1543 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1544 self.send_operation(op, true, cx);
1545 }
1546
1547 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1548 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1549 return None;
1550 };
1551 Some(&self.diagnostics[idx].1)
1552 }
1553
1554 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1555 if let Some(indent_sizes) = self.compute_autoindents() {
1556 let indent_sizes = cx.background_spawn(indent_sizes);
1557 match cx
1558 .background_executor()
1559 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1560 {
1561 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1562 Err(indent_sizes) => {
1563 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1564 let indent_sizes = indent_sizes.await;
1565 this.update(cx, |this, cx| {
1566 this.apply_autoindents(indent_sizes, cx);
1567 })
1568 .ok();
1569 }));
1570 }
1571 }
1572 } else {
1573 self.autoindent_requests.clear();
1574 }
1575 }
1576
1577 fn compute_autoindents(
1578 &self,
1579 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1580 let max_rows_between_yields = 100;
1581 let snapshot = self.snapshot();
1582 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1583 return None;
1584 }
1585
1586 let autoindent_requests = self.autoindent_requests.clone();
1587 Some(async move {
1588 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1589 for request in autoindent_requests {
1590 // Resolve each edited range to its row in the current buffer and in the
1591 // buffer before this batch of edits.
1592 let mut row_ranges = Vec::new();
1593 let mut old_to_new_rows = BTreeMap::new();
1594 let mut language_indent_sizes_by_new_row = Vec::new();
1595 for entry in &request.entries {
1596 let position = entry.range.start;
1597 let new_row = position.to_point(&snapshot).row;
1598 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1599 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1600
1601 if !entry.first_line_is_new {
1602 let old_row = position.to_point(&request.before_edit).row;
1603 old_to_new_rows.insert(old_row, new_row);
1604 }
1605 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1606 }
1607
1608 // Build a map containing the suggested indentation for each of the edited lines
1609 // with respect to the state of the buffer before these edits. This map is keyed
1610 // by the rows for these lines in the current state of the buffer.
1611 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1612 let old_edited_ranges =
1613 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1614 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1615 let mut language_indent_size = IndentSize::default();
1616 for old_edited_range in old_edited_ranges {
1617 let suggestions = request
1618 .before_edit
1619 .suggest_autoindents(old_edited_range.clone())
1620 .into_iter()
1621 .flatten();
1622 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1623 if let Some(suggestion) = suggestion {
1624 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1625
1626 // Find the indent size based on the language for this row.
1627 while let Some((row, size)) = language_indent_sizes.peek() {
1628 if *row > new_row {
1629 break;
1630 }
1631 language_indent_size = *size;
1632 language_indent_sizes.next();
1633 }
1634
1635 let suggested_indent = old_to_new_rows
1636 .get(&suggestion.basis_row)
1637 .and_then(|from_row| {
1638 Some(old_suggestions.get(from_row).copied()?.0)
1639 })
1640 .unwrap_or_else(|| {
1641 request
1642 .before_edit
1643 .indent_size_for_line(suggestion.basis_row)
1644 })
1645 .with_delta(suggestion.delta, language_indent_size);
1646 old_suggestions
1647 .insert(new_row, (suggested_indent, suggestion.within_error));
1648 }
1649 }
1650 yield_now().await;
1651 }
1652
1653 // Compute new suggestions for each line, but only include them in the result
1654 // if they differ from the old suggestion for that line.
1655 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1656 let mut language_indent_size = IndentSize::default();
1657 for (row_range, original_indent_column) in row_ranges {
1658 let new_edited_row_range = if request.is_block_mode {
1659 row_range.start..row_range.start + 1
1660 } else {
1661 row_range.clone()
1662 };
1663
1664 let suggestions = snapshot
1665 .suggest_autoindents(new_edited_row_range.clone())
1666 .into_iter()
1667 .flatten();
1668 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1669 if let Some(suggestion) = suggestion {
1670 // Find the indent size based on the language for this row.
1671 while let Some((row, size)) = language_indent_sizes.peek() {
1672 if *row > new_row {
1673 break;
1674 }
1675 language_indent_size = *size;
1676 language_indent_sizes.next();
1677 }
1678
1679 let suggested_indent = indent_sizes
1680 .get(&suggestion.basis_row)
1681 .copied()
1682 .map(|e| e.0)
1683 .unwrap_or_else(|| {
1684 snapshot.indent_size_for_line(suggestion.basis_row)
1685 })
1686 .with_delta(suggestion.delta, language_indent_size);
1687
1688 if old_suggestions.get(&new_row).map_or(
1689 true,
1690 |(old_indentation, was_within_error)| {
1691 suggested_indent != *old_indentation
1692 && (!suggestion.within_error || *was_within_error)
1693 },
1694 ) {
1695 indent_sizes.insert(
1696 new_row,
1697 (suggested_indent, request.ignore_empty_lines),
1698 );
1699 }
1700 }
1701 }
1702
1703 if let (true, Some(original_indent_column)) =
1704 (request.is_block_mode, original_indent_column)
1705 {
1706 let new_indent =
1707 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1708 *indent
1709 } else {
1710 snapshot.indent_size_for_line(row_range.start)
1711 };
1712 let delta = new_indent.len as i64 - original_indent_column as i64;
1713 if delta != 0 {
1714 for row in row_range.skip(1) {
1715 indent_sizes.entry(row).or_insert_with(|| {
1716 let mut size = snapshot.indent_size_for_line(row);
1717 if size.kind == new_indent.kind {
1718 match delta.cmp(&0) {
1719 Ordering::Greater => size.len += delta as u32,
1720 Ordering::Less => {
1721 size.len = size.len.saturating_sub(-delta as u32)
1722 }
1723 Ordering::Equal => {}
1724 }
1725 }
1726 (size, request.ignore_empty_lines)
1727 });
1728 }
1729 }
1730 }
1731
1732 yield_now().await;
1733 }
1734 }
1735
1736 indent_sizes
1737 .into_iter()
1738 .filter_map(|(row, (indent, ignore_empty_lines))| {
1739 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1740 None
1741 } else {
1742 Some((row, indent))
1743 }
1744 })
1745 .collect()
1746 })
1747 }
1748
1749 fn apply_autoindents(
1750 &mut self,
1751 indent_sizes: BTreeMap<u32, IndentSize>,
1752 cx: &mut Context<Self>,
1753 ) {
1754 self.autoindent_requests.clear();
1755
1756 let edits: Vec<_> = indent_sizes
1757 .into_iter()
1758 .filter_map(|(row, indent_size)| {
1759 let current_size = indent_size_for_line(self, row);
1760 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1761 })
1762 .collect();
1763
1764 let preserve_preview = self.preserve_preview();
1765 self.edit(edits, None, cx);
1766 if preserve_preview {
1767 self.refresh_preview();
1768 }
1769 }
1770
1771 /// Create a minimal edit that will cause the given row to be indented
1772 /// with the given size. After applying this edit, the length of the line
1773 /// will always be at least `new_size.len`.
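///
/// # Examples
///
/// A minimal sketch (not from the original source), using the `IndentSize` and
/// `Point` types defined in this crate:
///
/// ```ignore
/// // Grow a line's indentation from 2 spaces to 4 spaces.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,                     // row
///     IndentSize::spaces(2), // current indentation
///     IndentSize::spaces(4), // desired indentation
/// );
/// // The minimal edit inserts two spaces at the start of row 3.
/// assert_eq!(
///     edit,
///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
/// );
/// ```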
1774 pub fn edit_for_indent_size_adjustment(
1775 row: u32,
1776 current_size: IndentSize,
1777 new_size: IndentSize,
1778 ) -> Option<(Range<Point>, String)> {
1779 if new_size.kind == current_size.kind {
1780 match new_size.len.cmp(&current_size.len) {
1781 Ordering::Greater => {
1782 let point = Point::new(row, 0);
1783 Some((
1784 point..point,
1785 iter::repeat(new_size.char())
1786 .take((new_size.len - current_size.len) as usize)
1787 .collect::<String>(),
1788 ))
1789 }
1790
1791 Ordering::Less => Some((
1792 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1793 String::new(),
1794 )),
1795
1796 Ordering::Equal => None,
1797 }
1798 } else {
1799 Some((
1800 Point::new(row, 0)..Point::new(row, current_size.len),
1801 iter::repeat(new_size.char())
1802 .take(new_size.len as usize)
1803 .collect::<String>(),
1804 ))
1805 }
1806 }
1807
1808 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1809 /// and the given new text.
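///
/// # Examples
///
/// A minimal usage sketch (not from the original source); it assumes a `buffer`,
/// a `new_text: String`, a `cx`, and an async context in which the returned
/// [`Task`] can be awaited with mutable access to the buffer:
///
/// ```ignore
/// // Compute the diff against `new_text` on a background task...
/// let task = buffer.diff(new_text, cx);
/// // ...then apply it; `apply_diff` reconciles it with any edits made since.
/// let diff = task.await;
/// buffer.apply_diff(diff, cx);
/// ```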
1810 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1811 let old_text = self.as_rope().clone();
1812 let base_version = self.version();
1813 cx.background_executor()
1814 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1815 let old_text = old_text.to_string();
1816 let line_ending = LineEnding::detect(&new_text);
1817 LineEnding::normalize(&mut new_text);
1818 let edits = text_diff(&old_text, &new_text);
1819 Diff {
1820 base_version,
1821 line_ending,
1822 edits,
1823 }
1824 })
1825 }
1826
1827 /// Spawns a background task that searches the buffer for any whitespace
1828 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1829 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1830 let old_text = self.as_rope().clone();
1831 let line_ending = self.line_ending();
1832 let base_version = self.version();
1833 cx.background_spawn(async move {
1834 let ranges = trailing_whitespace_ranges(&old_text);
1835 let empty = Arc::<str>::from("");
1836 Diff {
1837 base_version,
1838 line_ending,
1839 edits: ranges
1840 .into_iter()
1841 .map(|range| (range, empty.clone()))
1842 .collect(),
1843 }
1844 })
1845 }
1846
1847 /// Ensures that the buffer ends with a single newline character, and
1848 /// no other whitespace.
1849 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1850 let len = self.len();
1851 let mut offset = len;
1852 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1853 let non_whitespace_len = chunk
1854 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1855 .len();
1856 offset -= chunk.len();
1857 offset += non_whitespace_len;
1858 if non_whitespace_len != 0 {
1859 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1860 return;
1861 }
1862 break;
1863 }
1864 }
1865 self.edit([(offset..len, "\n")], None, cx);
1866 }
1867
1868 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1869 /// calculated, then adjust the diff to account for those changes, and discard any
1870 /// parts of the diff that conflict with those changes.
1871 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1872 let snapshot = self.snapshot();
1873 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1874 let mut delta = 0;
1875 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1876 while let Some(edit_since) = edits_since.peek() {
1877 // If the edit occurs after a diff hunk, then it does not
1878 // affect that hunk.
1879 if edit_since.old.start > range.end {
1880 break;
1881 }
1882 // If the edit precedes the diff hunk, then adjust the hunk
1883 // to reflect the edit.
1884 else if edit_since.old.end < range.start {
1885 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1886 edits_since.next();
1887 }
1888 // If the edit intersects a diff hunk, then discard that hunk.
1889 else {
1890 return None;
1891 }
1892 }
1893
1894 let start = (range.start as i64 + delta) as usize;
1895 let end = (range.end as i64 + delta) as usize;
1896 Some((start..end, new_text))
1897 });
1898
1899 self.start_transaction();
1900 self.text.set_line_ending(diff.line_ending);
1901 self.edit(adjusted_edits, None, cx);
1902 self.end_transaction(cx)
1903 }
1904
1905 fn has_unsaved_edits(&self) -> bool {
1906 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1907
1908 if last_version == self.version {
1909 self.has_unsaved_edits
1910 .set((last_version, has_unsaved_edits));
1911 return has_unsaved_edits;
1912 }
1913
1914 let has_edits = self.has_edits_since(&self.saved_version);
1915 self.has_unsaved_edits
1916 .set((self.version.clone(), has_edits));
1917 has_edits
1918 }
1919
1920 /// Checks if the buffer has unsaved changes.
1921 pub fn is_dirty(&self) -> bool {
1922 if self.capability == Capability::ReadOnly {
1923 return false;
1924 }
1925 if self.has_conflict {
1926 return true;
1927 }
1928 match self.file.as_ref().map(|f| f.disk_state()) {
1929 Some(DiskState::New) | Some(DiskState::Deleted) => {
1930 !self.is_empty() && self.has_unsaved_edits()
1931 }
1932 _ => self.has_unsaved_edits(),
1933 }
1934 }
1935
1936 /// Checks if the buffer and its file have both changed since the buffer
1937 /// was last saved or reloaded.
1938 pub fn has_conflict(&self) -> bool {
1939 if self.has_conflict {
1940 return true;
1941 }
1942 let Some(file) = self.file.as_ref() else {
1943 return false;
1944 };
1945 match file.disk_state() {
1946 DiskState::New => false,
1947 DiskState::Present { mtime } => match self.saved_mtime {
1948 Some(saved_mtime) => {
1949 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1950 }
1951 None => true,
1952 },
1953 DiskState::Deleted => false,
1954 }
1955 }
1956
1957 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1958 pub fn subscribe(&mut self) -> Subscription {
1959 self.text.subscribe()
1960 }
1961
1962 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1963 ///
1964 /// This allows downstream code to check if the buffer's text has changed without
1965 /// waiting for an effect cycle, which would be required if using events.
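///
/// # Examples
///
/// A minimal sketch (not from the original source); it assumes a `buffer` and
/// uses `std::rc::Rc` and `std::cell::Cell`:
///
/// ```ignore
/// let changed = Rc::new(Cell::new(false));
/// // The buffer keeps only a weak reference, so the bit is dropped from the
/// // list automatically once `changed` goes away.
/// buffer.record_changes(Rc::downgrade(&changed));
/// // ...after some edits:
/// if changed.replace(false) {
///     // The buffer's text changed since the flag was last cleared.
/// }
/// ```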
1966 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
1967 if let Err(ix) = self
1968 .change_bits
1969 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
1970 {
1971 self.change_bits.insert(ix, bit);
1972 }
1973 }
1974
1975 fn was_changed(&mut self) {
1976 self.change_bits.retain(|change_bit| {
1977 change_bit.upgrade().map_or(false, |bit| {
1978 bit.replace(true);
1979 true
1980 })
1981 });
1982 }
1983
1984 /// Starts a transaction, if one is not already in-progress. When undoing or
1985 /// redoing edits, all of the edits performed within a transaction are undone
1986 /// or redone together.
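///
/// # Examples
///
/// A minimal sketch (not from the original source); it assumes a `buffer` and a
/// `cx: &mut Context<Buffer>`:
///
/// ```ignore
/// // Group two edits so that a single undo reverts both of them.
/// buffer.start_transaction();
/// buffer.edit([(0..0, "fn main() {\n")], None, cx);
/// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
/// buffer.end_transaction(cx);
/// buffer.undo(cx); // removes both inserted lines
/// ```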
1987 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1988 self.start_transaction_at(Instant::now())
1989 }
1990
1991 /// Starts a transaction, providing the current time. Subsequent transactions
1992 /// that occur within a short period of time will be grouped together. This
1993 /// is controlled by the buffer's undo grouping duration.
1994 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1995 self.transaction_depth += 1;
1996 if self.was_dirty_before_starting_transaction.is_none() {
1997 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1998 }
1999 self.text.start_transaction_at(now)
2000 }
2001
2002 /// Terminates the current transaction, if this is the outermost transaction.
2003 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2004 self.end_transaction_at(Instant::now(), cx)
2005 }
2006
2007 /// Terminates the current transaction, providing the current time. Subsequent transactions
2008 /// that occur within a short period of time will be grouped together. This
2009 /// is controlled by the buffer's undo grouping duration.
2010 pub fn end_transaction_at(
2011 &mut self,
2012 now: Instant,
2013 cx: &mut Context<Self>,
2014 ) -> Option<TransactionId> {
2015 assert!(self.transaction_depth > 0);
2016 self.transaction_depth -= 1;
2017 let was_dirty = if self.transaction_depth == 0 {
2018 self.was_dirty_before_starting_transaction.take().unwrap()
2019 } else {
2020 false
2021 };
2022 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2023 self.did_edit(&start_version, was_dirty, cx);
2024 Some(transaction_id)
2025 } else {
2026 None
2027 }
2028 }
2029
2030 /// Manually add a transaction to the buffer's undo history.
2031 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2032 self.text.push_transaction(transaction, now);
2033 }
2034
2035 /// Prevent the last transaction from being grouped with any subsequent transactions,
2036 /// even if they occur within the buffer's undo grouping duration.
2037 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2038 self.text.finalize_last_transaction()
2039 }
2040
2041 /// Manually group all changes since a given transaction.
2042 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2043 self.text.group_until_transaction(transaction_id);
2044 }
2045
2046 /// Manually remove a transaction from the buffer's undo history.
2047 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2048 self.text.forget_transaction(transaction_id)
2049 }
2050
2051 /// Retrieve a transaction from the buffer's undo history.
2052 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2053 self.text.get_transaction(transaction_id)
2054 }
2055
2056 /// Manually merge two transactions in the buffer's undo history.
2057 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2058 self.text.merge_transactions(transaction, destination);
2059 }
2060
2061 /// Waits for the buffer to receive operations with the given timestamps.
2062 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2063 &mut self,
2064 edit_ids: It,
2065 ) -> impl Future<Output = Result<()>> + use<It> {
2066 self.text.wait_for_edits(edit_ids)
2067 }
2068
2069 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2070 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2071 &mut self,
2072 anchors: It,
2073 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2074 self.text.wait_for_anchors(anchors)
2075 }
2076
2077 /// Waits for the buffer to receive operations up to the given version.
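///
/// # Examples
///
/// A minimal sketch (not from the original source); it assumes a `buffer`, a
/// `remote_version: clock::Global`, and an async context:
///
/// ```ignore
/// // Wait (asynchronously) until this replica has received every operation
/// // included in `remote_version`.
/// buffer.wait_for_version(remote_version).await?;
/// ```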
2078 pub fn wait_for_version(
2079 &mut self,
2080 version: clock::Global,
2081 ) -> impl Future<Output = Result<()>> + use<> {
2082 self.text.wait_for_version(version)
2083 }
2084
2085 /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`], or
2086 /// [`Buffer::wait_for_version`] to resolve with an error.
2087 pub fn give_up_waiting(&mut self) {
2088 self.text.give_up_waiting();
2089 }
2090
2091 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2092 pub fn set_active_selections(
2093 &mut self,
2094 selections: Arc<[Selection<Anchor>]>,
2095 line_mode: bool,
2096 cursor_shape: CursorShape,
2097 cx: &mut Context<Self>,
2098 ) {
2099 let lamport_timestamp = self.text.lamport_clock.tick();
2100 self.remote_selections.insert(
2101 self.text.replica_id(),
2102 SelectionSet {
2103 selections: selections.clone(),
2104 lamport_timestamp,
2105 line_mode,
2106 cursor_shape,
2107 },
2108 );
2109 self.send_operation(
2110 Operation::UpdateSelections {
2111 selections,
2112 line_mode,
2113 lamport_timestamp,
2114 cursor_shape,
2115 },
2116 true,
2117 cx,
2118 );
2119 self.non_text_state_update_count += 1;
2120 cx.notify();
2121 }
2122
2123 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2124 /// this replica.
2125 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2126 if self
2127 .remote_selections
2128 .get(&self.text.replica_id())
2129 .map_or(true, |set| !set.selections.is_empty())
2130 {
2131 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2132 }
2133 }
2134
2135 /// Replaces the buffer's entire text.
2136 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2137 where
2138 T: Into<Arc<str>>,
2139 {
2140 self.autoindent_requests.clear();
2141 self.edit([(0..self.len(), text)], None, cx)
2142 }
2143
2144 /// Appends the given text to the end of the buffer.
2145 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2146 where
2147 T: Into<Arc<str>>,
2148 {
2149 self.edit([(self.len()..self.len(), text)], None, cx)
2150 }
2151
2152 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2153 /// delete, and a string of text to insert at that location.
2154 ///
2155 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2156 /// request for the edited ranges, which will be processed when the buffer finishes
2157 /// parsing.
2158 ///
2159 /// Parsing takes place at the end of a transaction, and may compute synchronously
2160 /// or asynchronously, depending on the changes.
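///
/// # Examples
///
/// A minimal sketch (not from the original source); it assumes a `buffer`, a
/// `cx: &mut Context<Buffer>`, and a cursor `position: Point`:
///
/// ```ignore
/// // Replace the first ten bytes without requesting autoindent.
/// buffer.edit([(0..10, "new text")], None, cx);
///
/// // Insert a new line and let the language's indent rules position it.
/// buffer.edit(
///     [(position..position, "\nlet x = 1;")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```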
2161 pub fn edit<I, S, T>(
2162 &mut self,
2163 edits_iter: I,
2164 autoindent_mode: Option<AutoindentMode>,
2165 cx: &mut Context<Self>,
2166 ) -> Option<clock::Lamport>
2167 where
2168 I: IntoIterator<Item = (Range<S>, T)>,
2169 S: ToOffset,
2170 T: Into<Arc<str>>,
2171 {
2172 // Skip invalid edits and coalesce contiguous ones.
2173 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2174
2175 for (range, new_text) in edits_iter {
2176 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2177
2178 if range.start > range.end {
2179 mem::swap(&mut range.start, &mut range.end);
2180 }
2181 let new_text = new_text.into();
2182 if !new_text.is_empty() || !range.is_empty() {
2183 if let Some((prev_range, prev_text)) = edits.last_mut() {
2184 if prev_range.end >= range.start {
2185 prev_range.end = cmp::max(prev_range.end, range.end);
2186 *prev_text = format!("{prev_text}{new_text}").into();
2187 } else {
2188 edits.push((range, new_text));
2189 }
2190 } else {
2191 edits.push((range, new_text));
2192 }
2193 }
2194 }
2195 if edits.is_empty() {
2196 return None;
2197 }
2198
2199 self.start_transaction();
2200 self.pending_autoindent.take();
2201 let autoindent_request = autoindent_mode
2202 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2203
2204 let edit_operation = self.text.edit(edits.iter().cloned());
2205 let edit_id = edit_operation.timestamp();
2206
2207 if let Some((before_edit, mode)) = autoindent_request {
2208 let mut delta = 0isize;
2209 let entries = edits
2210 .into_iter()
2211 .enumerate()
2212 .zip(&edit_operation.as_edit().unwrap().new_text)
2213 .map(|((ix, (range, _)), new_text)| {
2214 let new_text_length = new_text.len();
2215 let old_start = range.start.to_point(&before_edit);
2216 let new_start = (delta + range.start as isize) as usize;
2217 let range_len = range.end - range.start;
2218 delta += new_text_length as isize - range_len as isize;
2219
2220 // Decide what range of the insertion to auto-indent, and whether
2221 // the first line of the insertion should be considered a newly-inserted line
2222 // or an edit to an existing line.
2223 let mut range_of_insertion_to_indent = 0..new_text_length;
2224 let mut first_line_is_new = true;
2225
2226 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2227 let old_line_end = before_edit.line_len(old_start.row);
2228
2229 if old_start.column > old_line_start {
2230 first_line_is_new = false;
2231 }
2232
2233 if !new_text.contains('\n')
2234 && (old_start.column + (range_len as u32) < old_line_end
2235 || old_line_end == old_line_start)
2236 {
2237 first_line_is_new = false;
2238 }
2239
2240 // When inserting text starting with a newline, avoid auto-indenting the
2241 // previous line.
2242 if new_text.starts_with('\n') {
2243 range_of_insertion_to_indent.start += 1;
2244 first_line_is_new = true;
2245 }
2246
2247 let mut original_indent_column = None;
2248 if let AutoindentMode::Block {
2249 original_indent_columns,
2250 } = &mode
2251 {
2252 original_indent_column = Some(if new_text.starts_with('\n') {
2253 indent_size_for_text(
2254 new_text[range_of_insertion_to_indent.clone()].chars(),
2255 )
2256 .len
2257 } else {
2258 original_indent_columns
2259 .get(ix)
2260 .copied()
2261 .flatten()
2262 .unwrap_or_else(|| {
2263 indent_size_for_text(
2264 new_text[range_of_insertion_to_indent.clone()].chars(),
2265 )
2266 .len
2267 })
2268 });
2269
2270 // Avoid auto-indenting the line after the edit.
2271 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2272 range_of_insertion_to_indent.end -= 1;
2273 }
2274 }
2275
2276 AutoindentRequestEntry {
2277 first_line_is_new,
2278 original_indent_column,
2279 indent_size: before_edit.language_indent_size_at(range.start, cx),
2280 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2281 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2282 }
2283 })
2284 .collect();
2285
2286 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2287 before_edit,
2288 entries,
2289 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2290 ignore_empty_lines: false,
2291 }));
2292 }
2293
2294 self.end_transaction(cx);
2295 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2296 Some(edit_id)
2297 }
2298
2299 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2300 self.was_changed();
2301
2302 if self.edits_since::<usize>(old_version).next().is_none() {
2303 return;
2304 }
2305
2306 self.reparse(cx);
2307 cx.emit(BufferEvent::Edited);
2308 if was_dirty != self.is_dirty() {
2309 cx.emit(BufferEvent::DirtyChanged);
2310 }
2311 cx.notify();
2312 }
2313
2314 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2315 where
2316 I: IntoIterator<Item = Range<T>>,
2317 T: ToOffset + Copy,
2318 {
2319 let before_edit = self.snapshot();
2320 let entries = ranges
2321 .into_iter()
2322 .map(|range| AutoindentRequestEntry {
2323 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2324 first_line_is_new: true,
2325 indent_size: before_edit.language_indent_size_at(range.start, cx),
2326 original_indent_column: None,
2327 })
2328 .collect();
2329 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2330 before_edit,
2331 entries,
2332 is_block_mode: false,
2333 ignore_empty_lines: true,
2334 }));
2335 self.request_autoindent(cx);
2336 }
2337
2338 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2339 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2340 pub fn insert_empty_line(
2341 &mut self,
2342 position: impl ToPoint,
2343 space_above: bool,
2344 space_below: bool,
2345 cx: &mut Context<Self>,
2346 ) -> Point {
2347 let mut position = position.to_point(self);
2348
2349 self.start_transaction();
2350
2351 self.edit(
2352 [(position..position, "\n")],
2353 Some(AutoindentMode::EachLine),
2354 cx,
2355 );
2356
2357 if position.column > 0 {
2358 position += Point::new(1, 0);
2359 }
2360
2361 if !self.is_line_blank(position.row) {
2362 self.edit(
2363 [(position..position, "\n")],
2364 Some(AutoindentMode::EachLine),
2365 cx,
2366 );
2367 }
2368
2369 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2370 self.edit(
2371 [(position..position, "\n")],
2372 Some(AutoindentMode::EachLine),
2373 cx,
2374 );
2375 position.row += 1;
2376 }
2377
2378 if space_below
2379 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2380 {
2381 self.edit(
2382 [(position..position, "\n")],
2383 Some(AutoindentMode::EachLine),
2384 cx,
2385 );
2386 }
2387
2388 self.end_transaction(cx);
2389
2390 position
2391 }
2392
2393 /// Applies the given remote operations to the buffer.
2394 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2395 self.pending_autoindent.take();
2396 let was_dirty = self.is_dirty();
2397 let old_version = self.version.clone();
2398 let mut deferred_ops = Vec::new();
2399 let buffer_ops = ops
2400 .into_iter()
2401 .filter_map(|op| match op {
2402 Operation::Buffer(op) => Some(op),
2403 _ => {
2404 if self.can_apply_op(&op) {
2405 self.apply_op(op, cx);
2406 } else {
2407 deferred_ops.push(op);
2408 }
2409 None
2410 }
2411 })
2412 .collect::<Vec<_>>();
2413 for operation in buffer_ops.iter() {
2414 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2415 }
2416 self.text.apply_ops(buffer_ops);
2417 self.deferred_ops.insert(deferred_ops);
2418 self.flush_deferred_ops(cx);
2419 self.did_edit(&old_version, was_dirty, cx);
2420 // Notify independently of whether the buffer was edited as the operations could include a
2421 // selection update.
2422 cx.notify();
2423 }
2424
2425 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2426 let mut deferred_ops = Vec::new();
2427 for op in self.deferred_ops.drain().iter().cloned() {
2428 if self.can_apply_op(&op) {
2429 self.apply_op(op, cx);
2430 } else {
2431 deferred_ops.push(op);
2432 }
2433 }
2434 self.deferred_ops.insert(deferred_ops);
2435 }
2436
2437 pub fn has_deferred_ops(&self) -> bool {
2438 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2439 }
2440
2441 fn can_apply_op(&self, operation: &Operation) -> bool {
2442 match operation {
2443 Operation::Buffer(_) => {
2444 unreachable!("buffer operations should never be applied at this layer")
2445 }
2446 Operation::UpdateDiagnostics {
2447 diagnostics: diagnostic_set,
2448 ..
2449 } => diagnostic_set.iter().all(|diagnostic| {
2450 self.text.can_resolve(&diagnostic.range.start)
2451 && self.text.can_resolve(&diagnostic.range.end)
2452 }),
2453 Operation::UpdateSelections { selections, .. } => selections
2454 .iter()
2455 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2456 Operation::UpdateCompletionTriggers { .. } => true,
2457 }
2458 }
2459
2460 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2461 match operation {
2462 Operation::Buffer(_) => {
2463 unreachable!("buffer operations should never be applied at this layer")
2464 }
2465 Operation::UpdateDiagnostics {
2466 server_id,
2467 diagnostics: diagnostic_set,
2468 lamport_timestamp,
2469 } => {
2470 let snapshot = self.snapshot();
2471 self.apply_diagnostic_update(
2472 server_id,
2473 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2474 lamport_timestamp,
2475 cx,
2476 );
2477 }
2478 Operation::UpdateSelections {
2479 selections,
2480 lamport_timestamp,
2481 line_mode,
2482 cursor_shape,
2483 } => {
2484 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2485 if set.lamport_timestamp > lamport_timestamp {
2486 return;
2487 }
2488 }
2489
2490 self.remote_selections.insert(
2491 lamport_timestamp.replica_id,
2492 SelectionSet {
2493 selections,
2494 lamport_timestamp,
2495 line_mode,
2496 cursor_shape,
2497 },
2498 );
2499 self.text.lamport_clock.observe(lamport_timestamp);
2500 self.non_text_state_update_count += 1;
2501 }
2502 Operation::UpdateCompletionTriggers {
2503 triggers,
2504 lamport_timestamp,
2505 server_id,
2506 } => {
2507 if triggers.is_empty() {
2508 self.completion_triggers_per_language_server
2509 .remove(&server_id);
2510 self.completion_triggers = self
2511 .completion_triggers_per_language_server
2512 .values()
2513 .flat_map(|triggers| triggers.into_iter().cloned())
2514 .collect();
2515 } else {
2516 self.completion_triggers_per_language_server
2517 .insert(server_id, triggers.iter().cloned().collect());
2518 self.completion_triggers.extend(triggers);
2519 }
2520 self.text.lamport_clock.observe(lamport_timestamp);
2521 }
2522 }
2523 }
2524
2525 fn apply_diagnostic_update(
2526 &mut self,
2527 server_id: LanguageServerId,
2528 diagnostics: DiagnosticSet,
2529 lamport_timestamp: clock::Lamport,
2530 cx: &mut Context<Self>,
2531 ) {
2532 if lamport_timestamp > self.diagnostics_timestamp {
2533 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2534 if diagnostics.is_empty() {
2535 if let Ok(ix) = ix {
2536 self.diagnostics.remove(ix);
2537 }
2538 } else {
2539 match ix {
2540 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2541 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2542 };
2543 }
2544 self.diagnostics_timestamp = lamport_timestamp;
2545 self.non_text_state_update_count += 1;
2546 self.text.lamport_clock.observe(lamport_timestamp);
2547 cx.notify();
2548 cx.emit(BufferEvent::DiagnosticsUpdated);
2549 }
2550 }
2551
2552 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2553 self.was_changed();
2554 cx.emit(BufferEvent::Operation {
2555 operation,
2556 is_local,
2557 });
2558 }
2559
2560 /// Removes the selections for a given peer.
2561 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2562 self.remote_selections.remove(&replica_id);
2563 cx.notify();
2564 }
2565
2566 /// Undoes the most recent transaction.
2567 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2568 let was_dirty = self.is_dirty();
2569 let old_version = self.version.clone();
2570
2571 if let Some((transaction_id, operation)) = self.text.undo() {
2572 self.send_operation(Operation::Buffer(operation), true, cx);
2573 self.did_edit(&old_version, was_dirty, cx);
2574 Some(transaction_id)
2575 } else {
2576 None
2577 }
2578 }
2579
2580 /// Manually undoes a specific transaction in the buffer's undo history.
2581 pub fn undo_transaction(
2582 &mut self,
2583 transaction_id: TransactionId,
2584 cx: &mut Context<Self>,
2585 ) -> bool {
2586 let was_dirty = self.is_dirty();
2587 let old_version = self.version.clone();
2588 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2589 self.send_operation(Operation::Buffer(operation), true, cx);
2590 self.did_edit(&old_version, was_dirty, cx);
2591 true
2592 } else {
2593 false
2594 }
2595 }
2596
2597 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2598 pub fn undo_to_transaction(
2599 &mut self,
2600 transaction_id: TransactionId,
2601 cx: &mut Context<Self>,
2602 ) -> bool {
2603 let was_dirty = self.is_dirty();
2604 let old_version = self.version.clone();
2605
2606 let operations = self.text.undo_to_transaction(transaction_id);
2607 let undone = !operations.is_empty();
2608 for operation in operations {
2609 self.send_operation(Operation::Buffer(operation), true, cx);
2610 }
2611 if undone {
2612 self.did_edit(&old_version, was_dirty, cx)
2613 }
2614 undone
2615 }
2616
2617 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2618 let was_dirty = self.is_dirty();
2619 let operation = self.text.undo_operations(counts);
2620 let old_version = self.version.clone();
2621 self.send_operation(Operation::Buffer(operation), true, cx);
2622 self.did_edit(&old_version, was_dirty, cx);
2623 }
2624
2625 /// Redoes the most recently undone transaction.
2626 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2627 let was_dirty = self.is_dirty();
2628 let old_version = self.version.clone();
2629
2630 if let Some((transaction_id, operation)) = self.text.redo() {
2631 self.send_operation(Operation::Buffer(operation), true, cx);
2632 self.did_edit(&old_version, was_dirty, cx);
2633 Some(transaction_id)
2634 } else {
2635 None
2636 }
2637 }
2638
2639 /// Manually redoes all changes until a given transaction in the buffer's redo history.
2640 pub fn redo_to_transaction(
2641 &mut self,
2642 transaction_id: TransactionId,
2643 cx: &mut Context<Self>,
2644 ) -> bool {
2645 let was_dirty = self.is_dirty();
2646 let old_version = self.version.clone();
2647
2648 let operations = self.text.redo_to_transaction(transaction_id);
2649 let redone = !operations.is_empty();
2650 for operation in operations {
2651 self.send_operation(Operation::Buffer(operation), true, cx);
2652 }
2653 if redone {
2654 self.did_edit(&old_version, was_dirty, cx)
2655 }
2656 redone
2657 }
2658
2659 /// Override current completion triggers with the user-provided completion triggers.
2660 pub fn set_completion_triggers(
2661 &mut self,
2662 server_id: LanguageServerId,
2663 triggers: BTreeSet<String>,
2664 cx: &mut Context<Self>,
2665 ) {
2666 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2667 if triggers.is_empty() {
2668 self.completion_triggers_per_language_server
2669 .remove(&server_id);
2670 self.completion_triggers = self
2671 .completion_triggers_per_language_server
2672 .values()
2673 .flat_map(|triggers| triggers.into_iter().cloned())
2674 .collect();
2675 } else {
2676 self.completion_triggers_per_language_server
2677 .insert(server_id, triggers.clone());
2678 self.completion_triggers.extend(triggers.iter().cloned());
2679 }
2680 self.send_operation(
2681 Operation::UpdateCompletionTriggers {
2682 triggers: triggers.iter().cloned().collect(),
2683 lamport_timestamp: self.completion_triggers_timestamp,
2684 server_id,
2685 },
2686 true,
2687 cx,
2688 );
2689 cx.notify();
2690 }
2691
2692 /// Returns a list of strings which trigger a completion menu for this language.
2693 /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2694 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2695 &self.completion_triggers
2696 }
2697
2698 /// Call this directly after performing edits to prevent the preview tab
2699 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2700 /// to return false until there are additional edits.
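///
/// # Examples
///
/// A minimal sketch (not from the original source); it assumes a `buffer` and a
/// `cx: &mut Context<Buffer>`:
///
/// ```ignore
/// // Perform a programmatic edit that should not dismiss the preview tab.
/// buffer.edit([(0..0, "// generated header\n")], None, cx);
/// buffer.refresh_preview();
/// assert!(buffer.preserve_preview());
/// ```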
2701 pub fn refresh_preview(&mut self) {
2702 self.preview_version = self.version.clone();
2703 }
2704
2705 /// Whether we should preserve the preview status of a tab containing this buffer.
2706 pub fn preserve_preview(&self) -> bool {
2707 !self.has_edits_since(&self.preview_version)
2708 }
2709}
2710
2711#[doc(hidden)]
2712#[cfg(any(test, feature = "test-support"))]
2713impl Buffer {
2714 pub fn edit_via_marked_text(
2715 &mut self,
2716 marked_string: &str,
2717 autoindent_mode: Option<AutoindentMode>,
2718 cx: &mut Context<Self>,
2719 ) {
2720 let edits = self.edits_for_marked_text(marked_string);
2721 self.edit(edits, autoindent_mode, cx);
2722 }
2723
2724 pub fn set_group_interval(&mut self, group_interval: Duration) {
2725 self.text.set_group_interval(group_interval);
2726 }
2727
2728 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2729 where
2730 T: rand::Rng,
2731 {
2732 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2733 let mut last_end = None;
2734 for _ in 0..old_range_count {
2735 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2736 break;
2737 }
2738
2739 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2740 let mut range = self.random_byte_range(new_start, rng);
2741 if rng.gen_bool(0.2) {
2742 mem::swap(&mut range.start, &mut range.end);
2743 }
2744 last_end = Some(range.end);
2745
2746 let new_text_len = rng.gen_range(0..10);
2747 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2748 new_text = new_text.to_uppercase();
2749
2750 edits.push((range, new_text));
2751 }
2752 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2753 self.edit(edits, None, cx);
2754 }
2755
2756 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2757 let was_dirty = self.is_dirty();
2758 let old_version = self.version.clone();
2759
2760 let ops = self.text.randomly_undo_redo(rng);
2761 if !ops.is_empty() {
2762 for op in ops {
2763 self.send_operation(Operation::Buffer(op), true, cx);
2764 self.did_edit(&old_version, was_dirty, cx);
2765 }
2766 }
2767 }
2768}
2769
2770impl EventEmitter<BufferEvent> for Buffer {}
2771
2772impl Deref for Buffer {
2773 type Target = TextBuffer;
2774
2775 fn deref(&self) -> &Self::Target {
2776 &self.text
2777 }
2778}
2779
2780impl BufferSnapshot {
2781 /// Returns [`IndentSize`] for a given line that respects user settings and
2782 /// language preferences.
2783 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2784 indent_size_for_line(self, row)
2785 }
2786
2787 /// Returns [`IndentSize`] for a given position that respects user settings
2788 /// and language preferences.
2789 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2790 let settings = language_settings(
2791 self.language_at(position).map(|l| l.name()),
2792 self.file(),
2793 cx,
2794 );
2795 if settings.hard_tabs {
2796 IndentSize::tab()
2797 } else {
2798 IndentSize::spaces(settings.tab_size.get())
2799 }
2800 }
2801
2802 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2803 /// is passed in as `single_indent_size`.
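///
/// # Examples
///
/// A minimal sketch (not from the original source); it assumes a `snapshot` of
/// this buffer:
///
/// ```ignore
/// // Suggest indentation for rows 2 through 4, with 4 spaces as the unit of
/// // indentation.
/// let suggestions = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
/// for (row, indent) in suggestions {
///     println!("row {row}: indent to {} columns", indent.len);
/// }
/// ```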
2804 pub fn suggested_indents(
2805 &self,
2806 rows: impl Iterator<Item = u32>,
2807 single_indent_size: IndentSize,
2808 ) -> BTreeMap<u32, IndentSize> {
2809 let mut result = BTreeMap::new();
2810
2811 for row_range in contiguous_ranges(rows, 10) {
2812 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2813 Some(suggestions) => suggestions,
2814 _ => break,
2815 };
2816
2817 for (row, suggestion) in row_range.zip(suggestions) {
2818 let indent_size = if let Some(suggestion) = suggestion {
2819 result
2820 .get(&suggestion.basis_row)
2821 .copied()
2822 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2823 .with_delta(suggestion.delta, single_indent_size)
2824 } else {
2825 self.indent_size_for_line(row)
2826 };
2827
2828 result.insert(row, indent_size);
2829 }
2830 }
2831
2832 result
2833 }
2834
2835 fn suggest_autoindents(
2836 &self,
2837 row_range: Range<u32>,
2838 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2839 let config = &self.language.as_ref()?.config;
2840 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2841
2842 // Find the suggested indentation ranges based on the syntax tree.
2843 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2844 let end = Point::new(row_range.end, 0);
2845 let range = (start..end).to_offset(&self.text);
2846 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2847 Some(&grammar.indents_config.as_ref()?.query)
2848 });
2849 let indent_configs = matches
2850 .grammars()
2851 .iter()
2852 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2853 .collect::<Vec<_>>();
2854
2855 let mut indent_ranges = Vec::<Range<Point>>::new();
2856 let mut outdent_positions = Vec::<Point>::new();
2857 while let Some(mat) = matches.peek() {
2858 let mut start: Option<Point> = None;
2859 let mut end: Option<Point> = None;
2860
2861 let config = &indent_configs[mat.grammar_index];
2862 for capture in mat.captures {
2863 if capture.index == config.indent_capture_ix {
2864 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2865 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2866 } else if Some(capture.index) == config.start_capture_ix {
2867 start = Some(Point::from_ts_point(capture.node.end_position()));
2868 } else if Some(capture.index) == config.end_capture_ix {
2869 end = Some(Point::from_ts_point(capture.node.start_position()));
2870 } else if Some(capture.index) == config.outdent_capture_ix {
2871 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2872 }
2873 }
2874
2875 matches.advance();
2876 if let Some((start, end)) = start.zip(end) {
2877 if start.row == end.row {
2878 continue;
2879 }
2880
2881 let range = start..end;
2882 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2883 Err(ix) => indent_ranges.insert(ix, range),
2884 Ok(ix) => {
2885 let prev_range = &mut indent_ranges[ix];
2886 prev_range.end = prev_range.end.max(range.end);
2887 }
2888 }
2889 }
2890 }
2891
2892 let mut error_ranges = Vec::<Range<Point>>::new();
2893 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2894 grammar.error_query.as_ref()
2895 });
2896 while let Some(mat) = matches.peek() {
2897 let node = mat.captures[0].node;
2898 let start = Point::from_ts_point(node.start_position());
2899 let end = Point::from_ts_point(node.end_position());
2900 let range = start..end;
2901 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2902 Ok(ix) | Err(ix) => ix,
2903 };
2904 let mut end_ix = ix;
2905 while let Some(existing_range) = error_ranges.get(end_ix) {
2906 if existing_range.end < end {
2907 end_ix += 1;
2908 } else {
2909 break;
2910 }
2911 }
2912 error_ranges.splice(ix..end_ix, [range]);
2913 matches.advance();
2914 }
2915
2916 outdent_positions.sort();
2917 for outdent_position in outdent_positions {
2918 // Find the innermost indent range containing this outdent position,
2919 // and set its end to the outdent position.
2920 if let Some(range_to_truncate) = indent_ranges
2921 .iter_mut()
2922 .filter(|indent_range| indent_range.contains(&outdent_position))
2923 .next_back()
2924 {
2925 range_to_truncate.end = outdent_position;
2926 }
2927 }
2928
2929 // Find the suggested indentation increases and decreases based on regexes.
2930 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2931 self.for_each_line(
2932 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2933 ..Point::new(row_range.end, 0),
2934 |row, line| {
2935 if config
2936 .decrease_indent_pattern
2937 .as_ref()
2938 .map_or(false, |regex| regex.is_match(line))
2939 {
2940 indent_change_rows.push((row, Ordering::Less));
2941 }
2942 if config
2943 .increase_indent_pattern
2944 .as_ref()
2945 .map_or(false, |regex| regex.is_match(line))
2946 {
2947 indent_change_rows.push((row + 1, Ordering::Greater));
2948 }
2949 },
2950 );
2951
2952 let mut indent_changes = indent_change_rows.into_iter().peekable();
2953 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2954 prev_non_blank_row.unwrap_or(0)
2955 } else {
2956 row_range.start.saturating_sub(1)
2957 };
2958 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2959 Some(row_range.map(move |row| {
2960 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2961
2962 let mut indent_from_prev_row = false;
2963 let mut outdent_from_prev_row = false;
2964 let mut outdent_to_row = u32::MAX;
2965 let mut from_regex = false;
2966
2967 while let Some((indent_row, delta)) = indent_changes.peek() {
2968 match indent_row.cmp(&row) {
2969 Ordering::Equal => match delta {
2970 Ordering::Less => {
2971 from_regex = true;
2972 outdent_from_prev_row = true
2973 }
2974 Ordering::Greater => {
2975 indent_from_prev_row = true;
2976 from_regex = true
2977 }
2978 _ => {}
2979 },
2980
2981 Ordering::Greater => break,
2982 Ordering::Less => {}
2983 }
2984
2985 indent_changes.next();
2986 }
2987
2988 for range in &indent_ranges {
2989 if range.start.row >= row {
2990 break;
2991 }
2992 if range.start.row == prev_row && range.end > row_start {
2993 indent_from_prev_row = true;
2994 }
2995 if range.end > prev_row_start && range.end <= row_start {
2996 outdent_to_row = outdent_to_row.min(range.start.row);
2997 }
2998 }
2999
3000 let within_error = error_ranges
3001 .iter()
3002 .any(|e| e.start.row < row && e.end > row_start);
3003
3004 let suggestion = if outdent_to_row == prev_row
3005 || (outdent_from_prev_row && indent_from_prev_row)
3006 {
3007 Some(IndentSuggestion {
3008 basis_row: prev_row,
3009 delta: Ordering::Equal,
3010 within_error: within_error && !from_regex,
3011 })
3012 } else if indent_from_prev_row {
3013 Some(IndentSuggestion {
3014 basis_row: prev_row,
3015 delta: Ordering::Greater,
3016 within_error: within_error && !from_regex,
3017 })
3018 } else if outdent_to_row < prev_row {
3019 Some(IndentSuggestion {
3020 basis_row: outdent_to_row,
3021 delta: Ordering::Equal,
3022 within_error: within_error && !from_regex,
3023 })
3024 } else if outdent_from_prev_row {
3025 Some(IndentSuggestion {
3026 basis_row: prev_row,
3027 delta: Ordering::Less,
3028 within_error: within_error && !from_regex,
3029 })
3030 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3031 {
3032 Some(IndentSuggestion {
3033 basis_row: prev_row,
3034 delta: Ordering::Equal,
3035 within_error: within_error && !from_regex,
3036 })
3037 } else {
3038 None
3039 };
3040
3041 prev_row = row;
3042 prev_row_start = row_start;
3043 suggestion
3044 }))
3045 }
3046
3047 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3048 while row > 0 {
3049 row -= 1;
3050 if !self.is_line_blank(row) {
3051 return Some(row);
3052 }
3053 }
3054 None
3055 }
3056
3057 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3058 let captures = self.syntax.captures(range, &self.text, |grammar| {
3059 grammar.highlights_query.as_ref()
3060 });
3061 let highlight_maps = captures
3062 .grammars()
3063 .iter()
3064 .map(|grammar| grammar.highlight_map())
3065 .collect();
3066 (captures, highlight_maps)
3067 }
3068
3069 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3070 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3071 /// returned in chunks where each chunk has a single syntax highlighting style and
3072 /// diagnostic status.
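///
/// # Examples
///
/// A minimal sketch (not from the original source); it assumes a `snapshot` of
/// this buffer:
///
/// ```ignore
/// // Walk the syntax-aware chunks of the whole buffer.
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // `chunk.syntax_highlight_id` can be resolved against a `SyntaxTheme`
///     // to get a highlight style for `chunk.text`.
///     print!("{}", chunk.text);
/// }
/// ```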
3073 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3074 let range = range.start.to_offset(self)..range.end.to_offset(self);
3075
3076 let mut syntax = None;
3077 if language_aware {
3078 syntax = Some(self.get_highlights(range.clone()));
3079 }
3080 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3081 let diagnostics = language_aware;
3082 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3083 }
3084
3085 pub fn highlighted_text_for_range<T: ToOffset>(
3086 &self,
3087 range: Range<T>,
3088 override_style: Option<HighlightStyle>,
3089 syntax_theme: &SyntaxTheme,
3090 ) -> HighlightedText {
3091 HighlightedText::from_buffer_range(
3092 range,
3093 &self.text,
3094 &self.syntax,
3095 override_style,
3096 syntax_theme,
3097 )
3098 }
3099
3100 /// Invokes the given callback for each line of text in the given range of the buffer.
3101 /// Uses callback to avoid allocating a string for each line.
3102 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3103 let mut line = String::new();
3104 let mut row = range.start.row;
3105 for chunk in self
3106 .as_rope()
3107 .chunks_in_range(range.to_offset(self))
3108 .chain(["\n"])
3109 {
3110 for (newline_ix, text) in chunk.split('\n').enumerate() {
3111 if newline_ix > 0 {
3112 callback(row, &line);
3113 row += 1;
3114 line.clear();
3115 }
3116 line.push_str(text);
3117 }
3118 }
3119 }
3120
3121 /// Iterates over every [`SyntaxLayer`] in the buffer.
3122 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3123 self.syntax
3124 .layers_for_range(0..self.len(), &self.text, true)
3125 }
3126
3127 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3128 let offset = position.to_offset(self);
3129 self.syntax
3130 .layers_for_range(offset..offset, &self.text, false)
3131 .filter(|l| l.node().end_byte() > offset)
3132 .last()
3133 }
3134
3135 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3136 &self,
3137 range: Range<D>,
3138 ) -> Option<SyntaxLayer> {
3139 let range = range.to_offset(self);
3140 return self
3141 .syntax
3142 .layers_for_range(range, &self.text, false)
3143 .max_by(|a, b| {
3144 if a.depth != b.depth {
3145 a.depth.cmp(&b.depth)
3146 } else if a.offset.0 != b.offset.0 {
3147 a.offset.0.cmp(&b.offset.0)
3148 } else {
3149 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3150 }
3151 });
3152 }
3153
3154 /// Returns the main [`Language`].
3155 pub fn language(&self) -> Option<&Arc<Language>> {
3156 self.language.as_ref()
3157 }
3158
3159 /// Returns the [`Language`] at the given location.
3160 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3161 self.syntax_layer_at(position)
3162 .map(|info| info.language)
3163 .or(self.language.as_ref())
3164 }
3165
3166 /// Returns the settings for the language at the given location.
3167 pub fn settings_at<'a, D: ToOffset>(
3168 &'a self,
3169 position: D,
3170 cx: &'a App,
3171 ) -> Cow<'a, LanguageSettings> {
3172 language_settings(
3173 self.language_at(position).map(|l| l.name()),
3174 self.file.as_ref(),
3175 cx,
3176 )
3177 }
3178
3179 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3180 CharClassifier::new(self.language_scope_at(point))
3181 }
3182
3183 /// Returns the [`LanguageScope`] at the given location.
3184 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3185 let offset = position.to_offset(self);
3186 let mut scope = None;
3187 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3188
3189 // Use the layer that has the smallest node intersecting the given point.
3190 for layer in self
3191 .syntax
3192 .layers_for_range(offset..offset, &self.text, false)
3193 {
3194 let mut cursor = layer.node().walk();
3195
3196 let mut range = None;
3197 loop {
3198 let child_range = cursor.node().byte_range();
3199 if !child_range.contains(&offset) {
3200 break;
3201 }
3202
3203 range = Some(child_range);
3204 if cursor.goto_first_child_for_byte(offset).is_none() {
3205 break;
3206 }
3207 }
3208
3209 if let Some(range) = range {
3210 if smallest_range_and_depth.as_ref().map_or(
3211 true,
3212 |(smallest_range, smallest_range_depth)| {
3213 if layer.depth > *smallest_range_depth {
3214 true
3215 } else if layer.depth == *smallest_range_depth {
3216 range.len() < smallest_range.len()
3217 } else {
3218 false
3219 }
3220 },
3221 ) {
3222 smallest_range_and_depth = Some((range, layer.depth));
3223 scope = Some(LanguageScope {
3224 language: layer.language.clone(),
3225 override_id: layer.override_id(offset, &self.text),
3226 });
3227 }
3228 }
3229 }
3230
3231 scope.or_else(|| {
3232 self.language.clone().map(|language| LanguageScope {
3233 language,
3234 override_id: None,
3235 })
3236 })
3237 }
3238
3239 /// Returns a tuple of the range and character kind of the word
3240 /// surrounding the given position.
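///
/// # Examples
///
/// A minimal sketch (not from the original source); it assumes a `snapshot`, a
/// cursor `offset`, and that `CharKind` has a `Word` variant as used elsewhere
/// in this crate:
///
/// ```ignore
/// let (range, kind) = snapshot.surrounding_word(offset);
/// if kind == Some(CharKind::Word) {
///     // `range` is the byte range of the word under the cursor.
/// }
/// ```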
3241 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3242 let mut start = start.to_offset(self);
3243 let mut end = start;
3244 let mut next_chars = self.chars_at(start).peekable();
3245 let mut prev_chars = self.reversed_chars_at(start).peekable();
3246
3247 let classifier = self.char_classifier_at(start);
3248 let word_kind = cmp::max(
3249 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3250 next_chars.peek().copied().map(|c| classifier.kind(c)),
3251 );
3252
3253 for ch in prev_chars {
3254 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3255 start -= ch.len_utf8();
3256 } else {
3257 break;
3258 }
3259 }
3260
3261 for ch in next_chars {
3262 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3263 end += ch.len_utf8();
3264 } else {
3265 break;
3266 }
3267 }
3268
3269 (start..end, word_kind)
3270 }
3271
3272 /// Returns the closest syntax node enclosing the given range.
3273 pub fn syntax_ancestor<'a, T: ToOffset>(
3274 &'a self,
3275 range: Range<T>,
3276 ) -> Option<tree_sitter::Node<'a>> {
3277 let range = range.start.to_offset(self)..range.end.to_offset(self);
3278 let mut result: Option<tree_sitter::Node<'a>> = None;
3279 'outer: for layer in self
3280 .syntax
3281 .layers_for_range(range.clone(), &self.text, true)
3282 {
3283 let mut cursor = layer.node().walk();
3284
3285 // Descend to the first leaf that touches the start of the range,
3286 // and if the range is non-empty, extends beyond the start.
3287 while cursor.goto_first_child_for_byte(range.start).is_some() {
3288 if !range.is_empty() && cursor.node().end_byte() == range.start {
3289 cursor.goto_next_sibling();
3290 }
3291 }
3292
3293 // Ascend to the smallest ancestor that strictly contains the range.
3294 loop {
3295 let node_range = cursor.node().byte_range();
3296 if node_range.start <= range.start
3297 && node_range.end >= range.end
3298 && node_range.len() > range.len()
3299 {
3300 break;
3301 }
3302 if !cursor.goto_parent() {
3303 continue 'outer;
3304 }
3305 }
3306
3307 let left_node = cursor.node();
3308 let mut layer_result = left_node;
3309
3310 // For an empty range, try to find another node immediately to the right of the range.
3311 if left_node.end_byte() == range.start {
3312 let mut right_node = None;
3313 while !cursor.goto_next_sibling() {
3314 if !cursor.goto_parent() {
3315 break;
3316 }
3317 }
3318
3319 while cursor.node().start_byte() == range.start {
3320 right_node = Some(cursor.node());
3321 if !cursor.goto_first_child() {
3322 break;
3323 }
3324 }
3325
3326 // If there is a candidate node on both sides of the (empty) range, then
3327 // decide between the two by favoring a named node over an anonymous token.
3328 // If both nodes are the same in that regard, favor the right one.
3329 if let Some(right_node) = right_node {
3330 if right_node.is_named() || !left_node.is_named() {
3331 layer_result = right_node;
3332 }
3333 }
3334 }
3335
3336 if let Some(previous_result) = &result {
3337 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3338 continue;
3339 }
3340 }
3341 result = Some(layer_result);
3342 }
3343
3344 result
3345 }
3346
3347 /// Returns the outline for the buffer.
3348 ///
3349 /// This method allows passing an optional [`SyntaxTheme`] to
3350 /// syntax-highlight the returned symbols.
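///
/// # Examples
///
/// A minimal sketch (not from the original source), using the lower-level
/// [`BufferSnapshot::outline_items_containing`]; it assumes a `snapshot` and
/// that `OutlineItem`'s `depth` and `text` fields are accessible to the caller:
///
/// ```ignore
/// if let Some(items) = snapshot.outline_items_containing(0..snapshot.len(), true, None) {
///     for item in items {
///         println!("{}{}", "  ".repeat(item.depth), item.text);
///     }
/// }
/// ```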
3351 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3352 self.outline_items_containing(0..self.len(), true, theme)
3353 .map(Outline::new)
3354 }
3355
3356 /// Returns all the symbols that contain the given position.
3357 ///
3358 /// This method allows passing an optional [`SyntaxTheme`] to
3359 /// syntax-highlight the returned symbols.
3360 pub fn symbols_containing<T: ToOffset>(
3361 &self,
3362 position: T,
3363 theme: Option<&SyntaxTheme>,
3364 ) -> Option<Vec<OutlineItem<Anchor>>> {
3365 let position = position.to_offset(self);
3366 let mut items = self.outline_items_containing(
3367 position.saturating_sub(1)..self.len().min(position + 1),
3368 false,
3369 theme,
3370 )?;
3371 let mut prev_depth = None;
3372 items.retain(|item| {
3373 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3374 prev_depth = Some(item.depth);
3375 result
3376 });
3377 Some(items)
3378 }
3379
3380 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3381 let range = range.to_offset(self);
3382 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3383 grammar.outline_config.as_ref().map(|c| &c.query)
3384 });
3385 let configs = matches
3386 .grammars()
3387 .iter()
3388 .map(|g| g.outline_config.as_ref().unwrap())
3389 .collect::<Vec<_>>();
3390
3391 while let Some(mat) = matches.peek() {
3392 let config = &configs[mat.grammar_index];
3393 let containing_item_node = maybe!({
3394 let item_node = mat.captures.iter().find_map(|cap| {
3395 if cap.index == config.item_capture_ix {
3396 Some(cap.node)
3397 } else {
3398 None
3399 }
3400 })?;
3401
3402 let item_byte_range = item_node.byte_range();
3403 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3404 None
3405 } else {
3406 Some(item_node)
3407 }
3408 });
3409
3410 if let Some(item_node) = containing_item_node {
3411 return Some(
3412 Point::from_ts_point(item_node.start_position())
3413 ..Point::from_ts_point(item_node.end_position()),
3414 );
3415 }
3416
3417 matches.advance();
3418 }
3419 None
3420 }
3421
3422 pub fn outline_items_containing<T: ToOffset>(
3423 &self,
3424 range: Range<T>,
3425 include_extra_context: bool,
3426 theme: Option<&SyntaxTheme>,
3427 ) -> Option<Vec<OutlineItem<Anchor>>> {
3428 let range = range.to_offset(self);
3429 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3430 grammar.outline_config.as_ref().map(|c| &c.query)
3431 });
3432 let configs = matches
3433 .grammars()
3434 .iter()
3435 .map(|g| g.outline_config.as_ref().unwrap())
3436 .collect::<Vec<_>>();
3437
3438 let mut items = Vec::new();
3439 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3440 while let Some(mat) = matches.peek() {
3441 let config = &configs[mat.grammar_index];
3442 if let Some(item) =
3443 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3444 {
3445 items.push(item);
3446 } else if let Some(capture) = mat
3447 .captures
3448 .iter()
3449 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3450 {
3451 let capture_range = capture.node.start_position()..capture.node.end_position();
3452 let mut capture_row_range =
3453 capture_range.start.row as u32..capture_range.end.row as u32;
3454 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3455 {
3456 capture_row_range.end -= 1;
3457 }
3458 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3459 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3460 last_row_range.end = capture_row_range.end;
3461 } else {
3462 annotation_row_ranges.push(capture_row_range);
3463 }
3464 } else {
3465 annotation_row_ranges.push(capture_row_range);
3466 }
3467 }
3468 matches.advance();
3469 }
3470
3471 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3472
3473 // Assign depths based on containment relationships and convert to anchors.
3474 let mut item_ends_stack = Vec::<Point>::new();
3475 let mut anchor_items = Vec::new();
3476 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3477 for item in items {
3478 while let Some(last_end) = item_ends_stack.last().copied() {
3479 if last_end < item.range.end {
3480 item_ends_stack.pop();
3481 } else {
3482 break;
3483 }
3484 }
3485
3486 let mut annotation_row_range = None;
3487 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3488 let row_preceding_item = item.range.start.row.saturating_sub(1);
3489 if next_annotation_row_range.end < row_preceding_item {
3490 annotation_row_ranges.next();
3491 } else {
3492 if next_annotation_row_range.end == row_preceding_item {
3493 annotation_row_range = Some(next_annotation_row_range.clone());
3494 annotation_row_ranges.next();
3495 }
3496 break;
3497 }
3498 }
3499
3500 anchor_items.push(OutlineItem {
3501 depth: item_ends_stack.len(),
3502 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3503 text: item.text,
3504 highlight_ranges: item.highlight_ranges,
3505 name_ranges: item.name_ranges,
3506 body_range: item.body_range.map(|body_range| {
3507 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3508 }),
3509 annotation_range: annotation_row_range.map(|annotation_range| {
3510 self.anchor_after(Point::new(annotation_range.start, 0))
3511 ..self.anchor_before(Point::new(
3512 annotation_range.end,
3513 self.line_len(annotation_range.end),
3514 ))
3515 }),
3516 });
3517 item_ends_stack.push(item.range.end);
3518 }
3519
3520 Some(anchor_items)
3521 }
3522
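    /// Builds a single outline item from the given syntax match, returning `None`
    /// if the match's item capture does not intersect `range`, or if the match has
    /// no name or context captures.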
3523 fn next_outline_item(
3524 &self,
3525 config: &OutlineConfig,
3526 mat: &SyntaxMapMatch,
3527 range: &Range<usize>,
3528 include_extra_context: bool,
3529 theme: Option<&SyntaxTheme>,
3530 ) -> Option<OutlineItem<Point>> {
3531 let item_node = mat.captures.iter().find_map(|cap| {
3532 if cap.index == config.item_capture_ix {
3533 Some(cap.node)
3534 } else {
3535 None
3536 }
3537 })?;
3538
3539 let item_byte_range = item_node.byte_range();
3540 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3541 return None;
3542 }
3543 let item_point_range = Point::from_ts_point(item_node.start_position())
3544 ..Point::from_ts_point(item_node.end_position());
3545
3546 let mut open_point = None;
3547 let mut close_point = None;
3548 let mut buffer_ranges = Vec::new();
3549 for capture in mat.captures {
3550 let node_is_name;
3551 if capture.index == config.name_capture_ix {
3552 node_is_name = true;
3553 } else if Some(capture.index) == config.context_capture_ix
3554 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3555 {
3556 node_is_name = false;
3557 } else {
3558 if Some(capture.index) == config.open_capture_ix {
3559 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3560 } else if Some(capture.index) == config.close_capture_ix {
3561 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3562 }
3563
3564 continue;
3565 }
3566
3567 let mut range = capture.node.start_byte()..capture.node.end_byte();
3568 let start = capture.node.start_position();
3569 if capture.node.end_position().row > start.row {
3570 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3571 }
3572
3573 if !range.is_empty() {
3574 buffer_ranges.push((range, node_is_name));
3575 }
3576 }
3577 if buffer_ranges.is_empty() {
3578 return None;
3579 }
3580 let mut text = String::new();
3581 let mut highlight_ranges = Vec::new();
3582 let mut name_ranges = Vec::new();
3583 let mut chunks = self.chunks(
3584 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3585 true,
3586 );
3587 let mut last_buffer_range_end = 0;
3588
3589 for (buffer_range, is_name) in buffer_ranges {
3590 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3591 if space_added {
3592 text.push(' ');
3593 }
3594 let before_append_len = text.len();
3595 let mut offset = buffer_range.start;
3596 chunks.seek(buffer_range.clone());
3597 for mut chunk in chunks.by_ref() {
3598 if chunk.text.len() > buffer_range.end - offset {
3599 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3600 offset = buffer_range.end;
3601 } else {
3602 offset += chunk.text.len();
3603 }
3604 let style = chunk
3605 .syntax_highlight_id
3606 .zip(theme)
3607 .and_then(|(highlight, theme)| highlight.style(theme));
3608 if let Some(style) = style {
3609 let start = text.len();
3610 let end = start + chunk.text.len();
3611 highlight_ranges.push((start..end, style));
3612 }
3613 text.push_str(chunk.text);
3614 if offset >= buffer_range.end {
3615 break;
3616 }
3617 }
3618 if is_name {
3619 let after_append_len = text.len();
3620 let start = if space_added && !name_ranges.is_empty() {
3621 before_append_len - 1
3622 } else {
3623 before_append_len
3624 };
3625 name_ranges.push(start..after_append_len);
3626 }
3627 last_buffer_range_end = buffer_range.end;
3628 }
3629
3630 Some(OutlineItem {
3631 depth: 0, // We'll calculate the depth later
3632 range: item_point_range,
3633 text,
3634 highlight_ranges,
3635 name_ranges,
3636 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3637 annotation_range: None,
3638 })
3639 }
3640
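    /// Returns the ranges of function bodies within the given range, for use when
    /// folding function bodies. The ranges are derived from each language's text
    /// object query.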
3641 pub fn function_body_fold_ranges<T: ToOffset>(
3642 &self,
3643 within: Range<T>,
3644 ) -> impl Iterator<Item = Range<usize>> + '_ {
3645 self.text_object_ranges(within, TreeSitterOptions::default())
3646 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3647 }
3648
3649 /// For each grammar in the language, runs the provided
3650 /// [`tree_sitter::Query`] against the given range.
3651 pub fn matches(
3652 &self,
3653 range: Range<usize>,
3654 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3655 ) -> SyntaxMapMatches {
3656 self.syntax.matches(range, self, query)
3657 }
3658
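    /// Returns all bracket pairs, from each language's brackets query, whose
    /// ranges overlap the given range.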
3659 pub fn all_bracket_ranges(
3660 &self,
3661 range: Range<usize>,
3662 ) -> impl Iterator<Item = BracketMatch> + '_ {
3663 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3664 grammar.brackets_config.as_ref().map(|c| &c.query)
3665 });
3666 let configs = matches
3667 .grammars()
3668 .iter()
3669 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3670 .collect::<Vec<_>>();
3671
3672 iter::from_fn(move || {
3673 while let Some(mat) = matches.peek() {
3674 let mut open = None;
3675 let mut close = None;
3676 let config = &configs[mat.grammar_index];
3677 let pattern = &config.patterns[mat.pattern_index];
3678 for capture in mat.captures {
3679 if capture.index == config.open_capture_ix {
3680 open = Some(capture.node.byte_range());
3681 } else if capture.index == config.close_capture_ix {
3682 close = Some(capture.node.byte_range());
3683 }
3684 }
3685
3686 matches.advance();
3687
3688 let Some((open_range, close_range)) = open.zip(close) else {
3689 continue;
3690 };
3691
3692 let bracket_range = open_range.start..=close_range.end;
3693 if !bracket_range.overlaps(&range) {
3694 continue;
3695 }
3696
3697 return Some(BracketMatch {
3698 open_range,
3699 close_range,
3700 newline_only: pattern.newline_only,
3701 });
3702 }
3703 None
3704 })
3705 }
3706
    /// Returns bracket pair ranges that overlap or are adjacent to the given range.
3708 pub fn bracket_ranges<T: ToOffset>(
3709 &self,
3710 range: Range<T>,
3711 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Expand the range by one character on each side, so that bracket pairs
        // adjacent to the range are also matched.
3713 let range = range.start.to_offset(self).saturating_sub(1)
3714 ..self.len().min(range.end.to_offset(self) + 1);
3715 self.all_bracket_ranges(range)
3716 .filter(|pair| !pair.newline_only)
3717 }
3718
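    /// Returns the ranges of text objects, as defined by each language's text
    /// object query, that overlap the given range, along with the kind of text
    /// object that was matched.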
3719 pub fn text_object_ranges<T: ToOffset>(
3720 &self,
3721 range: Range<T>,
3722 options: TreeSitterOptions,
3723 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3724 let range = range.start.to_offset(self).saturating_sub(1)
3725 ..self.len().min(range.end.to_offset(self) + 1);
3726
3727 let mut matches =
3728 self.syntax
3729 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3730 grammar.text_object_config.as_ref().map(|c| &c.query)
3731 });
3732
3733 let configs = matches
3734 .grammars()
3735 .iter()
3736 .map(|grammar| grammar.text_object_config.as_ref())
3737 .collect::<Vec<_>>();
3738
3739 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3740
3741 iter::from_fn(move || {
3742 loop {
3743 while let Some(capture) = captures.pop() {
3744 if capture.0.overlaps(&range) {
3745 return Some(capture);
3746 }
3747 }
3748
3749 let mat = matches.peek()?;
3750
3751 let Some(config) = configs[mat.grammar_index].as_ref() else {
3752 matches.advance();
3753 continue;
3754 };
3755
3756 for capture in mat.captures {
3757 let Some(ix) = config
3758 .text_objects_by_capture_ix
3759 .binary_search_by_key(&capture.index, |e| e.0)
3760 .ok()
3761 else {
3762 continue;
3763 };
3764 let text_object = config.text_objects_by_capture_ix[ix].1;
3765 let byte_range = capture.node.byte_range();
3766
3767 let mut found = false;
3768 for (range, existing) in captures.iter_mut() {
3769 if existing == &text_object {
3770 range.start = range.start.min(byte_range.start);
3771 range.end = range.end.max(byte_range.end);
3772 found = true;
3773 break;
3774 }
3775 }
3776
3777 if !found {
3778 captures.push((byte_range, text_object));
3779 }
3780 }
3781
3782 matches.advance();
3783 }
3784 })
3785 }
3786
    /// Returns the ranges of enclosing bracket pairs that contain the given range.
3788 pub fn enclosing_bracket_ranges<T: ToOffset>(
3789 &self,
3790 range: Range<T>,
3791 ) -> impl Iterator<Item = BracketMatch> + '_ {
3792 let range = range.start.to_offset(self)..range.end.to_offset(self);
3793
3794 self.bracket_ranges(range.clone()).filter(move |pair| {
3795 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3796 })
3797 }
3798
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges
    /// are considered.
3802 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3803 &self,
3804 range: Range<T>,
3805 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3806 ) -> Option<(Range<usize>, Range<usize>)> {
3807 let range = range.start.to_offset(self)..range.end.to_offset(self);
3808
3809 // Get the ranges of the innermost pair of brackets.
3810 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3811
3812 for pair in self.enclosing_bracket_ranges(range.clone()) {
3813 if let Some(range_filter) = range_filter {
3814 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3815 continue;
3816 }
3817 }
3818
3819 let len = pair.close_range.end - pair.open_range.start;
3820
3821 if let Some((existing_open, existing_close)) = &result {
3822 let existing_len = existing_close.end - existing_open.start;
3823 if len > existing_len {
3824 continue;
3825 }
3826 }
3827
3828 result = Some((pair.open_range, pair.close_range));
3829 }
3830
3831 result
3832 }
3833
3834 /// Returns anchor ranges for any matches of the redaction query.
3835 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3836 /// will be run on the relevant section of the buffer.
3837 pub fn redacted_ranges<T: ToOffset>(
3838 &self,
3839 range: Range<T>,
3840 ) -> impl Iterator<Item = Range<usize>> + '_ {
3841 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3842 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3843 grammar
3844 .redactions_config
3845 .as_ref()
3846 .map(|config| &config.query)
3847 });
3848
3849 let configs = syntax_matches
3850 .grammars()
3851 .iter()
3852 .map(|grammar| grammar.redactions_config.as_ref())
3853 .collect::<Vec<_>>();
3854
3855 iter::from_fn(move || {
3856 let redacted_range = syntax_matches
3857 .peek()
3858 .and_then(|mat| {
3859 configs[mat.grammar_index].and_then(|config| {
3860 mat.captures
3861 .iter()
3862 .find(|capture| capture.index == config.redaction_capture_ix)
3863 })
3864 })
3865 .map(|mat| mat.node.byte_range());
3866 syntax_matches.advance();
3867 redacted_range
3868 })
3869 }
3870
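    /// Returns the ranges of language injections intersecting the given range,
    /// along with the language injected into each range.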
3871 pub fn injections_intersecting_range<T: ToOffset>(
3872 &self,
3873 range: Range<T>,
3874 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3875 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3876
3877 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3878 grammar
3879 .injection_config
3880 .as_ref()
3881 .map(|config| &config.query)
3882 });
3883
3884 let configs = syntax_matches
3885 .grammars()
3886 .iter()
3887 .map(|grammar| grammar.injection_config.as_ref())
3888 .collect::<Vec<_>>();
3889
3890 iter::from_fn(move || {
3891 let ranges = syntax_matches.peek().and_then(|mat| {
3892 let config = &configs[mat.grammar_index]?;
3893 let content_capture_range = mat.captures.iter().find_map(|capture| {
3894 if capture.index == config.content_capture_ix {
3895 Some(capture.node.byte_range())
3896 } else {
3897 None
3898 }
3899 })?;
3900 let language = self.language_at(content_capture_range.start)?;
3901 Some((content_capture_range, language))
3902 });
3903 syntax_matches.advance();
3904 ranges
3905 })
3906 }
3907
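    /// Returns the byte ranges of variables captured by each language's
    /// debug-variables query within the given range.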
3908 pub fn debug_variable_ranges(
3909 &self,
3910 offset_range: Range<usize>,
3911 ) -> impl Iterator<Item = DebugVariableRanges> + '_ {
3912 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3913 grammar
3914 .debug_variables_config
3915 .as_ref()
3916 .map(|config| &config.query)
3917 });
3918
3919 let configs = syntax_matches
3920 .grammars()
3921 .iter()
3922 .map(|grammar| grammar.debug_variables_config.as_ref())
3923 .collect::<Vec<_>>();
3924
3925 iter::from_fn(move || {
3926 loop {
3927 let mat = syntax_matches.peek()?;
3928
3929 let variable_ranges = configs[mat.grammar_index].and_then(|config| {
3930 let full_range = mat.captures.iter().fold(
3931 Range {
3932 start: usize::MAX,
3933 end: 0,
3934 },
3935 |mut acc, next| {
3936 let byte_range = next.node.byte_range();
3937 if acc.start > byte_range.start {
3938 acc.start = byte_range.start;
3939 }
3940 if acc.end < byte_range.end {
3941 acc.end = byte_range.end;
3942 }
3943 acc
3944 },
3945 );
3946 if full_range.start > full_range.end {
3947 // We did not find a full spanning range of this match.
3948 return None;
3949 }
3950
3951 let captures = mat.captures.iter().filter_map(|capture| {
3952 Some((
3953 capture,
3954 config.captures.get(capture.index as usize).cloned()?,
3955 ))
3956 });
3957
3958 let mut variable_range = None;
3959 for (query, capture) in captures {
3960 if let DebugVariableCapture::Variable = capture {
3961 let _ = variable_range.insert(query.node.byte_range());
3962 }
3963 }
3964
3965 Some(DebugVariableRanges {
3966 buffer_id: self.remote_id(),
3967 range: variable_range?,
3968 })
3969 });
3970
3971 syntax_matches.advance();
3972 if variable_ranges.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. But if a match simply
                    // lacked a variable capture, we don't want to end the iterator early, so we loop
                    // around to the next match instead.
3975 return variable_ranges;
3976 }
3977 }
3978 })
3979 }
3980
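    /// Returns the runnables detected by each language's runnables query within
    /// the given range, along with their tags and extra captures.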
3981 pub fn runnable_ranges(
3982 &self,
3983 offset_range: Range<usize>,
3984 ) -> impl Iterator<Item = RunnableRange> + '_ {
3985 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3986 grammar.runnable_config.as_ref().map(|config| &config.query)
3987 });
3988
3989 let test_configs = syntax_matches
3990 .grammars()
3991 .iter()
3992 .map(|grammar| grammar.runnable_config.as_ref())
3993 .collect::<Vec<_>>();
3994
3995 iter::from_fn(move || {
3996 loop {
3997 let mat = syntax_matches.peek()?;
3998
3999 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4000 let mut run_range = None;
4001 let full_range = mat.captures.iter().fold(
4002 Range {
4003 start: usize::MAX,
4004 end: 0,
4005 },
4006 |mut acc, next| {
4007 let byte_range = next.node.byte_range();
4008 if acc.start > byte_range.start {
4009 acc.start = byte_range.start;
4010 }
4011 if acc.end < byte_range.end {
4012 acc.end = byte_range.end;
4013 }
4014 acc
4015 },
4016 );
4017 if full_range.start > full_range.end {
4018 // We did not find a full spanning range of this match.
4019 return None;
4020 }
4021 let extra_captures: SmallVec<[_; 1]> =
4022 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4023 test_configs
4024 .extra_captures
4025 .get(capture.index as usize)
4026 .cloned()
4027 .and_then(|tag_name| match tag_name {
4028 RunnableCapture::Named(name) => {
4029 Some((capture.node.byte_range(), name))
4030 }
4031 RunnableCapture::Run => {
4032 let _ = run_range.insert(capture.node.byte_range());
4033 None
4034 }
4035 })
4036 }));
4037 let run_range = run_range?;
4038 let tags = test_configs
4039 .query
4040 .property_settings(mat.pattern_index)
4041 .iter()
4042 .filter_map(|property| {
4043 if *property.key == *"tag" {
4044 property
4045 .value
4046 .as_ref()
4047 .map(|value| RunnableTag(value.to_string().into()))
4048 } else {
4049 None
4050 }
4051 })
4052 .collect();
4053 let extra_captures = extra_captures
4054 .into_iter()
4055 .map(|(range, name)| {
4056 (
4057 name.to_string(),
4058 self.text_for_range(range.clone()).collect::<String>(),
4059 )
4060 })
4061 .collect();
4062 // All tags should have the same range.
4063 Some(RunnableRange {
4064 run_range,
4065 full_range,
4066 runnable: Runnable {
4067 tags,
4068 language: mat.language,
4069 buffer: self.remote_id(),
4070 },
4071 extra_captures,
4072 buffer_id: self.remote_id(),
4073 })
4074 });
4075
4076 syntax_matches.advance();
4077 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. But if a match simply
                    // lacked a run marker, we don't want to end the iterator early, so we loop around
                    // to the next match instead.
4080 return test_range;
4081 }
4082 }
4083 })
4084 }
4085
    /// Returns each replica's selections intersecting the given range, optionally
    /// including the local replica's selections.
4087 #[allow(clippy::type_complexity)]
4088 pub fn selections_in_range(
4089 &self,
4090 range: Range<Anchor>,
4091 include_local: bool,
4092 ) -> impl Iterator<
4093 Item = (
4094 ReplicaId,
4095 bool,
4096 CursorShape,
4097 impl Iterator<Item = &Selection<Anchor>> + '_,
4098 ),
4099 > + '_ {
4100 self.remote_selections
4101 .iter()
4102 .filter(move |(replica_id, set)| {
4103 (include_local || **replica_id != self.text.replica_id())
4104 && !set.selections.is_empty()
4105 })
4106 .map(move |(replica_id, set)| {
4107 let start_ix = match set.selections.binary_search_by(|probe| {
4108 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4109 }) {
4110 Ok(ix) | Err(ix) => ix,
4111 };
4112 let end_ix = match set.selections.binary_search_by(|probe| {
4113 probe.start.cmp(&range.end, self).then(Ordering::Less)
4114 }) {
4115 Ok(ix) | Err(ix) => ix,
4116 };
4117
4118 (
4119 *replica_id,
4120 set.line_mode,
4121 set.cursor_shape,
4122 set.selections[start_ix..end_ix].iter(),
4123 )
4124 })
4125 }
4126
    /// Returns whether the buffer contains any diagnostics.
4128 pub fn has_diagnostics(&self) -> bool {
4129 !self.diagnostics.is_empty()
4130 }
4131
4132 /// Returns all the diagnostics intersecting the given range.
4133 pub fn diagnostics_in_range<'a, T, O>(
4134 &'a self,
4135 search_range: Range<T>,
4136 reversed: bool,
4137 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4138 where
4139 T: 'a + Clone + ToOffset,
4140 O: 'a + FromAnchor,
4141 {
4142 let mut iterators: Vec<_> = self
4143 .diagnostics
4144 .iter()
4145 .map(|(_, collection)| {
4146 collection
4147 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4148 .peekable()
4149 })
4150 .collect();
4151
4152 std::iter::from_fn(move || {
4153 let (next_ix, _) = iterators
4154 .iter_mut()
4155 .enumerate()
4156 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4157 .min_by(|(_, a), (_, b)| {
4158 let cmp = a
4159 .range
4160 .start
4161 .cmp(&b.range.start, self)
4162 // when range is equal, sort by diagnostic severity
4163 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4164 // and stabilize order with group_id
4165 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4166 if reversed { cmp.reverse() } else { cmp }
4167 })?;
4168 iterators[next_ix]
4169 .next()
4170 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4171 diagnostic,
4172 range: FromAnchor::from_anchor(&range.start, self)
4173 ..FromAnchor::from_anchor(&range.end, self),
4174 })
4175 })
4176 }
4177
4178 /// Returns all the diagnostic groups associated with the given
4179 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4181 pub fn diagnostic_groups(
4182 &self,
4183 language_server_id: Option<LanguageServerId>,
4184 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4185 let mut groups = Vec::new();
4186
4187 if let Some(language_server_id) = language_server_id {
4188 if let Ok(ix) = self
4189 .diagnostics
4190 .binary_search_by_key(&language_server_id, |e| e.0)
4191 {
4192 self.diagnostics[ix]
4193 .1
4194 .groups(language_server_id, &mut groups, self);
4195 }
4196 } else {
4197 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4198 diagnostics.groups(*language_server_id, &mut groups, self);
4199 }
4200 }
4201
4202 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4203 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4204 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4205 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4206 });
4207
4208 groups
4209 }
4210
4211 /// Returns an iterator over the diagnostics for the given group.
4212 pub fn diagnostic_group<O>(
4213 &self,
4214 group_id: usize,
4215 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4216 where
4217 O: FromAnchor + 'static,
4218 {
4219 self.diagnostics
4220 .iter()
4221 .flat_map(move |(_, set)| set.group(group_id, self))
4222 }
4223
4224 /// An integer version number that accounts for all updates besides
4225 /// the buffer's text itself (which is versioned via a version vector).
4226 pub fn non_text_state_update_count(&self) -> usize {
4227 self.non_text_state_update_count
4228 }
4229
    /// Returns a snapshot of the underlying file.
4231 pub fn file(&self) -> Option<&Arc<dyn File>> {
4232 self.file.as_ref()
4233 }
4234
4235 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4236 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4237 if let Some(file) = self.file() {
4238 if file.path().file_name().is_none() || include_root {
4239 Some(file.full_path(cx))
4240 } else {
4241 Some(file.path().to_path_buf())
4242 }
4243 } else {
4244 None
4245 }
4246 }
4247
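    /// Collects the distinct words in the given range into a map from word text
    /// to anchor range, optionally filtered by a fuzzy query string.
    ///
    /// A minimal usage sketch (not run as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for word in words.keys() {
    ///     println!("{word}");
    /// }
    /// ```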
4248 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4249 let query_str = query.fuzzy_contents;
4250 if query_str.map_or(false, |query| query.is_empty()) {
4251 return BTreeMap::default();
4252 }
4253
4254 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4255 language,
4256 override_id: None,
4257 }));
4258
4259 let mut query_ix = 0;
4260 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4261 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4262
4263 let mut words = BTreeMap::default();
4264 let mut current_word_start_ix = None;
4265 let mut chunk_ix = query.range.start;
4266 for chunk in self.chunks(query.range, false) {
4267 for (i, c) in chunk.text.char_indices() {
4268 let ix = chunk_ix + i;
4269 if classifier.is_word(c) {
4270 if current_word_start_ix.is_none() {
4271 current_word_start_ix = Some(ix);
4272 }
4273
4274 if let Some(query_chars) = &query_chars {
4275 if query_ix < query_len {
4276 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4277 query_ix += 1;
4278 }
4279 }
4280 }
4281 continue;
4282 } else if let Some(word_start) = current_word_start_ix.take() {
4283 if query_ix == query_len {
4284 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4285 let mut word_text = self.text_for_range(word_start..ix).peekable();
4286 let first_char = word_text
4287 .peek()
4288 .and_then(|first_chunk| first_chunk.chars().next());
                        // As a heuristic to reduce useless completions, skip "words" that
                        // start with a digit when `skip_digits` is set.
4290 if !query.skip_digits
4291 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4292 {
4293 words.insert(word_text.collect(), word_range);
4294 }
4295 }
4296 }
4297 query_ix = 0;
4298 }
4299 chunk_ix += chunk.text.len();
4300 }
4301
4302 words
4303 }
4304}
4305
4306pub struct WordsQuery<'a> {
    /// When set, only returns words that contain all of this string's characters, in order (matched case-insensitively).
4308 pub fuzzy_contents: Option<&'a str>,
4309 /// Skips words that start with a digit.
4310 pub skip_digits: bool,
    /// The buffer offset range within which to look for words.
4312 pub range: Range<usize>,
4313}
4314
4315fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4316 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4317}
4318
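/// Computes the indentation at the start of the given character stream: the
/// number of leading space or tab characters, with the kind taken from the
/// first of them.
///
/// A small illustrative sketch (not run as a doctest):
///
/// ```ignore
/// let indent = indent_size_for_text("    let x = 1;".chars());
/// assert_eq!(indent.len, 4); // four leading spaces
/// ```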
4319fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4320 let mut result = IndentSize::spaces(0);
4321 for c in text {
4322 let kind = match c {
4323 ' ' => IndentKind::Space,
4324 '\t' => IndentKind::Tab,
4325 _ => break,
4326 };
4327 if result.len == 0 {
4328 result.kind = kind;
4329 }
4330 result.len += 1;
4331 }
4332 result
4333}
4334
4335impl Clone for BufferSnapshot {
4336 fn clone(&self) -> Self {
4337 Self {
4338 text: self.text.clone(),
4339 syntax: self.syntax.clone(),
4340 file: self.file.clone(),
4341 remote_selections: self.remote_selections.clone(),
4342 diagnostics: self.diagnostics.clone(),
4343 language: self.language.clone(),
4344 non_text_state_update_count: self.non_text_state_update_count,
4345 }
4346 }
4347}
4348
4349impl Deref for BufferSnapshot {
4350 type Target = text::BufferSnapshot;
4351
4352 fn deref(&self) -> &Self::Target {
4353 &self.text
4354 }
4355}
4356
4357unsafe impl Send for BufferChunks<'_> {}
4358
4359impl<'a> BufferChunks<'a> {
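    /// Creates an iterator over the chunks of `text` within `range`, optionally
    /// annotating each chunk with syntax highlights and/or diagnostic severity.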
4360 pub(crate) fn new(
4361 text: &'a Rope,
4362 range: Range<usize>,
4363 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4364 diagnostics: bool,
4365 buffer_snapshot: Option<&'a BufferSnapshot>,
4366 ) -> Self {
4367 let mut highlights = None;
4368 if let Some((captures, highlight_maps)) = syntax {
4369 highlights = Some(BufferChunkHighlights {
4370 captures,
4371 next_capture: None,
4372 stack: Default::default(),
4373 highlight_maps,
4374 })
4375 }
4376
4377 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4378 let chunks = text.chunks_in_range(range.clone());
4379
4380 let mut this = BufferChunks {
4381 range,
4382 buffer_snapshot,
4383 chunks,
4384 diagnostic_endpoints,
4385 error_depth: 0,
4386 warning_depth: 0,
4387 information_depth: 0,
4388 hint_depth: 0,
4389 unnecessary_depth: 0,
4390 highlights,
4391 };
4392 this.initialize_diagnostic_endpoints();
4393 this
4394 }
4395
    /// Seeks to the given byte range in the buffer.
4397 pub fn seek(&mut self, range: Range<usize>) {
4398 let old_range = std::mem::replace(&mut self.range, range.clone());
4399 self.chunks.set_range(self.range.clone());
4400 if let Some(highlights) = self.highlights.as_mut() {
4401 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4402 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4403 highlights
4404 .stack
4405 .retain(|(end_offset, _)| *end_offset > range.start);
4406 if let Some(capture) = &highlights.next_capture {
4407 if range.start >= capture.node.start_byte() {
4408 let next_capture_end = capture.node.end_byte();
4409 if range.start < next_capture_end {
4410 highlights.stack.push((
4411 next_capture_end,
4412 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4413 ));
4414 }
4415 highlights.next_capture.take();
4416 }
4417 }
4418 } else if let Some(snapshot) = self.buffer_snapshot {
4419 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4420 *highlights = BufferChunkHighlights {
4421 captures,
4422 next_capture: None,
4423 stack: Default::default(),
4424 highlight_maps,
4425 };
4426 } else {
4427 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4428 // Seeking such BufferChunks is not supported.
4429 debug_assert!(
4430 false,
4431 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4432 );
4433 }
4434
4435 highlights.captures.set_byte_range(self.range.clone());
4436 self.initialize_diagnostic_endpoints();
4437 }
4438 }
4439
4440 fn initialize_diagnostic_endpoints(&mut self) {
4441 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4442 if let Some(buffer) = self.buffer_snapshot {
4443 let mut diagnostic_endpoints = Vec::new();
4444 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4445 diagnostic_endpoints.push(DiagnosticEndpoint {
4446 offset: entry.range.start,
4447 is_start: true,
4448 severity: entry.diagnostic.severity,
4449 is_unnecessary: entry.diagnostic.is_unnecessary,
4450 });
4451 diagnostic_endpoints.push(DiagnosticEndpoint {
4452 offset: entry.range.end,
4453 is_start: false,
4454 severity: entry.diagnostic.severity,
4455 is_unnecessary: entry.diagnostic.is_unnecessary,
4456 });
4457 }
4458 diagnostic_endpoints
4459 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4460 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4461 self.hint_depth = 0;
4462 self.error_depth = 0;
4463 self.warning_depth = 0;
4464 self.information_depth = 0;
4465 }
4466 }
4467 }
4468
4469 /// The current byte offset in the buffer.
4470 pub fn offset(&self) -> usize {
4471 self.range.start
4472 }
4473
4474 pub fn range(&self) -> Range<usize> {
4475 self.range.clone()
4476 }
4477
4478 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4479 let depth = match endpoint.severity {
4480 DiagnosticSeverity::ERROR => &mut self.error_depth,
4481 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4482 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4483 DiagnosticSeverity::HINT => &mut self.hint_depth,
4484 _ => return,
4485 };
4486 if endpoint.is_start {
4487 *depth += 1;
4488 } else {
4489 *depth -= 1;
4490 }
4491
4492 if endpoint.is_unnecessary {
4493 if endpoint.is_start {
4494 self.unnecessary_depth += 1;
4495 } else {
4496 self.unnecessary_depth -= 1;
4497 }
4498 }
4499 }
4500
4501 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4502 if self.error_depth > 0 {
4503 Some(DiagnosticSeverity::ERROR)
4504 } else if self.warning_depth > 0 {
4505 Some(DiagnosticSeverity::WARNING)
4506 } else if self.information_depth > 0 {
4507 Some(DiagnosticSeverity::INFORMATION)
4508 } else if self.hint_depth > 0 {
4509 Some(DiagnosticSeverity::HINT)
4510 } else {
4511 None
4512 }
4513 }
4514
4515 fn current_code_is_unnecessary(&self) -> bool {
4516 self.unnecessary_depth > 0
4517 }
4518}
4519
4520impl<'a> Iterator for BufferChunks<'a> {
4521 type Item = Chunk<'a>;
4522
4523 fn next(&mut self) -> Option<Self::Item> {
4524 let mut next_capture_start = usize::MAX;
4525 let mut next_diagnostic_endpoint = usize::MAX;
4526
4527 if let Some(highlights) = self.highlights.as_mut() {
4528 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4529 if *parent_capture_end <= self.range.start {
4530 highlights.stack.pop();
4531 } else {
4532 break;
4533 }
4534 }
4535
4536 if highlights.next_capture.is_none() {
4537 highlights.next_capture = highlights.captures.next();
4538 }
4539
4540 while let Some(capture) = highlights.next_capture.as_ref() {
4541 if self.range.start < capture.node.start_byte() {
4542 next_capture_start = capture.node.start_byte();
4543 break;
4544 } else {
4545 let highlight_id =
4546 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4547 highlights
4548 .stack
4549 .push((capture.node.end_byte(), highlight_id));
4550 highlights.next_capture = highlights.captures.next();
4551 }
4552 }
4553 }
4554
4555 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4556 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4557 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4558 if endpoint.offset <= self.range.start {
4559 self.update_diagnostic_depths(endpoint);
4560 diagnostic_endpoints.next();
4561 } else {
4562 next_diagnostic_endpoint = endpoint.offset;
4563 break;
4564 }
4565 }
4566 }
4567 self.diagnostic_endpoints = diagnostic_endpoints;
4568
4569 if let Some(chunk) = self.chunks.peek() {
4570 let chunk_start = self.range.start;
4571 let mut chunk_end = (self.chunks.offset() + chunk.len())
4572 .min(next_capture_start)
4573 .min(next_diagnostic_endpoint);
4574 let mut highlight_id = None;
4575 if let Some(highlights) = self.highlights.as_ref() {
4576 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4577 chunk_end = chunk_end.min(*parent_capture_end);
4578 highlight_id = Some(*parent_highlight_id);
4579 }
4580 }
4581
4582 let slice =
4583 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4584 self.range.start = chunk_end;
4585 if self.range.start == self.chunks.offset() + chunk.len() {
4586 self.chunks.next().unwrap();
4587 }
4588
4589 Some(Chunk {
4590 text: slice,
4591 syntax_highlight_id: highlight_id,
4592 diagnostic_severity: self.current_diagnostic_severity(),
4593 is_unnecessary: self.current_code_is_unnecessary(),
4594 ..Default::default()
4595 })
4596 } else {
4597 None
4598 }
4599 }
4600}
4601
4602impl operation_queue::Operation for Operation {
4603 fn lamport_timestamp(&self) -> clock::Lamport {
4604 match self {
4605 Operation::Buffer(_) => {
4606 unreachable!("buffer operations should never be deferred at this layer")
4607 }
4608 Operation::UpdateDiagnostics {
4609 lamport_timestamp, ..
4610 }
4611 | Operation::UpdateSelections {
4612 lamport_timestamp, ..
4613 }
4614 | Operation::UpdateCompletionTriggers {
4615 lamport_timestamp, ..
4616 } => *lamport_timestamp,
4617 }
4618 }
4619}
4620
4621impl Default for Diagnostic {
4622 fn default() -> Self {
4623 Self {
4624 source: Default::default(),
4625 code: None,
4626 code_description: None,
4627 severity: DiagnosticSeverity::ERROR,
4628 message: Default::default(),
4629 markdown: None,
4630 group_id: 0,
4631 is_primary: false,
4632 is_disk_based: false,
4633 is_unnecessary: false,
4634 data: None,
4635 }
4636 }
4637}
4638
4639impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4641 pub fn spaces(len: u32) -> Self {
4642 Self {
4643 len,
4644 kind: IndentKind::Space,
4645 }
4646 }
4647
4648 /// Returns an [`IndentSize`] representing a tab.
4649 pub fn tab() -> Self {
4650 Self {
4651 len: 1,
4652 kind: IndentKind::Tab,
4653 }
4654 }
4655
4656 /// An iterator over the characters represented by this [`IndentSize`].
4657 pub fn chars(&self) -> impl Iterator<Item = char> {
4658 iter::repeat(self.char()).take(self.len as usize)
4659 }
4660
4661 /// The character representation of this [`IndentSize`].
4662 pub fn char(&self) -> char {
4663 match self.kind {
4664 IndentKind::Space => ' ',
4665 IndentKind::Tab => '\t',
4666 }
4667 }
4668
4669 /// Consumes the current [`IndentSize`] and returns a new one that has
4670 /// been shrunk or enlarged by the given size along the given direction.
4671 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4672 match direction {
4673 Ordering::Less => {
4674 if self.kind == size.kind && self.len >= size.len {
4675 self.len -= size.len;
4676 }
4677 }
4678 Ordering::Equal => {}
4679 Ordering::Greater => {
4680 if self.len == 0 {
4681 self = size;
4682 } else if self.kind == size.kind {
4683 self.len += size.len;
4684 }
4685 }
4686 }
4687 self
4688 }
4689
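    /// The length of this indent in columns, with tabs expanded to `tab_size`.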
4690 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4691 match self.kind {
4692 IndentKind::Space => self.len as usize,
4693 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4694 }
4695 }
4696}
4697
4698#[cfg(any(test, feature = "test-support"))]
4699pub struct TestFile {
4700 pub path: Arc<Path>,
4701 pub root_name: String,
4702 pub local_root: Option<PathBuf>,
4703}
4704
4705#[cfg(any(test, feature = "test-support"))]
4706impl File for TestFile {
4707 fn path(&self) -> &Arc<Path> {
4708 &self.path
4709 }
4710
4711 fn full_path(&self, _: &gpui::App) -> PathBuf {
4712 PathBuf::from(&self.root_name).join(self.path.as_ref())
4713 }
4714
4715 fn as_local(&self) -> Option<&dyn LocalFile> {
4716 if self.local_root.is_some() {
4717 Some(self)
4718 } else {
4719 None
4720 }
4721 }
4722
4723 fn disk_state(&self) -> DiskState {
4724 unimplemented!()
4725 }
4726
4727 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4728 self.path().file_name().unwrap_or(self.root_name.as_ref())
4729 }
4730
4731 fn worktree_id(&self, _: &App) -> WorktreeId {
4732 WorktreeId::from_usize(0)
4733 }
4734
4735 fn to_proto(&self, _: &App) -> rpc::proto::File {
4736 unimplemented!()
4737 }
4738
4739 fn is_private(&self) -> bool {
4740 false
4741 }
4742}
4743
4744#[cfg(any(test, feature = "test-support"))]
4745impl LocalFile for TestFile {
4746 fn abs_path(&self, _cx: &App) -> PathBuf {
4747 PathBuf::from(self.local_root.as_ref().unwrap())
4748 .join(&self.root_name)
4749 .join(self.path.as_ref())
4750 }
4751
4752 fn load(&self, _cx: &App) -> Task<Result<String>> {
4753 unimplemented!()
4754 }
4755
4756 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4757 unimplemented!()
4758 }
4759}
4760
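/// Groups an ascending sequence of row numbers into contiguous ranges, each
/// containing at most `max_len` rows.
///
/// An illustrative sketch (not run as a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7, 9..10]);
/// ```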
4761pub(crate) fn contiguous_ranges(
4762 values: impl Iterator<Item = u32>,
4763 max_len: usize,
4764) -> impl Iterator<Item = Range<u32>> {
4765 let mut values = values;
4766 let mut current_range: Option<Range<u32>> = None;
4767 std::iter::from_fn(move || {
4768 loop {
4769 if let Some(value) = values.next() {
4770 if let Some(range) = &mut current_range {
4771 if value == range.end && range.len() < max_len {
4772 range.end += 1;
4773 continue;
4774 }
4775 }
4776
4777 let prev_range = current_range.clone();
4778 current_range = Some(value..(value + 1));
4779 if prev_range.is_some() {
4780 return prev_range;
4781 }
4782 } else {
4783 return current_range.take();
4784 }
4785 }
4786 })
4787}
4788
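/// Classifies characters as word, whitespace, or punctuation characters, taking
/// into account the word characters configured for a [`LanguageScope`].
///
/// A small usage sketch (not run as a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(true);
/// assert!(classifier.is_word('_'));
/// assert!(classifier.is_whitespace(' '));
/// ```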
4789#[derive(Default, Debug)]
4790pub struct CharClassifier {
4791 scope: Option<LanguageScope>,
4792 for_completion: bool,
4793 ignore_punctuation: bool,
4794}
4795
4796impl CharClassifier {
4797 pub fn new(scope: Option<LanguageScope>) -> Self {
4798 Self {
4799 scope,
4800 for_completion: false,
4801 ignore_punctuation: false,
4802 }
4803 }
4804
4805 pub fn for_completion(self, for_completion: bool) -> Self {
4806 Self {
4807 for_completion,
4808 ..self
4809 }
4810 }
4811
4812 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4813 Self {
4814 ignore_punctuation,
4815 ..self
4816 }
4817 }
4818
4819 pub fn is_whitespace(&self, c: char) -> bool {
4820 self.kind(c) == CharKind::Whitespace
4821 }
4822
4823 pub fn is_word(&self, c: char) -> bool {
4824 self.kind(c) == CharKind::Word
4825 }
4826
4827 pub fn is_punctuation(&self, c: char) -> bool {
4828 self.kind(c) == CharKind::Punctuation
4829 }
4830
4831 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4832 if c.is_alphanumeric() || c == '_' {
4833 return CharKind::Word;
4834 }
4835
4836 if let Some(scope) = &self.scope {
4837 let characters = if self.for_completion {
4838 scope.completion_query_characters()
4839 } else {
4840 scope.word_characters()
4841 };
4842 if let Some(characters) = characters {
4843 if characters.contains(&c) {
4844 return CharKind::Word;
4845 }
4846 }
4847 }
4848
4849 if c.is_whitespace() {
4850 return CharKind::Whitespace;
4851 }
4852
4853 if ignore_punctuation {
4854 CharKind::Word
4855 } else {
4856 CharKind::Punctuation
4857 }
4858 }
4859
4860 pub fn kind(&self, c: char) -> CharKind {
4861 self.kind_with(c, self.ignore_punctuation)
4862 }
4863}
4864
4865/// Find all of the ranges of whitespace that occur at the ends of lines
4866/// in the given rope.
4867///
4868/// This could also be done with a regex search, but this implementation
4869/// avoids copying text.
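///
/// An illustrative sketch (not run as a doctest):
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\n");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```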
4870pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4871 let mut ranges = Vec::new();
4872
4873 let mut offset = 0;
4874 let mut prev_chunk_trailing_whitespace_range = 0..0;
4875 for chunk in rope.chunks() {
4876 let mut prev_line_trailing_whitespace_range = 0..0;
4877 for (i, line) in chunk.split('\n').enumerate() {
4878 let line_end_offset = offset + line.len();
4879 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4880 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4881
4882 if i == 0 && trimmed_line_len == 0 {
4883 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4884 }
4885 if !prev_line_trailing_whitespace_range.is_empty() {
4886 ranges.push(prev_line_trailing_whitespace_range);
4887 }
4888
4889 offset = line_end_offset + 1;
4890 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4891 }
4892
4893 offset -= 1;
4894 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4895 }
4896
4897 if !prev_chunk_trailing_whitespace_range.is_empty() {
4898 ranges.push(prev_chunk_trailing_whitespace_range);
4899 }
4900
4901 ranges
4902}