1pub use crate::{
2 Grammar, Language, LanguageRegistry,
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto,
6};
7use crate::{
8 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
9 TreeSitterOptions,
10 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
11 language_settings::{LanguageSettings, language_settings},
12 outline::OutlineItem,
13 syntax_map::{
14 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
15 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
16 },
17 task_context::RunnableRange,
18 text_diff::text_diff,
19};
20use anyhow::{Context as _, Result, anyhow};
21use async_watch as watch;
22pub use clock::ReplicaId;
23use clock::{AGENT_REPLICA_ID, Lamport};
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
29 Task, TaskLabel, TextStyle,
30};
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 ffi::OsStr,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, Range},
51 path::{Path, PathBuf},
52 rc,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
63 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
64 ToPointUtf16, Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{RangeExt, debug_panic, maybe};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76/// A label for the background task spawned by the buffer to compute
77/// a diff against the contents of its file.
78pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
79
/// Indicates whether a [`Buffer`] has permission to edit.
81#[derive(PartialEq, Clone, Copy, Debug)]
82pub enum Capability {
83 /// The buffer is a mutable replica.
84 ReadWrite,
85 /// The buffer is a read-only replica.
86 ReadOnly,
87}
88
89pub type BufferRow = u32;
90
91/// An in-memory representation of a source code file, including its text,
92/// syntax trees, git status, and diagnostics.
93pub struct Buffer {
94 text: TextBuffer,
95 branch_state: Option<BufferBranchState>,
96 /// Filesystem state, `None` when there is no path.
97 file: Option<Arc<dyn File>>,
98 /// The mtime of the file when this buffer was last loaded from
99 /// or saved to disk.
100 saved_mtime: Option<MTime>,
101 /// The version vector when this buffer was last loaded from
102 /// or saved to disk.
103 saved_version: clock::Global,
104 preview_version: clock::Global,
105 transaction_depth: usize,
106 was_dirty_before_starting_transaction: Option<bool>,
107 reload_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 autoindent_requests: Vec<Arc<AutoindentRequest>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell holds `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
177#[serde(rename_all = "snake_case")]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190#[derive(Clone, Debug)]
191struct SelectionSet {
192 line_mode: bool,
193 cursor_shape: CursorShape,
194 selections: Arc<[Selection<Anchor>]>,
195 lamport_timestamp: clock::Lamport,
196}
197
198/// A diagnostic associated with a certain range of a buffer.
199#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
200pub struct Diagnostic {
201 /// The name of the service that produced this diagnostic.
202 pub source: Option<String>,
203 /// A machine-readable code that identifies this diagnostic.
204 pub code: Option<NumberOrString>,
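    /// An optional URL pointing to documentation that describes this diagnostic's code.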
205 pub code_description: Option<lsp::Url>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
    /// The human-readable message in Markdown format, if available.
211 pub markdown: Option<String>,
212 /// An id that identifies the group to which this diagnostic belongs.
213 ///
214 /// When a language server produces a diagnostic with
215 /// one or more associated diagnostics, those diagnostics are all
216 /// assigned a single group ID.
217 pub group_id: usize,
218 /// Whether this diagnostic is the primary diagnostic for its group.
219 ///
220 /// In a given group, the primary diagnostic is the top-level diagnostic
221 /// returned by the language server. The non-primary diagnostics are the
222 /// associated diagnostics.
223 pub is_primary: bool,
224 /// Whether this diagnostic is considered to originate from an analysis of
225 /// files on disk, as opposed to any unsaved buffer contents. This is a
226 /// property of a given diagnostic source, and is configured for a given
227 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
228 /// for the language server.
229 pub is_disk_based: bool,
230 /// Whether this diagnostic marks unnecessary code.
231 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic. It is passed back to the
    /// language server when code actions are requested for this diagnostic.
233 pub data: Option<Value>,
234}
235
236/// An operation used to synchronize this buffer with its other replicas.
237#[derive(Clone, Debug, PartialEq)]
238pub enum Operation {
239 /// A text operation.
240 Buffer(text::Operation),
241
242 /// An update to the buffer's diagnostics.
243 UpdateDiagnostics {
244 /// The id of the language server that produced the new diagnostics.
245 server_id: LanguageServerId,
246 /// The diagnostics.
247 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
248 /// The buffer's lamport timestamp.
249 lamport_timestamp: clock::Lamport,
250 },
251
252 /// An update to the most recent selections in this buffer.
253 UpdateSelections {
254 /// The selections.
255 selections: Arc<[Selection<Anchor>]>,
256 /// The buffer's lamport timestamp.
257 lamport_timestamp: clock::Lamport,
258 /// Whether the selections are in 'line mode'.
259 line_mode: bool,
260 /// The [`CursorShape`] associated with these selections.
261 cursor_shape: CursorShape,
262 },
263
264 /// An update to the characters that should trigger autocompletion
265 /// for this buffer.
266 UpdateCompletionTriggers {
267 /// The characters that trigger autocompletion.
268 triggers: Vec<String>,
269 /// The buffer's lamport timestamp.
270 lamport_timestamp: clock::Lamport,
271 /// The language server ID.
272 server_id: LanguageServerId,
273 },
274}
275
276/// An event that occurs in a buffer.
277#[derive(Clone, Debug, PartialEq)]
278pub enum BufferEvent {
279 /// The buffer was changed in a way that must be
280 /// propagated to its other replicas.
281 Operation {
282 operation: Operation,
283 is_local: bool,
284 },
285 /// The buffer was edited.
286 Edited,
287 /// The buffer's `dirty` bit changed.
288 DirtyChanged,
289 /// The buffer was saved.
290 Saved,
291 /// The buffer's file was changed on disk.
292 FileHandleChanged,
293 /// The buffer was reloaded.
294 Reloaded,
    /// The buffer needs to be reloaded.
296 ReloadNeeded,
297 /// The buffer's language was changed.
298 LanguageChanged,
299 /// The buffer's syntax trees were updated.
300 Reparsed,
301 /// The buffer's diagnostics were updated.
302 DiagnosticsUpdated,
303 /// The buffer gained or lost editing capabilities.
304 CapabilityChanged,
305 /// The buffer was explicitly requested to close.
306 Closed,
307 /// The buffer was discarded when closing.
308 Discarded,
309}
310
311/// The file associated with a buffer.
312pub trait File: Send + Sync + Any {
313 /// Returns the [`LocalFile`] associated with this file, if the
314 /// file is local.
315 fn as_local(&self) -> Option<&dyn LocalFile>;
316
317 /// Returns whether this file is local.
318 fn is_local(&self) -> bool {
319 self.as_local().is_some()
320 }
321
    /// Returns whether the file is new, present in storage, or has been deleted, along with
    /// metadata that is only available in some of those states, such as the modification time.
324 fn disk_state(&self) -> DiskState;
325
326 /// Returns the path of this file relative to the worktree's root directory.
327 fn path(&self) -> &Arc<Path>;
328
329 /// Returns the path of this file relative to the worktree's parent directory (this means it
330 /// includes the name of the worktree's root folder).
331 fn full_path(&self, cx: &App) -> PathBuf;
332
333 /// Returns the last component of this handle's absolute path. If this handle refers to the root
334 /// of its worktree, then this method will return the name of the worktree itself.
335 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
336
337 /// Returns the id of the worktree to which this file belongs.
338 ///
339 /// This is needed for looking up project-specific settings.
340 fn worktree_id(&self, cx: &App) -> WorktreeId;
341
342 /// Converts this file into a protobuf message.
343 fn to_proto(&self, cx: &App) -> rpc::proto::File;
344
    /// Returns whether Zed considers this to be a private file.
346 fn is_private(&self) -> bool;
347}
348
/// The file's storage status: whether it is stored (`Present`) and, if so, when it was last
/// modified. When the file is not stored, it is either `New` or `Deleted`. The UI distinguishes
/// these two states; for example, the buffer tab does not display a deletion indicator for new
/// files.
353#[derive(Copy, Clone, Debug, PartialEq)]
354pub enum DiskState {
355 /// File created in Zed that has not been saved.
356 New,
357 /// File present on the filesystem.
358 Present { mtime: MTime },
359 /// Deleted file that was previously present.
360 Deleted,
361}
362
363impl DiskState {
364 /// Returns the file's last known modification time on disk.
365 pub fn mtime(self) -> Option<MTime> {
366 match self {
367 DiskState::New => None,
368 DiskState::Present { mtime } => Some(mtime),
369 DiskState::Deleted => None,
370 }
371 }
372
373 pub fn exists(&self) -> bool {
374 match self {
375 DiskState::New => false,
376 DiskState::Present { .. } => true,
377 DiskState::Deleted => false,
378 }
379 }
380}
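
// A minimal sketch of how the states above map onto `mtime()` and `exists()`
// (illustrative only; `modified_at` stands in for a real `MTime` value):
//
//     assert_eq!(DiskState::New.mtime(), None);
//     assert!(!DiskState::Deleted.exists());
//     assert!(DiskState::Present { mtime: modified_at }.exists());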
381
382/// The file associated with a buffer, in the case where the file is on the local disk.
383pub trait LocalFile: File {
    /// Returns the absolute path of this file.
385 fn abs_path(&self, cx: &App) -> PathBuf;
386
387 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
388 fn load(&self, cx: &App) -> Task<Result<String>>;
389
390 /// Loads the file's contents from disk.
391 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
392}
393
394/// The auto-indent behavior associated with an editing operation.
395/// For some editing operations, each affected line of text has its
396/// indentation recomputed. For other operations, the entire block
397/// of edited text is adjusted uniformly.
398#[derive(Clone, Debug)]
399pub enum AutoindentMode {
400 /// Indent each line of inserted text.
401 EachLine,
402 /// Apply the same indentation adjustment to all of the lines
403 /// in a given insertion.
404 Block {
405 /// The original indentation column of the first line of each
406 /// insertion, if it has been copied.
407 ///
408 /// Knowing this makes it possible to preserve the relative indentation
409 /// of every line in the insertion from when it was copied.
410 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is then auto-indented to column `b`, then every other line of the insertion
        /// will have its indentation adjusted by the same difference, `b - a`.
414 original_indent_columns: Vec<Option<u32>>,
415 },
416}
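
// A worked example of the `Block` adjustment described above (illustrative only): if the
// copied block's first line originally sat at indent column `a = 4` and auto-indent places
// the first line of the insertion at column `b = 8`, the whole insertion is shifted by
// `b - a = 4` columns, so a copied line that was at column 8 ends up at column 12.
//
//     let mode = AutoindentMode::Block {
//         original_indent_columns: vec![Some(4)],
//     };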
417
418#[derive(Clone)]
419struct AutoindentRequest {
420 before_edit: BufferSnapshot,
421 entries: Vec<AutoindentRequestEntry>,
422 is_block_mode: bool,
423 ignore_empty_lines: bool,
424}
425
426#[derive(Debug, Clone)]
427struct AutoindentRequestEntry {
428 /// A range of the buffer whose indentation should be adjusted.
429 range: Range<Anchor>,
430 /// Whether or not these lines should be considered brand new, for the
431 /// purpose of auto-indent. When text is not new, its indentation will
432 /// only be adjusted if the suggested indentation level has *changed*
433 /// since the edit was made.
434 first_line_is_new: bool,
435 indent_size: IndentSize,
436 original_indent_column: Option<u32>,
437}
438
439#[derive(Debug)]
440struct IndentSuggestion {
441 basis_row: u32,
442 delta: Ordering,
443 within_error: bool,
444}
445
446struct BufferChunkHighlights<'a> {
447 captures: SyntaxMapCaptures<'a>,
448 next_capture: Option<SyntaxMapCapture<'a>>,
449 stack: Vec<(usize, HighlightId)>,
450 highlight_maps: Vec<HighlightMap>,
451}
452
453/// An iterator that yields chunks of a buffer's text, along with their
454/// syntax highlights and diagnostic status.
455pub struct BufferChunks<'a> {
456 buffer_snapshot: Option<&'a BufferSnapshot>,
457 range: Range<usize>,
458 chunks: text::Chunks<'a>,
459 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
460 error_depth: usize,
461 warning_depth: usize,
462 information_depth: usize,
463 hint_depth: usize,
464 unnecessary_depth: usize,
465 highlights: Option<BufferChunkHighlights<'a>>,
466}
467
468/// A chunk of a buffer's text, along with its syntax highlight and
469/// diagnostic status.
470#[derive(Clone, Debug, Default)]
471pub struct Chunk<'a> {
472 /// The text of the chunk.
473 pub text: &'a str,
474 /// The syntax highlighting style of the chunk.
475 pub syntax_highlight_id: Option<HighlightId>,
476 /// The highlight style that has been applied to this chunk in
477 /// the editor.
478 pub highlight_style: Option<HighlightStyle>,
    /// The severity of the diagnostic associated with this chunk, if any.
480 pub diagnostic_severity: Option<DiagnosticSeverity>,
481 /// Whether this chunk of text is marked as unnecessary.
482 pub is_unnecessary: bool,
483 /// Whether this chunk of text was originally a tab character.
484 pub is_tab: bool,
485}
486
487/// A set of edits to a given version of a buffer, computed asynchronously.
488#[derive(Debug)]
489pub struct Diff {
490 pub base_version: clock::Global,
491 pub line_ending: LineEnding,
492 pub edits: Vec<(Range<usize>, Arc<str>)>,
493}
494
495#[derive(Clone, Copy)]
496pub(crate) struct DiagnosticEndpoint {
497 offset: usize,
498 is_start: bool,
499 severity: DiagnosticSeverity,
500 is_unnecessary: bool,
501}
502
503/// A class of characters, used for characterizing a run of text.
504#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
505pub enum CharKind {
506 /// Whitespace.
507 Whitespace,
508 /// Punctuation.
509 Punctuation,
510 /// Word.
511 Word,
512}
513
/// A runnable is a set of data about a region of the buffer that can be resolved into a task.
515pub struct Runnable {
516 pub tags: SmallVec<[RunnableTag; 1]>,
517 pub language: Arc<Language>,
518 pub buffer: BufferId,
519}
520
521#[derive(Default, Clone, Debug)]
522pub struct HighlightedText {
523 pub text: SharedString,
524 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
525}
526
527#[derive(Default, Debug)]
528struct HighlightedTextBuilder {
529 pub text: String,
530 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
531}
532
533impl HighlightedText {
534 pub fn from_buffer_range<T: ToOffset>(
535 range: Range<T>,
536 snapshot: &text::BufferSnapshot,
537 syntax_snapshot: &SyntaxSnapshot,
538 override_style: Option<HighlightStyle>,
539 syntax_theme: &SyntaxTheme,
540 ) -> Self {
541 let mut highlighted_text = HighlightedTextBuilder::default();
542 highlighted_text.add_text_from_buffer_range(
543 range,
544 snapshot,
545 syntax_snapshot,
546 override_style,
547 syntax_theme,
548 );
549 highlighted_text.build()
550 }
551
552 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
553 gpui::StyledText::new(self.text.clone())
554 .with_default_highlights(default_style, self.highlights.iter().cloned())
555 }
556
    /// Returns the first line, with leading whitespace trimmed unless a highlight begins
    /// within it, along with a boolean indicating whether more lines follow.
559 pub fn first_line_preview(self) -> (Self, bool) {
560 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
561 let first_line = &self.text[..newline_ix];
562
563 // Trim leading whitespace, unless an edit starts prior to it.
564 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
565 if let Some((first_highlight_range, _)) = self.highlights.first() {
566 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
567 }
568
569 let preview_text = &first_line[preview_start_ix..];
570 let preview_highlights = self
571 .highlights
572 .into_iter()
573 .take_while(|(range, _)| range.start < newline_ix)
574 .filter_map(|(mut range, highlight)| {
575 range.start = range.start.saturating_sub(preview_start_ix);
576 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
577 if range.is_empty() {
578 None
579 } else {
580 Some((range, highlight))
581 }
582 });
583
584 let preview = Self {
585 text: SharedString::new(preview_text),
586 highlights: preview_highlights.collect(),
587 };
588
589 (preview, self.text.len() > newline_ix)
590 }
591}
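
// A minimal usage sketch for the helpers above (assumed context: `snapshot`, `syntax_snapshot`,
// `text_style`, and `cx` are hypothetical values available at the call site):
//
//     let syntax_theme = cx.theme().syntax();
//     let highlighted = HighlightedText::from_buffer_range(
//         0..snapshot.len(),
//         &snapshot,
//         &syntax_snapshot,
//         None,
//         &syntax_theme,
//     );
//     let (first_line, has_more_lines) = highlighted.clone().first_line_preview();
//     let styled = first_line.to_styled_text(&text_style);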
592
593impl HighlightedTextBuilder {
594 pub fn build(self) -> HighlightedText {
595 HighlightedText {
596 text: self.text.into(),
597 highlights: self.highlights,
598 }
599 }
600
601 pub fn add_text_from_buffer_range<T: ToOffset>(
602 &mut self,
603 range: Range<T>,
604 snapshot: &text::BufferSnapshot,
605 syntax_snapshot: &SyntaxSnapshot,
606 override_style: Option<HighlightStyle>,
607 syntax_theme: &SyntaxTheme,
608 ) {
609 let range = range.to_offset(snapshot);
610 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
611 let start = self.text.len();
612 self.text.push_str(chunk.text);
613 let end = self.text.len();
614
615 if let Some(mut highlight_style) = chunk
616 .syntax_highlight_id
617 .and_then(|id| id.style(syntax_theme))
618 {
619 if let Some(override_style) = override_style {
620 highlight_style.highlight(override_style);
621 }
622 self.highlights.push((start..end, highlight_style));
623 } else if let Some(override_style) = override_style {
624 self.highlights.push((start..end, override_style));
625 }
626 }
627 }
628
629 fn highlighted_chunks<'a>(
630 range: Range<usize>,
631 snapshot: &'a text::BufferSnapshot,
632 syntax_snapshot: &'a SyntaxSnapshot,
633 ) -> BufferChunks<'a> {
634 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
635 grammar.highlights_query.as_ref()
636 });
637
638 let highlight_maps = captures
639 .grammars()
640 .iter()
641 .map(|grammar| grammar.highlight_map())
642 .collect();
643
644 BufferChunks::new(
645 snapshot.as_rope(),
646 range,
647 Some((captures, highlight_maps)),
648 false,
649 None,
650 )
651 }
652}
653
654#[derive(Clone)]
655pub struct EditPreview {
656 old_snapshot: text::BufferSnapshot,
657 applied_edits_snapshot: text::BufferSnapshot,
658 syntax_snapshot: SyntaxSnapshot,
659}
660
661impl EditPreview {
662 pub fn highlight_edits(
663 &self,
664 current_snapshot: &BufferSnapshot,
665 edits: &[(Range<Anchor>, String)],
666 include_deletions: bool,
667 cx: &App,
668 ) -> HighlightedText {
669 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
670 return HighlightedText::default();
671 };
672
673 let mut highlighted_text = HighlightedTextBuilder::default();
674
675 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
676
677 let insertion_highlight_style = HighlightStyle {
678 background_color: Some(cx.theme().status().created_background),
679 ..Default::default()
680 };
681 let deletion_highlight_style = HighlightStyle {
682 background_color: Some(cx.theme().status().deleted_background),
683 ..Default::default()
684 };
685 let syntax_theme = cx.theme().syntax();
686
687 for (range, edit_text) in edits {
688 let edit_new_end_in_preview_snapshot = range
689 .end
690 .bias_right(&self.old_snapshot)
691 .to_offset(&self.applied_edits_snapshot);
692 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
693
694 let unchanged_range_in_preview_snapshot =
695 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
696 if !unchanged_range_in_preview_snapshot.is_empty() {
697 highlighted_text.add_text_from_buffer_range(
698 unchanged_range_in_preview_snapshot,
699 &self.applied_edits_snapshot,
700 &self.syntax_snapshot,
701 None,
702 &syntax_theme,
703 );
704 }
705
706 let range_in_current_snapshot = range.to_offset(current_snapshot);
707 if include_deletions && !range_in_current_snapshot.is_empty() {
708 highlighted_text.add_text_from_buffer_range(
709 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
712 Some(deletion_highlight_style),
713 &syntax_theme,
714 );
715 }
716
717 if !edit_text.is_empty() {
718 highlighted_text.add_text_from_buffer_range(
719 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
720 &self.applied_edits_snapshot,
721 &self.syntax_snapshot,
722 Some(insertion_highlight_style),
723 &syntax_theme,
724 );
725 }
726
727 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
728 }
729
730 highlighted_text.add_text_from_buffer_range(
731 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
732 &self.applied_edits_snapshot,
733 &self.syntax_snapshot,
734 None,
735 &syntax_theme,
736 );
737
738 highlighted_text.build()
739 }
740
741 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
742 let (first, _) = edits.first()?;
743 let (last, _) = edits.last()?;
744
745 let start = first
746 .start
747 .bias_left(&self.old_snapshot)
748 .to_point(&self.applied_edits_snapshot);
749 let end = last
750 .end
751 .bias_right(&self.old_snapshot)
752 .to_point(&self.applied_edits_snapshot);
753
754 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
755 let range = Point::new(start.row, 0)
756 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
757
758 Some(range.to_offset(&self.applied_edits_snapshot))
759 }
760}
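
// A minimal sketch tying `Buffer::preview_edits` to `EditPreview::highlight_edits` (assumed
// context: `buffer` is a `&Buffer`, `edits` is an `Arc<[(Range<Anchor>, String)]>`, and `cx`
// is an `&App`):
//
//     let preview_task = buffer.preview_edits(edits.clone(), cx);
//     // ... await `preview_task` on the foreground executor to obtain an `EditPreview` ...
//     let highlighted = preview.highlight_edits(&buffer.snapshot(), &edits, true, cx);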
761
762#[derive(Clone, Debug, PartialEq, Eq)]
763pub struct BracketMatch {
764 pub open_range: Range<usize>,
765 pub close_range: Range<usize>,
766 pub newline_only: bool,
767}
768
769impl Buffer {
770 /// Create a new buffer with the given base text.
771 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
772 Self::build(
773 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
774 None,
775 Capability::ReadWrite,
776 )
777 }
778
779 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
780 pub fn local_normalized(
781 base_text_normalized: Rope,
782 line_ending: LineEnding,
783 cx: &Context<Self>,
784 ) -> Self {
785 Self::build(
786 TextBuffer::new_normalized(
787 0,
788 cx.entity_id().as_non_zero_u64().into(),
789 line_ending,
790 base_text_normalized,
791 ),
792 None,
793 Capability::ReadWrite,
794 )
795 }
796
797 /// Create a new buffer that is a replica of a remote buffer.
798 pub fn remote(
799 remote_id: BufferId,
800 replica_id: ReplicaId,
801 capability: Capability,
802 base_text: impl Into<String>,
803 ) -> Self {
804 Self::build(
805 TextBuffer::new(replica_id, remote_id, base_text.into()),
806 None,
807 capability,
808 )
809 }
810
811 /// Create a new buffer that is a replica of a remote buffer, populating its
812 /// state from the given protobuf message.
813 pub fn from_proto(
814 replica_id: ReplicaId,
815 capability: Capability,
816 message: proto::BufferState,
817 file: Option<Arc<dyn File>>,
818 ) -> Result<Self> {
819 let buffer_id = BufferId::new(message.id)
820 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
821 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
822 let mut this = Self::build(buffer, file, capability);
823 this.text.set_line_ending(proto::deserialize_line_ending(
824 rpc::proto::LineEnding::from_i32(message.line_ending)
825 .ok_or_else(|| anyhow!("missing line_ending"))?,
826 ));
827 this.saved_version = proto::deserialize_version(&message.saved_version);
828 this.saved_mtime = message.saved_mtime.map(|time| time.into());
829 Ok(this)
830 }
831
832 /// Serialize the buffer's state to a protobuf message.
833 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
834 proto::BufferState {
835 id: self.remote_id().into(),
836 file: self.file.as_ref().map(|f| f.to_proto(cx)),
837 base_text: self.base_text().to_string(),
838 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
839 saved_version: proto::serialize_version(&self.saved_version),
840 saved_mtime: self.saved_mtime.map(|time| time.into()),
841 }
842 }
843
844 /// Serialize as protobufs all of the changes to the buffer since the given version.
845 pub fn serialize_ops(
846 &self,
847 since: Option<clock::Global>,
848 cx: &App,
849 ) -> Task<Vec<proto::Operation>> {
850 let mut operations = Vec::new();
851 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
852
853 operations.extend(self.remote_selections.iter().map(|(_, set)| {
854 proto::serialize_operation(&Operation::UpdateSelections {
855 selections: set.selections.clone(),
856 lamport_timestamp: set.lamport_timestamp,
857 line_mode: set.line_mode,
858 cursor_shape: set.cursor_shape,
859 })
860 }));
861
862 for (server_id, diagnostics) in &self.diagnostics {
863 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
864 lamport_timestamp: self.diagnostics_timestamp,
865 server_id: *server_id,
866 diagnostics: diagnostics.iter().cloned().collect(),
867 }));
868 }
869
870 for (server_id, completions) in &self.completion_triggers_per_language_server {
871 operations.push(proto::serialize_operation(
872 &Operation::UpdateCompletionTriggers {
873 triggers: completions.iter().cloned().collect(),
874 lamport_timestamp: self.completion_triggers_timestamp,
875 server_id: *server_id,
876 },
877 ));
878 }
879
880 let text_operations = self.text.operations().clone();
881 cx.background_spawn(async move {
882 let since = since.unwrap_or_default();
883 operations.extend(
884 text_operations
885 .iter()
886 .filter(|(_, op)| !since.observed(op.timestamp()))
887 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
888 );
889 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
890 operations
891 })
892 }
893
894 /// Assign a language to the buffer, returning the buffer.
895 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
896 self.set_language(Some(language), cx);
897 self
898 }
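
    // A minimal sketch of constructing a local buffer and assigning a language (assumed
    // context: `language` is an `Arc<Language>` and `cx` is a `&mut App`):
    //
    //     let buffer = cx.new(|cx| {
    //         Buffer::local("fn main() {}\n", cx).with_language(language.clone(), cx)
    //     });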
899
900 /// Returns the [`Capability`] of this buffer.
901 pub fn capability(&self) -> Capability {
902 self.capability
903 }
904
905 /// Whether this buffer can only be read.
906 pub fn read_only(&self) -> bool {
907 self.capability == Capability::ReadOnly
908 }
909
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
911 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
912 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
913 let snapshot = buffer.snapshot();
914 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
915 Self {
916 saved_mtime,
917 saved_version: buffer.version(),
918 preview_version: buffer.version(),
919 reload_task: None,
920 transaction_depth: 0,
921 was_dirty_before_starting_transaction: None,
922 has_unsaved_edits: Cell::new((buffer.version(), false)),
923 text: buffer,
924 branch_state: None,
925 file,
926 capability,
927 syntax_map,
928 reparse: None,
929 non_text_state_update_count: 0,
930 sync_parse_timeout: Duration::from_millis(1),
931 parse_status: async_watch::channel(ParseStatus::Idle),
932 autoindent_requests: Default::default(),
933 pending_autoindent: Default::default(),
934 language: None,
935 remote_selections: Default::default(),
936 diagnostics: Default::default(),
937 diagnostics_timestamp: Default::default(),
938 completion_triggers: Default::default(),
939 completion_triggers_per_language_server: Default::default(),
940 completion_triggers_timestamp: Default::default(),
941 deferred_ops: OperationQueue::new(),
942 has_conflict: false,
943 change_bits: Default::default(),
944 _subscriptions: Vec::new(),
945 }
946 }
947
948 pub fn build_snapshot(
949 text: Rope,
950 language: Option<Arc<Language>>,
951 language_registry: Option<Arc<LanguageRegistry>>,
952 cx: &mut App,
953 ) -> impl Future<Output = BufferSnapshot> + use<> {
954 let entity_id = cx.reserve_entity::<Self>().entity_id();
955 let buffer_id = entity_id.as_non_zero_u64().into();
956 async move {
957 let text =
958 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
959 let mut syntax = SyntaxMap::new(&text).snapshot();
960 if let Some(language) = language.clone() {
961 let text = text.clone();
962 let language = language.clone();
963 let language_registry = language_registry.clone();
964 syntax.reparse(&text, language_registry, language);
965 }
966 BufferSnapshot {
967 text,
968 syntax,
969 file: None,
970 diagnostics: Default::default(),
971 remote_selections: Default::default(),
972 language,
973 non_text_state_update_count: 0,
974 }
975 }
976 }
977
978 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
979 let entity_id = cx.reserve_entity::<Self>().entity_id();
980 let buffer_id = entity_id.as_non_zero_u64().into();
981 let text =
982 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
983 let syntax = SyntaxMap::new(&text).snapshot();
984 BufferSnapshot {
985 text,
986 syntax,
987 file: None,
988 diagnostics: Default::default(),
989 remote_selections: Default::default(),
990 language: None,
991 non_text_state_update_count: 0,
992 }
993 }
994
995 #[cfg(any(test, feature = "test-support"))]
996 pub fn build_snapshot_sync(
997 text: Rope,
998 language: Option<Arc<Language>>,
999 language_registry: Option<Arc<LanguageRegistry>>,
1000 cx: &mut App,
1001 ) -> BufferSnapshot {
1002 let entity_id = cx.reserve_entity::<Self>().entity_id();
1003 let buffer_id = entity_id.as_non_zero_u64().into();
1004 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1005 let mut syntax = SyntaxMap::new(&text).snapshot();
1006 if let Some(language) = language.clone() {
1007 let text = text.clone();
1008 let language = language.clone();
1009 let language_registry = language_registry.clone();
1010 syntax.reparse(&text, language_registry, language);
1011 }
1012 BufferSnapshot {
1013 text,
1014 syntax,
1015 file: None,
1016 diagnostics: Default::default(),
1017 remote_selections: Default::default(),
1018 language,
1019 non_text_state_update_count: 0,
1020 }
1021 }
1022
1023 /// Retrieve a snapshot of the buffer's current state. This is computationally
1024 /// cheap, and allows reading from the buffer on a background thread.
1025 pub fn snapshot(&self) -> BufferSnapshot {
1026 let text = self.text.snapshot();
1027 let mut syntax_map = self.syntax_map.lock();
1028 syntax_map.interpolate(&text);
1029 let syntax = syntax_map.snapshot();
1030
1031 BufferSnapshot {
1032 text,
1033 syntax,
1034 file: self.file.clone(),
1035 remote_selections: self.remote_selections.clone(),
1036 diagnostics: self.diagnostics.clone(),
1037 language: self.language.clone(),
1038 non_text_state_update_count: self.non_text_state_update_count,
1039 }
1040 }
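
    // A minimal sketch of the pattern the doc comment above describes: take a cheap snapshot
    // on the main thread, then read from it on a background thread (assumed context: called
    // from a method with access to a `Context<Buffer>`; the reads shown are illustrative):
    //
    //     let snapshot = self.snapshot();
    //     cx.background_spawn(async move {
    //         let _text = snapshot.text();
    //         let _end = snapshot.max_point();
    //     });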
1041
1042 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1043 let this = cx.entity();
1044 cx.new(|cx| {
1045 let mut branch = Self {
1046 branch_state: Some(BufferBranchState {
1047 base_buffer: this.clone(),
1048 merged_operations: Default::default(),
1049 }),
1050 language: self.language.clone(),
1051 has_conflict: self.has_conflict,
1052 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1053 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1054 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1055 };
1056 if let Some(language_registry) = self.language_registry() {
1057 branch.set_language_registry(language_registry);
1058 }
1059
1060 // Reparse the branch buffer so that we get syntax highlighting immediately.
1061 branch.reparse(cx);
1062
1063 branch
1064 })
1065 }
1066
1067 pub fn preview_edits(
1068 &self,
1069 edits: Arc<[(Range<Anchor>, String)]>,
1070 cx: &App,
1071 ) -> Task<EditPreview> {
1072 let registry = self.language_registry();
1073 let language = self.language().cloned();
1074 let old_snapshot = self.text.snapshot();
1075 let mut branch_buffer = self.text.branch();
1076 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1077 cx.background_spawn(async move {
1078 if !edits.is_empty() {
1079 if let Some(language) = language.clone() {
1080 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1081 }
1082
1083 branch_buffer.edit(edits.iter().cloned());
1084 let snapshot = branch_buffer.snapshot();
1085 syntax_snapshot.interpolate(&snapshot);
1086
1087 if let Some(language) = language {
1088 syntax_snapshot.reparse(&snapshot, registry, language);
1089 }
1090 }
1091 EditPreview {
1092 old_snapshot,
1093 applied_edits_snapshot: branch_buffer.snapshot(),
1094 syntax_snapshot,
1095 }
1096 })
1097 }
1098
1099 /// Applies all of the changes in this buffer that intersect any of the
1100 /// given `ranges` to its base buffer.
1101 ///
1102 /// If `ranges` is empty, then all changes will be applied. This buffer must
1103 /// be a branch buffer to call this method.
1104 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1105 let Some(base_buffer) = self.base_buffer() else {
1106 debug_panic!("not a branch buffer");
1107 return;
1108 };
1109
1110 let mut ranges = if ranges.is_empty() {
1111 &[0..usize::MAX]
1112 } else {
1113 ranges.as_slice()
1114 }
1115 .into_iter()
1116 .peekable();
1117
1118 let mut edits = Vec::new();
1119 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1120 let mut is_included = false;
1121 while let Some(range) = ranges.peek() {
1122 if range.end < edit.new.start {
1123 ranges.next().unwrap();
1124 } else {
1125 if range.start <= edit.new.end {
1126 is_included = true;
1127 }
1128 break;
1129 }
1130 }
1131
1132 if is_included {
1133 edits.push((
1134 edit.old.clone(),
1135 self.text_for_range(edit.new.clone()).collect::<String>(),
1136 ));
1137 }
1138 }
1139
1140 let operation = base_buffer.update(cx, |base_buffer, cx| {
1141 // cx.emit(BufferEvent::DiffBaseChanged);
1142 base_buffer.edit(edits, None, cx)
1143 });
1144
1145 if let Some(operation) = operation {
1146 if let Some(BufferBranchState {
1147 merged_operations, ..
1148 }) = &mut self.branch_state
1149 {
1150 merged_operations.push(operation);
1151 }
1152 }
1153 }
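
    // A minimal sketch of the branch-and-merge workflow above (assumed context: `buffer` is
    // an `Entity<Buffer>` and `cx` is a `&mut App`; the edit shown is illustrative):
    //
    //     let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    //     branch.update(cx, |branch, cx| {
    //         branch.edit([(0..0, "// new line\n")], None, cx);
    //         // An empty `ranges` vector applies every branch change to the base buffer.
    //         branch.merge_into_base(Vec::new(), cx);
    //     });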
1154
1155 fn on_base_buffer_event(
1156 &mut self,
1157 _: Entity<Buffer>,
1158 event: &BufferEvent,
1159 cx: &mut Context<Self>,
1160 ) {
1161 let BufferEvent::Operation { operation, .. } = event else {
1162 return;
1163 };
1164 let Some(BufferBranchState {
1165 merged_operations, ..
1166 }) = &mut self.branch_state
1167 else {
1168 return;
1169 };
1170
1171 let mut operation_to_undo = None;
1172 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1173 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1174 merged_operations.remove(ix);
1175 operation_to_undo = Some(operation.timestamp);
1176 }
1177 }
1178
1179 self.apply_ops([operation.clone()], cx);
1180
1181 if let Some(timestamp) = operation_to_undo {
1182 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1183 self.undo_operations(counts, cx);
1184 }
1185 }
1186
1187 #[cfg(test)]
1188 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1189 &self.text
1190 }
1191
1192 /// Retrieve a snapshot of the buffer's raw text, without any
1193 /// language-related state like the syntax tree or diagnostics.
1194 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1195 self.text.snapshot()
1196 }
1197
1198 /// The file associated with the buffer, if any.
1199 pub fn file(&self) -> Option<&Arc<dyn File>> {
1200 self.file.as_ref()
1201 }
1202
1203 /// The version of the buffer that was last saved or reloaded from disk.
1204 pub fn saved_version(&self) -> &clock::Global {
1205 &self.saved_version
1206 }
1207
1208 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1209 pub fn saved_mtime(&self) -> Option<MTime> {
1210 self.saved_mtime
1211 }
1212
1213 /// Assign a language to the buffer.
1214 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1215 self.non_text_state_update_count += 1;
1216 self.syntax_map.lock().clear(&self.text);
1217 self.language = language;
1218 self.was_changed();
1219 self.reparse(cx);
1220 cx.emit(BufferEvent::LanguageChanged);
1221 }
1222
1223 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1224 /// other languages if parts of the buffer are written in different languages.
1225 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1226 self.syntax_map
1227 .lock()
1228 .set_language_registry(language_registry);
1229 }
1230
1231 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1232 self.syntax_map.lock().language_registry()
1233 }
1234
1235 /// Assign the buffer a new [`Capability`].
1236 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1237 self.capability = capability;
1238 cx.emit(BufferEvent::CapabilityChanged)
1239 }
1240
1241 /// This method is called to signal that the buffer has been saved.
1242 pub fn did_save(
1243 &mut self,
1244 version: clock::Global,
1245 mtime: Option<MTime>,
1246 cx: &mut Context<Self>,
1247 ) {
1248 self.saved_version = version;
1249 self.has_unsaved_edits
1250 .set((self.saved_version().clone(), false));
1251 self.has_conflict = false;
1252 self.saved_mtime = mtime;
1253 self.was_changed();
1254 cx.emit(BufferEvent::Saved);
1255 cx.notify();
1256 }
1257
1258 /// This method is called to signal that the buffer has been discarded.
1259 pub fn discarded(&self, cx: &mut Context<Self>) {
1260 cx.emit(BufferEvent::Discarded);
1261 cx.notify();
1262 }
1263
1264 /// Reloads the contents of the buffer from disk.
1265 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1266 let (tx, rx) = futures::channel::oneshot::channel();
1267 let prev_version = self.text.version();
1268 self.reload_task = Some(cx.spawn(async move |this, cx| {
1269 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1270 let file = this.file.as_ref()?.as_local()?;
1271
1272 Some((file.disk_state().mtime(), file.load(cx)))
1273 })?
1274 else {
1275 return Ok(());
1276 };
1277
1278 let new_text = new_text.await?;
1279 let diff = this
1280 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1281 .await;
1282 this.update(cx, |this, cx| {
1283 if this.version() == diff.base_version {
1284 this.finalize_last_transaction();
1285 this.apply_diff(diff, cx);
1286 tx.send(this.finalize_last_transaction().cloned()).ok();
1287 this.has_conflict = false;
1288 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1289 } else {
1290 if !diff.edits.is_empty()
1291 || this
1292 .edits_since::<usize>(&diff.base_version)
1293 .next()
1294 .is_some()
1295 {
1296 this.has_conflict = true;
1297 }
1298
1299 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1300 }
1301
1302 this.reload_task.take();
1303 })
1304 }));
1305 rx
1306 }
1307
1308 /// This method is called to signal that the buffer has been reloaded.
1309 pub fn did_reload(
1310 &mut self,
1311 version: clock::Global,
1312 line_ending: LineEnding,
1313 mtime: Option<MTime>,
1314 cx: &mut Context<Self>,
1315 ) {
1316 self.saved_version = version;
1317 self.has_unsaved_edits
1318 .set((self.saved_version.clone(), false));
1319 self.text.set_line_ending(line_ending);
1320 self.saved_mtime = mtime;
1321 cx.emit(BufferEvent::Reloaded);
1322 cx.notify();
1323 }
1324
1325 /// Updates the [`File`] backing this buffer. This should be called when
1326 /// the file has changed or has been deleted.
1327 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1328 let was_dirty = self.is_dirty();
1329 let mut file_changed = false;
1330
1331 if let Some(old_file) = self.file.as_ref() {
1332 if new_file.path() != old_file.path() {
1333 file_changed = true;
1334 }
1335
1336 let old_state = old_file.disk_state();
1337 let new_state = new_file.disk_state();
1338 if old_state != new_state {
1339 file_changed = true;
1340 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1341 cx.emit(BufferEvent::ReloadNeeded)
1342 }
1343 }
1344 } else {
1345 file_changed = true;
1346 };
1347
1348 self.file = Some(new_file);
1349 if file_changed {
1350 self.was_changed();
1351 self.non_text_state_update_count += 1;
1352 if was_dirty != self.is_dirty() {
1353 cx.emit(BufferEvent::DirtyChanged);
1354 }
1355 cx.emit(BufferEvent::FileHandleChanged);
1356 cx.notify();
1357 }
1358 }
1359
1360 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1361 Some(self.branch_state.as_ref()?.base_buffer.clone())
1362 }
1363
1364 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1365 pub fn language(&self) -> Option<&Arc<Language>> {
1366 self.language.as_ref()
1367 }
1368
1369 /// Returns the [`Language`] at the given location.
1370 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1371 let offset = position.to_offset(self);
1372 self.syntax_map
1373 .lock()
1374 .layers_for_range(offset..offset, &self.text, false)
1375 .last()
1376 .map(|info| info.language.clone())
1377 .or_else(|| self.language.clone())
1378 }
1379
1380 /// Returns each [`Language`] for the active syntax layers at the given location.
1381 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1382 let offset = position.to_offset(self);
1383 let mut languages: Vec<Arc<Language>> = self
1384 .syntax_map
1385 .lock()
1386 .layers_for_range(offset..offset, &self.text, false)
1387 .map(|info| info.language.clone())
1388 .collect();
1389
1390 if languages.is_empty() {
1391 if let Some(buffer_language) = self.language() {
1392 languages.push(buffer_language.clone());
1393 }
1394 }
1395
1396 languages
1397 }
1398
1399 /// An integer version number that accounts for all updates besides
1400 /// the buffer's text itself (which is versioned via a version vector).
1401 pub fn non_text_state_update_count(&self) -> usize {
1402 self.non_text_state_update_count
1403 }
1404
1405 /// Whether the buffer is being parsed in the background.
1406 #[cfg(any(test, feature = "test-support"))]
1407 pub fn is_parsing(&self) -> bool {
1408 self.reparse.is_some()
1409 }
1410
1411 /// Indicates whether the buffer contains any regions that may be
1412 /// written in a language that hasn't been loaded yet.
1413 pub fn contains_unknown_injections(&self) -> bool {
1414 self.syntax_map.lock().contains_unknown_injections()
1415 }
1416
1417 #[cfg(test)]
1418 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1419 self.sync_parse_timeout = timeout;
1420 }
1421
1422 /// Called after an edit to synchronize the buffer's main parse tree with
1423 /// the buffer's new underlying state.
1424 ///
1425 /// Locks the syntax map and interpolates the edits since the last reparse
1426 /// into the foreground syntax tree.
1427 ///
1428 /// Then takes a stable snapshot of the syntax map before unlocking it.
1429 /// The snapshot with the interpolated edits is sent to a background thread,
1430 /// where we ask Tree-sitter to perform an incremental parse.
1431 ///
1432 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1433 /// waiting on the parse to complete. As soon as it completes, we proceed
1434 /// synchronously, unless a 1ms timeout elapses.
1435 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree still
    /// in the foreground. When the background parse completes, it calls back into
    /// the main thread and assigns the new parse state there.
1440 ///
1441 /// If the buffer or grammar changed since the start of the background parse,
1442 /// initiate an additional reparse recursively. To avoid concurrent parses
1443 /// for the same buffer, we only initiate a new parse if we are not already
1444 /// parsing in the background.
1445 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1446 if self.reparse.is_some() {
1447 return;
1448 }
1449 let language = if let Some(language) = self.language.clone() {
1450 language
1451 } else {
1452 return;
1453 };
1454
1455 let text = self.text_snapshot();
1456 let parsed_version = self.version();
1457
1458 let mut syntax_map = self.syntax_map.lock();
1459 syntax_map.interpolate(&text);
1460 let language_registry = syntax_map.language_registry();
1461 let mut syntax_snapshot = syntax_map.snapshot();
1462 drop(syntax_map);
1463
1464 let parse_task = cx.background_spawn({
1465 let language = language.clone();
1466 let language_registry = language_registry.clone();
1467 async move {
1468 syntax_snapshot.reparse(&text, language_registry, language);
1469 syntax_snapshot
1470 }
1471 });
1472
1473 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1474 match cx
1475 .background_executor()
1476 .block_with_timeout(self.sync_parse_timeout, parse_task)
1477 {
1478 Ok(new_syntax_snapshot) => {
1479 self.did_finish_parsing(new_syntax_snapshot, cx);
1480 self.reparse = None;
1481 }
1482 Err(parse_task) => {
1483 self.reparse = Some(cx.spawn(async move |this, cx| {
1484 let new_syntax_map = parse_task.await;
1485 this.update(cx, move |this, cx| {
1486 let grammar_changed =
1487 this.language.as_ref().map_or(true, |current_language| {
1488 !Arc::ptr_eq(&language, current_language)
1489 });
1490 let language_registry_changed = new_syntax_map
1491 .contains_unknown_injections()
1492 && language_registry.map_or(false, |registry| {
1493 registry.version() != new_syntax_map.language_registry_version()
1494 });
1495 let parse_again = language_registry_changed
1496 || grammar_changed
1497 || this.version.changed_since(&parsed_version);
1498 this.did_finish_parsing(new_syntax_map, cx);
1499 this.reparse = None;
1500 if parse_again {
1501 this.reparse(cx);
1502 }
1503 })
1504 .ok();
1505 }));
1506 }
1507 }
1508 }
1509
1510 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1511 self.was_changed();
1512 self.non_text_state_update_count += 1;
1513 self.syntax_map.lock().did_parse(syntax_snapshot);
1514 self.request_autoindent(cx);
1515 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1516 cx.emit(BufferEvent::Reparsed);
1517 cx.notify();
1518 }
1519
1520 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1521 self.parse_status.1.clone()
1522 }
1523
1524 /// Assign to the buffer a set of diagnostics created by a given language server.
1525 pub fn update_diagnostics(
1526 &mut self,
1527 server_id: LanguageServerId,
1528 diagnostics: DiagnosticSet,
1529 cx: &mut Context<Self>,
1530 ) {
1531 let lamport_timestamp = self.text.lamport_clock.tick();
1532 let op = Operation::UpdateDiagnostics {
1533 server_id,
1534 diagnostics: diagnostics.iter().cloned().collect(),
1535 lamport_timestamp,
1536 };
1537 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1538 self.send_operation(op, true, cx);
1539 }
1540
1541 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1542 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1543 return None;
1544 };
1545 Some(&self.diagnostics[idx].1)
1546 }
1547
1548 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1549 if let Some(indent_sizes) = self.compute_autoindents() {
1550 let indent_sizes = cx.background_spawn(indent_sizes);
1551 match cx
1552 .background_executor()
1553 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1554 {
1555 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1556 Err(indent_sizes) => {
1557 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1558 let indent_sizes = indent_sizes.await;
1559 this.update(cx, |this, cx| {
1560 this.apply_autoindents(indent_sizes, cx);
1561 })
1562 .ok();
1563 }));
1564 }
1565 }
1566 } else {
1567 self.autoindent_requests.clear();
1568 }
1569 }
1570
1571 fn compute_autoindents(
1572 &self,
1573 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1574 let max_rows_between_yields = 100;
1575 let snapshot = self.snapshot();
1576 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1577 return None;
1578 }
1579
1580 let autoindent_requests = self.autoindent_requests.clone();
1581 Some(async move {
1582 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1583 for request in autoindent_requests {
1584 // Resolve each edited range to its row in the current buffer and in the
1585 // buffer before this batch of edits.
1586 let mut row_ranges = Vec::new();
1587 let mut old_to_new_rows = BTreeMap::new();
1588 let mut language_indent_sizes_by_new_row = Vec::new();
1589 for entry in &request.entries {
1590 let position = entry.range.start;
1591 let new_row = position.to_point(&snapshot).row;
1592 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1593 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1594
1595 if !entry.first_line_is_new {
1596 let old_row = position.to_point(&request.before_edit).row;
1597 old_to_new_rows.insert(old_row, new_row);
1598 }
1599 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1600 }
1601
1602 // Build a map containing the suggested indentation for each of the edited lines
1603 // with respect to the state of the buffer before these edits. This map is keyed
1604 // by the rows for these lines in the current state of the buffer.
1605 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1606 let old_edited_ranges =
1607 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1608 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1609 let mut language_indent_size = IndentSize::default();
1610 for old_edited_range in old_edited_ranges {
1611 let suggestions = request
1612 .before_edit
1613 .suggest_autoindents(old_edited_range.clone())
1614 .into_iter()
1615 .flatten();
1616 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1617 if let Some(suggestion) = suggestion {
1618 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1619
1620 // Find the indent size based on the language for this row.
1621 while let Some((row, size)) = language_indent_sizes.peek() {
1622 if *row > new_row {
1623 break;
1624 }
1625 language_indent_size = *size;
1626 language_indent_sizes.next();
1627 }
1628
1629 let suggested_indent = old_to_new_rows
1630 .get(&suggestion.basis_row)
1631 .and_then(|from_row| {
1632 Some(old_suggestions.get(from_row).copied()?.0)
1633 })
1634 .unwrap_or_else(|| {
1635 request
1636 .before_edit
1637 .indent_size_for_line(suggestion.basis_row)
1638 })
1639 .with_delta(suggestion.delta, language_indent_size);
1640 old_suggestions
1641 .insert(new_row, (suggested_indent, suggestion.within_error));
1642 }
1643 }
1644 yield_now().await;
1645 }
1646
1647 // Compute new suggestions for each line, but only include them in the result
1648 // if they differ from the old suggestion for that line.
1649 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1650 let mut language_indent_size = IndentSize::default();
1651 for (row_range, original_indent_column) in row_ranges {
1652 let new_edited_row_range = if request.is_block_mode {
1653 row_range.start..row_range.start + 1
1654 } else {
1655 row_range.clone()
1656 };
1657
1658 let suggestions = snapshot
1659 .suggest_autoindents(new_edited_row_range.clone())
1660 .into_iter()
1661 .flatten();
1662 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1663 if let Some(suggestion) = suggestion {
1664 // Find the indent size based on the language for this row.
1665 while let Some((row, size)) = language_indent_sizes.peek() {
1666 if *row > new_row {
1667 break;
1668 }
1669 language_indent_size = *size;
1670 language_indent_sizes.next();
1671 }
1672
1673 let suggested_indent = indent_sizes
1674 .get(&suggestion.basis_row)
1675 .copied()
1676 .map(|e| e.0)
1677 .unwrap_or_else(|| {
1678 snapshot.indent_size_for_line(suggestion.basis_row)
1679 })
1680 .with_delta(suggestion.delta, language_indent_size);
1681
1682 if old_suggestions.get(&new_row).map_or(
1683 true,
1684 |(old_indentation, was_within_error)| {
1685 suggested_indent != *old_indentation
1686 && (!suggestion.within_error || *was_within_error)
1687 },
1688 ) {
1689 indent_sizes.insert(
1690 new_row,
1691 (suggested_indent, request.ignore_empty_lines),
1692 );
1693 }
1694 }
1695 }
1696
1697 if let (true, Some(original_indent_column)) =
1698 (request.is_block_mode, original_indent_column)
1699 {
1700 let new_indent =
1701 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1702 *indent
1703 } else {
1704 snapshot.indent_size_for_line(row_range.start)
1705 };
1706 let delta = new_indent.len as i64 - original_indent_column as i64;
1707 if delta != 0 {
1708 for row in row_range.skip(1) {
1709 indent_sizes.entry(row).or_insert_with(|| {
1710 let mut size = snapshot.indent_size_for_line(row);
1711 if size.kind == new_indent.kind {
1712 match delta.cmp(&0) {
1713 Ordering::Greater => size.len += delta as u32,
1714 Ordering::Less => {
1715 size.len = size.len.saturating_sub(-delta as u32)
1716 }
1717 Ordering::Equal => {}
1718 }
1719 }
1720 (size, request.ignore_empty_lines)
1721 });
1722 }
1723 }
1724 }
1725
1726 yield_now().await;
1727 }
1728 }
1729
1730 indent_sizes
1731 .into_iter()
1732 .filter_map(|(row, (indent, ignore_empty_lines))| {
1733 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1734 None
1735 } else {
1736 Some((row, indent))
1737 }
1738 })
1739 .collect()
1740 })
1741 }
1742
1743 fn apply_autoindents(
1744 &mut self,
1745 indent_sizes: BTreeMap<u32, IndentSize>,
1746 cx: &mut Context<Self>,
1747 ) {
1748 self.autoindent_requests.clear();
1749
1750 let edits: Vec<_> = indent_sizes
1751 .into_iter()
1752 .filter_map(|(row, indent_size)| {
1753 let current_size = indent_size_for_line(self, row);
1754 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1755 })
1756 .collect();
1757
1758 let preserve_preview = self.preserve_preview();
1759 self.edit(edits, None, cx);
1760 if preserve_preview {
1761 self.refresh_preview();
1762 }
1763 }
1764
1765 /// Create a minimal edit that will cause the given row to be indented
1766 /// with the given size. After applying this edit, the length of the line
1767 /// will always be at least `new_size.len`.
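    ///
    /// A rough sketch of the expected shapes (illustrative only, not compiled as a
    /// doctest; the rows and sizes below are arbitrary):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces inserts two spaces at column 0.
    /// let grow = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(grow, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    ///
    /// // Shrinking deletes the excess leading columns instead.
    /// let shrink = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(4),
    ///     IndentSize::spaces(2),
    /// );
    /// assert_eq!(shrink, Some((Point::new(3, 0)..Point::new(3, 2), String::new())));
    /// ```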
1768 pub fn edit_for_indent_size_adjustment(
1769 row: u32,
1770 current_size: IndentSize,
1771 new_size: IndentSize,
1772 ) -> Option<(Range<Point>, String)> {
1773 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1775 Ordering::Greater => {
1776 let point = Point::new(row, 0);
1777 Some((
1778 point..point,
1779 iter::repeat(new_size.char())
1780 .take((new_size.len - current_size.len) as usize)
1781 .collect::<String>(),
1782 ))
1783 }
1784
1785 Ordering::Less => Some((
1786 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1787 String::new(),
1788 )),
1789
1790 Ordering::Equal => None,
1791 }
1792 } else {
1793 Some((
1794 Point::new(row, 0)..Point::new(row, current_size.len),
1795 iter::repeat(new_size.char())
1796 .take(new_size.len as usize)
1797 .collect::<String>(),
1798 ))
1799 }
1800 }
1801
1802 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1803 /// and the given new text.
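    ///
    /// A minimal usage sketch (illustrative; `new_text` is any replacement `String`,
    /// and the spawning and awaiting of the returned [`Task`] are elided):
    ///
    /// ```ignore
    /// // Kick off the diff computation on the background executor...
    /// let diff_task: Task<Diff> = buffer.diff(new_text, cx);
    /// // ...and once the task has resolved to `diff`, apply it back to the buffer
    /// // with a `&mut Context<Buffer>`. Hunks that conflict with edits made in the
    /// // meantime are discarded by `apply_diff`.
    /// let _transaction = buffer.apply_diff(diff, cx);
    /// ```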
1804 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1805 let old_text = self.as_rope().clone();
1806 let base_version = self.version();
1807 cx.background_executor()
1808 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1809 let old_text = old_text.to_string();
1810 let line_ending = LineEnding::detect(&new_text);
1811 LineEnding::normalize(&mut new_text);
1812 let edits = text_diff(&old_text, &new_text);
1813 Diff {
1814 base_version,
1815 line_ending,
1816 edits,
1817 }
1818 })
1819 }
1820
1821 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1823 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1824 let old_text = self.as_rope().clone();
1825 let line_ending = self.line_ending();
1826 let base_version = self.version();
1827 cx.background_spawn(async move {
1828 let ranges = trailing_whitespace_ranges(&old_text);
1829 let empty = Arc::<str>::from("");
1830 Diff {
1831 base_version,
1832 line_ending,
1833 edits: ranges
1834 .into_iter()
1835 .map(|range| (range, empty.clone()))
1836 .collect(),
1837 }
1838 })
1839 }
1840
1841 /// Ensures that the buffer ends with a single newline character, and
1842 /// no other whitespace.
1843 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1844 let len = self.len();
1845 let mut offset = len;
1846 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1847 let non_whitespace_len = chunk
1848 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1849 .len();
1850 offset -= chunk.len();
1851 offset += non_whitespace_len;
1852 if non_whitespace_len != 0 {
1853 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1854 return;
1855 }
1856 break;
1857 }
1858 }
1859 self.edit([(offset..len, "\n")], None, cx);
1860 }
1861
1862 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1863 /// calculated, then adjust the diff to account for those changes, and discard any
1864 /// parts of the diff that conflict with those changes.
1865 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1866 let snapshot = self.snapshot();
1867 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1868 let mut delta = 0;
1869 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1870 while let Some(edit_since) = edits_since.peek() {
1871 // If the edit occurs after a diff hunk, then it does not
1872 // affect that hunk.
1873 if edit_since.old.start > range.end {
1874 break;
1875 }
1876 // If the edit precedes the diff hunk, then adjust the hunk
1877 // to reflect the edit.
1878 else if edit_since.old.end < range.start {
1879 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1880 edits_since.next();
1881 }
1882 // If the edit intersects a diff hunk, then discard that hunk.
1883 else {
1884 return None;
1885 }
1886 }
1887
1888 let start = (range.start as i64 + delta) as usize;
1889 let end = (range.end as i64 + delta) as usize;
1890 Some((start..end, new_text))
1891 });
1892
1893 self.start_transaction();
1894 self.text.set_line_ending(diff.line_ending);
1895 self.edit(adjusted_edits, None, cx);
1896 self.end_transaction(cx)
1897 }
1898
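    /// Returns whether the buffer contains edits that are not reflected in
    /// `saved_version`, caching the answer per buffer version so that repeated
    /// calls don't rescan the edit history.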
1899 fn has_unsaved_edits(&self) -> bool {
1900 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1901
1902 if last_version == self.version {
1903 self.has_unsaved_edits
1904 .set((last_version, has_unsaved_edits));
1905 return has_unsaved_edits;
1906 }
1907
1908 let has_edits = self.has_edits_since(&self.saved_version);
1909 self.has_unsaved_edits
1910 .set((self.version.clone(), has_edits));
1911 has_edits
1912 }
1913
1914 /// Checks if the buffer has unsaved changes.
1915 pub fn is_dirty(&self) -> bool {
1916 if self.capability == Capability::ReadOnly {
1917 return false;
1918 }
1919 if self.has_conflict {
1920 return true;
1921 }
1922 match self.file.as_ref().map(|f| f.disk_state()) {
1923 Some(DiskState::New) | Some(DiskState::Deleted) => {
1924 !self.is_empty() && self.has_unsaved_edits()
1925 }
1926 _ => self.has_unsaved_edits(),
1927 }
1928 }
1929
1930 /// Checks if the buffer and its file have both changed since the buffer
1931 /// was last saved or reloaded.
1932 pub fn has_conflict(&self) -> bool {
1933 if self.has_conflict {
1934 return true;
1935 }
1936 let Some(file) = self.file.as_ref() else {
1937 return false;
1938 };
1939 match file.disk_state() {
1940 DiskState::New => false,
1941 DiskState::Present { mtime } => match self.saved_mtime {
1942 Some(saved_mtime) => {
1943 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1944 }
1945 None => true,
1946 },
1947 DiskState::Deleted => false,
1948 }
1949 }
1950
1951 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1952 pub fn subscribe(&mut self) -> Subscription {
1953 self.text.subscribe()
1954 }
1955
1956 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1957 ///
1958 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
1960 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
1961 if let Err(ix) = self
1962 .change_bits
1963 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
1964 {
1965 self.change_bits.insert(ix, bit);
1966 }
1967 }
1968
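    /// Sets every change bit registered via [`Buffer::record_changes`], dropping
    /// any bits whose owners have since been dropped.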
1969 fn was_changed(&mut self) {
1970 self.change_bits.retain(|change_bit| {
1971 change_bit.upgrade().map_or(false, |bit| {
1972 bit.replace(true);
1973 true
1974 })
1975 });
1976 }
1977
1978 /// Starts a transaction, if one is not already in-progress. When undoing or
1979 /// redoing edits, all of the edits performed within a transaction are undone
1980 /// or redone together.
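    ///
    /// For example (illustrative sketch; assumes `buffer: &mut Buffer` and
    /// `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// // Group two edits so that a single undo reverts both of them.
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// let len = buffer.len();
    /// buffer.edit([(len..len, "}\n")], None, cx);
    /// let _transaction_id = buffer.end_transaction(cx);
    /// ```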
1981 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1982 self.start_transaction_at(Instant::now())
1983 }
1984
1985 /// Starts a transaction, providing the current time. Subsequent transactions
1986 /// that occur within a short period of time will be grouped together. This
1987 /// is controlled by the buffer's undo grouping duration.
1988 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1989 self.transaction_depth += 1;
1990 if self.was_dirty_before_starting_transaction.is_none() {
1991 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1992 }
1993 self.text.start_transaction_at(now)
1994 }
1995
1996 /// Terminates the current transaction, if this is the outermost transaction.
1997 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
1998 self.end_transaction_at(Instant::now(), cx)
1999 }
2000
2001 /// Terminates the current transaction, providing the current time. Subsequent transactions
2002 /// that occur within a short period of time will be grouped together. This
2003 /// is controlled by the buffer's undo grouping duration.
2004 pub fn end_transaction_at(
2005 &mut self,
2006 now: Instant,
2007 cx: &mut Context<Self>,
2008 ) -> Option<TransactionId> {
2009 assert!(self.transaction_depth > 0);
2010 self.transaction_depth -= 1;
2011 let was_dirty = if self.transaction_depth == 0 {
2012 self.was_dirty_before_starting_transaction.take().unwrap()
2013 } else {
2014 false
2015 };
2016 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2017 self.did_edit(&start_version, was_dirty, cx);
2018 Some(transaction_id)
2019 } else {
2020 None
2021 }
2022 }
2023
2024 /// Manually add a transaction to the buffer's undo history.
2025 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2026 self.text.push_transaction(transaction, now);
2027 }
2028
2029 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2031 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2032 self.text.finalize_last_transaction()
2033 }
2034
2035 /// Manually group all changes since a given transaction.
2036 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2037 self.text.group_until_transaction(transaction_id);
2038 }
2039
2040 /// Manually remove a transaction from the buffer's undo history
2041 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2042 self.text.forget_transaction(transaction_id)
2043 }
2044
2045 /// Retrieve a transaction from the buffer's undo history
2046 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2047 self.text.get_transaction(transaction_id)
2048 }
2049
2050 /// Manually merge two transactions in the buffer's undo history.
2051 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2052 self.text.merge_transactions(transaction, destination);
2053 }
2054
2055 /// Waits for the buffer to receive operations with the given timestamps.
2056 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2057 &mut self,
2058 edit_ids: It,
2059 ) -> impl Future<Output = Result<()>> + use<It> {
2060 self.text.wait_for_edits(edit_ids)
2061 }
2062
2063 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2064 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2065 &mut self,
2066 anchors: It,
2067 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2068 self.text.wait_for_anchors(anchors)
2069 }
2070
2071 /// Waits for the buffer to receive operations up to the given version.
2072 pub fn wait_for_version(
2073 &mut self,
2074 version: clock::Global,
2075 ) -> impl Future<Output = Result<()>> + use<> {
2076 self.text.wait_for_version(version)
2077 }
2078
2079 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2081 pub fn give_up_waiting(&mut self) {
2082 self.text.give_up_waiting();
2083 }
2084
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2086 pub fn set_active_selections(
2087 &mut self,
2088 selections: Arc<[Selection<Anchor>]>,
2089 line_mode: bool,
2090 cursor_shape: CursorShape,
2091 cx: &mut Context<Self>,
2092 ) {
2093 let lamport_timestamp = self.text.lamport_clock.tick();
2094 self.remote_selections.insert(
2095 self.text.replica_id(),
2096 SelectionSet {
2097 selections: selections.clone(),
2098 lamport_timestamp,
2099 line_mode,
2100 cursor_shape,
2101 },
2102 );
2103 self.send_operation(
2104 Operation::UpdateSelections {
2105 selections,
2106 line_mode,
2107 lamport_timestamp,
2108 cursor_shape,
2109 },
2110 true,
2111 cx,
2112 );
2113 self.non_text_state_update_count += 1;
2114 cx.notify();
2115 }
2116
2117 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2118 /// this replica.
2119 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2120 if self
2121 .remote_selections
2122 .get(&self.text.replica_id())
2123 .map_or(true, |set| !set.selections.is_empty())
2124 {
2125 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2126 }
2127 }
2128
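    /// Stores a set of selections attributed to the agent replica. Unlike
    /// [`Buffer::set_active_selections`], this does not emit an operation to be
    /// sent to other replicas.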
2129 pub fn set_agent_selections(
2130 &mut self,
2131 selections: Arc<[Selection<Anchor>]>,
2132 line_mode: bool,
2133 cursor_shape: CursorShape,
2134 cx: &mut Context<Self>,
2135 ) {
2136 let lamport_timestamp = self.text.lamport_clock.tick();
2137 self.remote_selections.insert(
2138 AGENT_REPLICA_ID,
2139 SelectionSet {
2140 selections: selections.clone(),
2141 lamport_timestamp,
2142 line_mode,
2143 cursor_shape,
2144 },
2145 );
2146 self.non_text_state_update_count += 1;
2147 cx.notify();
2148 }
2149
2150 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2151 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2152 }
2153
2154 /// Replaces the buffer's entire text.
2155 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2156 where
2157 T: Into<Arc<str>>,
2158 {
2159 self.autoindent_requests.clear();
2160 self.edit([(0..self.len(), text)], None, cx)
2161 }
2162
2163 /// Appends the given text to the end of the buffer.
2164 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2165 where
2166 T: Into<Arc<str>>,
2167 {
2168 self.edit([(self.len()..self.len(), text)], None, cx)
2169 }
2170
2171 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2172 /// delete, and a string of text to insert at that location.
2173 ///
2174 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2175 /// request for the edited ranges, which will be processed when the buffer finishes
2176 /// parsing.
2177 ///
2178 /// Parsing takes place at the end of a transaction, and may compute synchronously
2179 /// or asynchronously, depending on the changes.
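    ///
    /// For example (illustrative sketch; assumes `buffer: &mut Buffer`,
    /// `cx: &mut Context<Buffer>`, and offsets that are valid for the current text):
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [
    ///         // Replace an existing range...
    ///         (10..14, "replacement"),
    ///         // ...and insert a new line further down, re-indenting it.
    ///         (20..20, "\ninserted line"),
    ///     ],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```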
2180 pub fn edit<I, S, T>(
2181 &mut self,
2182 edits_iter: I,
2183 autoindent_mode: Option<AutoindentMode>,
2184 cx: &mut Context<Self>,
2185 ) -> Option<clock::Lamport>
2186 where
2187 I: IntoIterator<Item = (Range<S>, T)>,
2188 S: ToOffset,
2189 T: Into<Arc<str>>,
2190 {
2191 // Skip invalid edits and coalesce contiguous ones.
2192 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2193
2194 for (range, new_text) in edits_iter {
2195 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2196
2197 if range.start > range.end {
2198 mem::swap(&mut range.start, &mut range.end);
2199 }
2200 let new_text = new_text.into();
2201 if !new_text.is_empty() || !range.is_empty() {
2202 if let Some((prev_range, prev_text)) = edits.last_mut() {
2203 if prev_range.end >= range.start {
2204 prev_range.end = cmp::max(prev_range.end, range.end);
2205 *prev_text = format!("{prev_text}{new_text}").into();
2206 } else {
2207 edits.push((range, new_text));
2208 }
2209 } else {
2210 edits.push((range, new_text));
2211 }
2212 }
2213 }
2214 if edits.is_empty() {
2215 return None;
2216 }
2217
2218 self.start_transaction();
2219 self.pending_autoindent.take();
2220 let autoindent_request = autoindent_mode
2221 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2222
2223 let edit_operation = self.text.edit(edits.iter().cloned());
2224 let edit_id = edit_operation.timestamp();
2225
2226 if let Some((before_edit, mode)) = autoindent_request {
2227 let mut delta = 0isize;
2228 let entries = edits
2229 .into_iter()
2230 .enumerate()
2231 .zip(&edit_operation.as_edit().unwrap().new_text)
2232 .map(|((ix, (range, _)), new_text)| {
2233 let new_text_length = new_text.len();
2234 let old_start = range.start.to_point(&before_edit);
2235 let new_start = (delta + range.start as isize) as usize;
2236 let range_len = range.end - range.start;
2237 delta += new_text_length as isize - range_len as isize;
2238
2239 // Decide what range of the insertion to auto-indent, and whether
2240 // the first line of the insertion should be considered a newly-inserted line
2241 // or an edit to an existing line.
2242 let mut range_of_insertion_to_indent = 0..new_text_length;
2243 let mut first_line_is_new = true;
2244
2245 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2246 let old_line_end = before_edit.line_len(old_start.row);
2247
2248 if old_start.column > old_line_start {
2249 first_line_is_new = false;
2250 }
2251
2252 if !new_text.contains('\n')
2253 && (old_start.column + (range_len as u32) < old_line_end
2254 || old_line_end == old_line_start)
2255 {
2256 first_line_is_new = false;
2257 }
2258
2259 // When inserting text starting with a newline, avoid auto-indenting the
2260 // previous line.
2261 if new_text.starts_with('\n') {
2262 range_of_insertion_to_indent.start += 1;
2263 first_line_is_new = true;
2264 }
2265
2266 let mut original_indent_column = None;
2267 if let AutoindentMode::Block {
2268 original_indent_columns,
2269 } = &mode
2270 {
2271 original_indent_column = Some(if new_text.starts_with('\n') {
2272 indent_size_for_text(
2273 new_text[range_of_insertion_to_indent.clone()].chars(),
2274 )
2275 .len
2276 } else {
2277 original_indent_columns
2278 .get(ix)
2279 .copied()
2280 .flatten()
2281 .unwrap_or_else(|| {
2282 indent_size_for_text(
2283 new_text[range_of_insertion_to_indent.clone()].chars(),
2284 )
2285 .len
2286 })
2287 });
2288
2289 // Avoid auto-indenting the line after the edit.
2290 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2291 range_of_insertion_to_indent.end -= 1;
2292 }
2293 }
2294
2295 AutoindentRequestEntry {
2296 first_line_is_new,
2297 original_indent_column,
2298 indent_size: before_edit.language_indent_size_at(range.start, cx),
2299 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2300 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2301 }
2302 })
2303 .collect();
2304
2305 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2306 before_edit,
2307 entries,
2308 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2309 ignore_empty_lines: false,
2310 }));
2311 }
2312
2313 self.end_transaction(cx);
2314 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2315 Some(edit_id)
2316 }
2317
2318 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2319 self.was_changed();
2320
2321 if self.edits_since::<usize>(old_version).next().is_none() {
2322 return;
2323 }
2324
2325 self.reparse(cx);
2326 cx.emit(BufferEvent::Edited);
2327 if was_dirty != self.is_dirty() {
2328 cx.emit(BufferEvent::DirtyChanged);
2329 }
2330 cx.notify();
2331 }
2332
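    /// Requests an auto-indent of the given ranges without performing any edit,
    /// skipping rows that are empty.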
2333 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2334 where
2335 I: IntoIterator<Item = Range<T>>,
2336 T: ToOffset + Copy,
2337 {
2338 let before_edit = self.snapshot();
2339 let entries = ranges
2340 .into_iter()
2341 .map(|range| AutoindentRequestEntry {
2342 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2343 first_line_is_new: true,
2344 indent_size: before_edit.language_indent_size_at(range.start, cx),
2345 original_indent_column: None,
2346 })
2347 .collect();
2348 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2349 before_edit,
2350 entries,
2351 is_block_mode: false,
2352 ignore_empty_lines: true,
2353 }));
2354 self.request_autoindent(cx);
2355 }
2356
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2359 pub fn insert_empty_line(
2360 &mut self,
2361 position: impl ToPoint,
2362 space_above: bool,
2363 space_below: bool,
2364 cx: &mut Context<Self>,
2365 ) -> Point {
2366 let mut position = position.to_point(self);
2367
2368 self.start_transaction();
2369
2370 self.edit(
2371 [(position..position, "\n")],
2372 Some(AutoindentMode::EachLine),
2373 cx,
2374 );
2375
2376 if position.column > 0 {
2377 position += Point::new(1, 0);
2378 }
2379
2380 if !self.is_line_blank(position.row) {
2381 self.edit(
2382 [(position..position, "\n")],
2383 Some(AutoindentMode::EachLine),
2384 cx,
2385 );
2386 }
2387
2388 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2389 self.edit(
2390 [(position..position, "\n")],
2391 Some(AutoindentMode::EachLine),
2392 cx,
2393 );
2394 position.row += 1;
2395 }
2396
2397 if space_below
2398 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2399 {
2400 self.edit(
2401 [(position..position, "\n")],
2402 Some(AutoindentMode::EachLine),
2403 cx,
2404 );
2405 }
2406
2407 self.end_transaction(cx);
2408
2409 position
2410 }
2411
2412 /// Applies the given remote operations to the buffer.
2413 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2414 self.pending_autoindent.take();
2415 let was_dirty = self.is_dirty();
2416 let old_version = self.version.clone();
2417 let mut deferred_ops = Vec::new();
2418 let buffer_ops = ops
2419 .into_iter()
2420 .filter_map(|op| match op {
2421 Operation::Buffer(op) => Some(op),
2422 _ => {
2423 if self.can_apply_op(&op) {
2424 self.apply_op(op, cx);
2425 } else {
2426 deferred_ops.push(op);
2427 }
2428 None
2429 }
2430 })
2431 .collect::<Vec<_>>();
2432 for operation in buffer_ops.iter() {
2433 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2434 }
2435 self.text.apply_ops(buffer_ops);
2436 self.deferred_ops.insert(deferred_ops);
2437 self.flush_deferred_ops(cx);
2438 self.did_edit(&old_version, was_dirty, cx);
        // Notify regardless of whether the buffer was edited, since the operations
        // could include a selection update.
2441 cx.notify();
2442 }
2443
2444 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2445 let mut deferred_ops = Vec::new();
2446 for op in self.deferred_ops.drain().iter().cloned() {
2447 if self.can_apply_op(&op) {
2448 self.apply_op(op, cx);
2449 } else {
2450 deferred_ops.push(op);
2451 }
2452 }
2453 self.deferred_ops.insert(deferred_ops);
2454 }
2455
2456 pub fn has_deferred_ops(&self) -> bool {
2457 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2458 }
2459
2460 fn can_apply_op(&self, operation: &Operation) -> bool {
2461 match operation {
2462 Operation::Buffer(_) => {
2463 unreachable!("buffer operations should never be applied at this layer")
2464 }
2465 Operation::UpdateDiagnostics {
2466 diagnostics: diagnostic_set,
2467 ..
2468 } => diagnostic_set.iter().all(|diagnostic| {
2469 self.text.can_resolve(&diagnostic.range.start)
2470 && self.text.can_resolve(&diagnostic.range.end)
2471 }),
2472 Operation::UpdateSelections { selections, .. } => selections
2473 .iter()
2474 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2475 Operation::UpdateCompletionTriggers { .. } => true,
2476 }
2477 }
2478
2479 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2480 match operation {
2481 Operation::Buffer(_) => {
2482 unreachable!("buffer operations should never be applied at this layer")
2483 }
2484 Operation::UpdateDiagnostics {
2485 server_id,
2486 diagnostics: diagnostic_set,
2487 lamport_timestamp,
2488 } => {
2489 let snapshot = self.snapshot();
2490 self.apply_diagnostic_update(
2491 server_id,
2492 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2493 lamport_timestamp,
2494 cx,
2495 );
2496 }
2497 Operation::UpdateSelections {
2498 selections,
2499 lamport_timestamp,
2500 line_mode,
2501 cursor_shape,
2502 } => {
2503 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2504 if set.lamport_timestamp > lamport_timestamp {
2505 return;
2506 }
2507 }
2508
2509 self.remote_selections.insert(
2510 lamport_timestamp.replica_id,
2511 SelectionSet {
2512 selections,
2513 lamport_timestamp,
2514 line_mode,
2515 cursor_shape,
2516 },
2517 );
2518 self.text.lamport_clock.observe(lamport_timestamp);
2519 self.non_text_state_update_count += 1;
2520 }
2521 Operation::UpdateCompletionTriggers {
2522 triggers,
2523 lamport_timestamp,
2524 server_id,
2525 } => {
2526 if triggers.is_empty() {
2527 self.completion_triggers_per_language_server
2528 .remove(&server_id);
2529 self.completion_triggers = self
2530 .completion_triggers_per_language_server
2531 .values()
2532 .flat_map(|triggers| triggers.into_iter().cloned())
2533 .collect();
2534 } else {
2535 self.completion_triggers_per_language_server
2536 .insert(server_id, triggers.iter().cloned().collect());
2537 self.completion_triggers.extend(triggers);
2538 }
2539 self.text.lamport_clock.observe(lamport_timestamp);
2540 }
2541 }
2542 }
2543
2544 fn apply_diagnostic_update(
2545 &mut self,
2546 server_id: LanguageServerId,
2547 diagnostics: DiagnosticSet,
2548 lamport_timestamp: clock::Lamport,
2549 cx: &mut Context<Self>,
2550 ) {
2551 if lamport_timestamp > self.diagnostics_timestamp {
2552 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2553 if diagnostics.is_empty() {
2554 if let Ok(ix) = ix {
2555 self.diagnostics.remove(ix);
2556 }
2557 } else {
2558 match ix {
2559 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2560 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2561 };
2562 }
2563 self.diagnostics_timestamp = lamport_timestamp;
2564 self.non_text_state_update_count += 1;
2565 self.text.lamport_clock.observe(lamport_timestamp);
2566 cx.notify();
2567 cx.emit(BufferEvent::DiagnosticsUpdated);
2568 }
2569 }
2570
2571 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2572 self.was_changed();
2573 cx.emit(BufferEvent::Operation {
2574 operation,
2575 is_local,
2576 });
2577 }
2578
2579 /// Removes the selections for a given peer.
2580 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2581 self.remote_selections.remove(&replica_id);
2582 cx.notify();
2583 }
2584
2585 /// Undoes the most recent transaction.
2586 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2587 let was_dirty = self.is_dirty();
2588 let old_version = self.version.clone();
2589
2590 if let Some((transaction_id, operation)) = self.text.undo() {
2591 self.send_operation(Operation::Buffer(operation), true, cx);
2592 self.did_edit(&old_version, was_dirty, cx);
2593 Some(transaction_id)
2594 } else {
2595 None
2596 }
2597 }
2598
2599 /// Manually undoes a specific transaction in the buffer's undo history.
2600 pub fn undo_transaction(
2601 &mut self,
2602 transaction_id: TransactionId,
2603 cx: &mut Context<Self>,
2604 ) -> bool {
2605 let was_dirty = self.is_dirty();
2606 let old_version = self.version.clone();
2607 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2608 self.send_operation(Operation::Buffer(operation), true, cx);
2609 self.did_edit(&old_version, was_dirty, cx);
2610 true
2611 } else {
2612 false
2613 }
2614 }
2615
2616 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2617 pub fn undo_to_transaction(
2618 &mut self,
2619 transaction_id: TransactionId,
2620 cx: &mut Context<Self>,
2621 ) -> bool {
2622 let was_dirty = self.is_dirty();
2623 let old_version = self.version.clone();
2624
2625 let operations = self.text.undo_to_transaction(transaction_id);
2626 let undone = !operations.is_empty();
2627 for operation in operations {
2628 self.send_operation(Operation::Buffer(operation), true, cx);
2629 }
2630 if undone {
2631 self.did_edit(&old_version, was_dirty, cx)
2632 }
2633 undone
2634 }
2635
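    /// Undoes specific operations, identified by their lamport timestamps, using the
    /// provided undo counts.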
2636 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2637 let was_dirty = self.is_dirty();
2638 let operation = self.text.undo_operations(counts);
2639 let old_version = self.version.clone();
2640 self.send_operation(Operation::Buffer(operation), true, cx);
2641 self.did_edit(&old_version, was_dirty, cx);
2642 }
2643
    /// Redoes the most recently undone transaction.
2645 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2646 let was_dirty = self.is_dirty();
2647 let old_version = self.version.clone();
2648
2649 if let Some((transaction_id, operation)) = self.text.redo() {
2650 self.send_operation(Operation::Buffer(operation), true, cx);
2651 self.did_edit(&old_version, was_dirty, cx);
2652 Some(transaction_id)
2653 } else {
2654 None
2655 }
2656 }
2657
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2659 pub fn redo_to_transaction(
2660 &mut self,
2661 transaction_id: TransactionId,
2662 cx: &mut Context<Self>,
2663 ) -> bool {
2664 let was_dirty = self.is_dirty();
2665 let old_version = self.version.clone();
2666
2667 let operations = self.text.redo_to_transaction(transaction_id);
2668 let redone = !operations.is_empty();
2669 for operation in operations {
2670 self.send_operation(Operation::Buffer(operation), true, cx);
2671 }
2672 if redone {
2673 self.did_edit(&old_version, was_dirty, cx)
2674 }
2675 redone
2676 }
2677
2678 /// Override current completion triggers with the user-provided completion triggers.
2679 pub fn set_completion_triggers(
2680 &mut self,
2681 server_id: LanguageServerId,
2682 triggers: BTreeSet<String>,
2683 cx: &mut Context<Self>,
2684 ) {
2685 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2686 if triggers.is_empty() {
2687 self.completion_triggers_per_language_server
2688 .remove(&server_id);
2689 self.completion_triggers = self
2690 .completion_triggers_per_language_server
2691 .values()
2692 .flat_map(|triggers| triggers.into_iter().cloned())
2693 .collect();
2694 } else {
2695 self.completion_triggers_per_language_server
2696 .insert(server_id, triggers.clone());
2697 self.completion_triggers.extend(triggers.iter().cloned());
2698 }
2699 self.send_operation(
2700 Operation::UpdateCompletionTriggers {
2701 triggers: triggers.iter().cloned().collect(),
2702 lamport_timestamp: self.completion_triggers_timestamp,
2703 server_id,
2704 },
2705 true,
2706 cx,
2707 );
2708 cx.notify();
2709 }
2710
2711 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2713 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2714 &self.completion_triggers
2715 }
2716
2717 /// Call this directly after performing edits to prevent the preview tab
2718 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2719 /// to return false until there are additional edits.
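    ///
    /// For example (illustrative sketch; assumes `buffer: &mut Buffer` and
    /// `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// // Keep a preview tab alive across a programmatic edit.
    /// let preserve = buffer.preserve_preview();
    /// buffer.edit([(0..0, "// generated header\n")], None, cx);
    /// if preserve {
    ///     buffer.refresh_preview();
    /// }
    /// ```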
2720 pub fn refresh_preview(&mut self) {
2721 self.preview_version = self.version.clone();
2722 }
2723
2724 /// Whether we should preserve the preview status of a tab containing this buffer.
2725 pub fn preserve_preview(&self) -> bool {
2726 !self.has_edits_since(&self.preview_version)
2727 }
2728}
2729
2730#[doc(hidden)]
2731#[cfg(any(test, feature = "test-support"))]
2732impl Buffer {
2733 pub fn edit_via_marked_text(
2734 &mut self,
2735 marked_string: &str,
2736 autoindent_mode: Option<AutoindentMode>,
2737 cx: &mut Context<Self>,
2738 ) {
2739 let edits = self.edits_for_marked_text(marked_string);
2740 self.edit(edits, autoindent_mode, cx);
2741 }
2742
2743 pub fn set_group_interval(&mut self, group_interval: Duration) {
2744 self.text.set_group_interval(group_interval);
2745 }
2746
2747 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2748 where
2749 T: rand::Rng,
2750 {
2751 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2752 let mut last_end = None;
2753 for _ in 0..old_range_count {
2754 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2755 break;
2756 }
2757
2758 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2759 let mut range = self.random_byte_range(new_start, rng);
2760 if rng.gen_bool(0.2) {
2761 mem::swap(&mut range.start, &mut range.end);
2762 }
2763 last_end = Some(range.end);
2764
2765 let new_text_len = rng.gen_range(0..10);
2766 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2767 new_text = new_text.to_uppercase();
2768
2769 edits.push((range, new_text));
2770 }
2771 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2772 self.edit(edits, None, cx);
2773 }
2774
2775 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2776 let was_dirty = self.is_dirty();
2777 let old_version = self.version.clone();
2778
2779 let ops = self.text.randomly_undo_redo(rng);
2780 if !ops.is_empty() {
2781 for op in ops {
2782 self.send_operation(Operation::Buffer(op), true, cx);
2783 self.did_edit(&old_version, was_dirty, cx);
2784 }
2785 }
2786 }
2787}
2788
2789impl EventEmitter<BufferEvent> for Buffer {}
2790
2791impl Deref for Buffer {
2792 type Target = TextBuffer;
2793
2794 fn deref(&self) -> &Self::Target {
2795 &self.text
2796 }
2797}
2798
2799impl BufferSnapshot {
2800 /// Returns [`IndentSize`] for a given line that respects user settings and
2801 /// language preferences.
2802 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2803 indent_size_for_line(self, row)
2804 }
2805
2806 /// Returns [`IndentSize`] for a given position that respects user settings
2807 /// and language preferences.
2808 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2809 let settings = language_settings(
2810 self.language_at(position).map(|l| l.name()),
2811 self.file(),
2812 cx,
2813 );
2814 if settings.hard_tabs {
2815 IndentSize::tab()
2816 } else {
2817 IndentSize::spaces(settings.tab_size.get())
2818 }
2819 }
2820
2821 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2822 /// is passed in as `single_indent_size`.
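    ///
    /// For example (illustrative sketch; `snapshot` is a [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// // Suggestions for rows 1 through 3, using a 4-space unit of indentation.
    /// let suggestions = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: {} columns", indent.len);
    /// }
    /// ```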
2823 pub fn suggested_indents(
2824 &self,
2825 rows: impl Iterator<Item = u32>,
2826 single_indent_size: IndentSize,
2827 ) -> BTreeMap<u32, IndentSize> {
2828 let mut result = BTreeMap::new();
2829
2830 for row_range in contiguous_ranges(rows, 10) {
2831 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2832 Some(suggestions) => suggestions,
2833 _ => break,
2834 };
2835
2836 for (row, suggestion) in row_range.zip(suggestions) {
2837 let indent_size = if let Some(suggestion) = suggestion {
2838 result
2839 .get(&suggestion.basis_row)
2840 .copied()
2841 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2842 .with_delta(suggestion.delta, single_indent_size)
2843 } else {
2844 self.indent_size_for_line(row)
2845 };
2846
2847 result.insert(row, indent_size);
2848 }
2849 }
2850
2851 result
2852 }
2853
2854 fn suggest_autoindents(
2855 &self,
2856 row_range: Range<u32>,
2857 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2858 let config = &self.language.as_ref()?.config;
2859 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2860
2861 // Find the suggested indentation ranges based on the syntax tree.
2862 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2863 let end = Point::new(row_range.end, 0);
2864 let range = (start..end).to_offset(&self.text);
2865 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2866 Some(&grammar.indents_config.as_ref()?.query)
2867 });
2868 let indent_configs = matches
2869 .grammars()
2870 .iter()
2871 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2872 .collect::<Vec<_>>();
2873
2874 let mut indent_ranges = Vec::<Range<Point>>::new();
2875 let mut outdent_positions = Vec::<Point>::new();
2876 while let Some(mat) = matches.peek() {
2877 let mut start: Option<Point> = None;
2878 let mut end: Option<Point> = None;
2879
2880 let config = &indent_configs[mat.grammar_index];
2881 for capture in mat.captures {
2882 if capture.index == config.indent_capture_ix {
2883 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2884 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2885 } else if Some(capture.index) == config.start_capture_ix {
2886 start = Some(Point::from_ts_point(capture.node.end_position()));
2887 } else if Some(capture.index) == config.end_capture_ix {
2888 end = Some(Point::from_ts_point(capture.node.start_position()));
2889 } else if Some(capture.index) == config.outdent_capture_ix {
2890 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2891 }
2892 }
2893
2894 matches.advance();
2895 if let Some((start, end)) = start.zip(end) {
2896 if start.row == end.row {
2897 continue;
2898 }
2899
2900 let range = start..end;
2901 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2902 Err(ix) => indent_ranges.insert(ix, range),
2903 Ok(ix) => {
2904 let prev_range = &mut indent_ranges[ix];
2905 prev_range.end = prev_range.end.max(range.end);
2906 }
2907 }
2908 }
2909 }
2910
2911 let mut error_ranges = Vec::<Range<Point>>::new();
2912 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2913 grammar.error_query.as_ref()
2914 });
2915 while let Some(mat) = matches.peek() {
2916 let node = mat.captures[0].node;
2917 let start = Point::from_ts_point(node.start_position());
2918 let end = Point::from_ts_point(node.end_position());
2919 let range = start..end;
2920 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2921 Ok(ix) | Err(ix) => ix,
2922 };
2923 let mut end_ix = ix;
2924 while let Some(existing_range) = error_ranges.get(end_ix) {
2925 if existing_range.end < end {
2926 end_ix += 1;
2927 } else {
2928 break;
2929 }
2930 }
2931 error_ranges.splice(ix..end_ix, [range]);
2932 matches.advance();
2933 }
2934
2935 outdent_positions.sort();
2936 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and truncate that range to end at the outdent position.
2939 if let Some(range_to_truncate) = indent_ranges
2940 .iter_mut()
2941 .filter(|indent_range| indent_range.contains(&outdent_position))
2942 .next_back()
2943 {
2944 range_to_truncate.end = outdent_position;
2945 }
2946 }
2947
        // Find the suggested indentation increases and decreases based on regexes.
2949 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2950 self.for_each_line(
2951 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2952 ..Point::new(row_range.end, 0),
2953 |row, line| {
2954 if config
2955 .decrease_indent_pattern
2956 .as_ref()
2957 .map_or(false, |regex| regex.is_match(line))
2958 {
2959 indent_change_rows.push((row, Ordering::Less));
2960 }
2961 if config
2962 .increase_indent_pattern
2963 .as_ref()
2964 .map_or(false, |regex| regex.is_match(line))
2965 {
2966 indent_change_rows.push((row + 1, Ordering::Greater));
2967 }
2968 },
2969 );
2970
2971 let mut indent_changes = indent_change_rows.into_iter().peekable();
2972 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2973 prev_non_blank_row.unwrap_or(0)
2974 } else {
2975 row_range.start.saturating_sub(1)
2976 };
2977 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2978 Some(row_range.map(move |row| {
2979 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2980
2981 let mut indent_from_prev_row = false;
2982 let mut outdent_from_prev_row = false;
2983 let mut outdent_to_row = u32::MAX;
2984 let mut from_regex = false;
2985
2986 while let Some((indent_row, delta)) = indent_changes.peek() {
2987 match indent_row.cmp(&row) {
2988 Ordering::Equal => match delta {
2989 Ordering::Less => {
2990 from_regex = true;
2991 outdent_from_prev_row = true
2992 }
2993 Ordering::Greater => {
2994 indent_from_prev_row = true;
2995 from_regex = true
2996 }
2997 _ => {}
2998 },
2999
3000 Ordering::Greater => break,
3001 Ordering::Less => {}
3002 }
3003
3004 indent_changes.next();
3005 }
3006
3007 for range in &indent_ranges {
3008 if range.start.row >= row {
3009 break;
3010 }
3011 if range.start.row == prev_row && range.end > row_start {
3012 indent_from_prev_row = true;
3013 }
3014 if range.end > prev_row_start && range.end <= row_start {
3015 outdent_to_row = outdent_to_row.min(range.start.row);
3016 }
3017 }
3018
3019 let within_error = error_ranges
3020 .iter()
3021 .any(|e| e.start.row < row && e.end > row_start);
3022
3023 let suggestion = if outdent_to_row == prev_row
3024 || (outdent_from_prev_row && indent_from_prev_row)
3025 {
3026 Some(IndentSuggestion {
3027 basis_row: prev_row,
3028 delta: Ordering::Equal,
3029 within_error: within_error && !from_regex,
3030 })
3031 } else if indent_from_prev_row {
3032 Some(IndentSuggestion {
3033 basis_row: prev_row,
3034 delta: Ordering::Greater,
3035 within_error: within_error && !from_regex,
3036 })
3037 } else if outdent_to_row < prev_row {
3038 Some(IndentSuggestion {
3039 basis_row: outdent_to_row,
3040 delta: Ordering::Equal,
3041 within_error: within_error && !from_regex,
3042 })
3043 } else if outdent_from_prev_row {
3044 Some(IndentSuggestion {
3045 basis_row: prev_row,
3046 delta: Ordering::Less,
3047 within_error: within_error && !from_regex,
3048 })
3049 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3050 {
3051 Some(IndentSuggestion {
3052 basis_row: prev_row,
3053 delta: Ordering::Equal,
3054 within_error: within_error && !from_regex,
3055 })
3056 } else {
3057 None
3058 };
3059
3060 prev_row = row;
3061 prev_row_start = row_start;
3062 suggestion
3063 }))
3064 }
3065
3066 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3067 while row > 0 {
3068 row -= 1;
3069 if !self.is_line_blank(row) {
3070 return Some(row);
3071 }
3072 }
3073 None
3074 }
3075
3076 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3077 let captures = self.syntax.captures(range, &self.text, |grammar| {
3078 grammar.highlights_query.as_ref()
3079 });
3080 let highlight_maps = captures
3081 .grammars()
3082 .iter()
3083 .map(|grammar| grammar.highlight_map())
3084 .collect();
3085 (captures, highlight_maps)
3086 }
3087
3088 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3089 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3090 /// returned in chunks where each chunk has a single syntax highlighting style and
3091 /// diagnostic status.
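    ///
    /// For example (illustrative sketch; `snapshot` is a [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// // Reassemble the text of the whole buffer from its highlighted chunks.
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk is a run of text with one highlight and diagnostic status.
    ///     text.push_str(chunk.text);
    /// }
    /// ```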
3092 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3093 let range = range.start.to_offset(self)..range.end.to_offset(self);
3094
3095 let mut syntax = None;
3096 if language_aware {
3097 syntax = Some(self.get_highlights(range.clone()));
3098 }
3099 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3100 let diagnostics = language_aware;
3101 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3102 }
3103
3104 pub fn highlighted_text_for_range<T: ToOffset>(
3105 &self,
3106 range: Range<T>,
3107 override_style: Option<HighlightStyle>,
3108 syntax_theme: &SyntaxTheme,
3109 ) -> HighlightedText {
3110 HighlightedText::from_buffer_range(
3111 range,
3112 &self.text,
3113 &self.syntax,
3114 override_style,
3115 syntax_theme,
3116 )
3117 }
3118
3119 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
3121 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3122 let mut line = String::new();
3123 let mut row = range.start.row;
3124 for chunk in self
3125 .as_rope()
3126 .chunks_in_range(range.to_offset(self))
3127 .chain(["\n"])
3128 {
3129 for (newline_ix, text) in chunk.split('\n').enumerate() {
3130 if newline_ix > 0 {
3131 callback(row, &line);
3132 row += 1;
3133 line.clear();
3134 }
3135 line.push_str(text);
3136 }
3137 }
3138 }
3139
3140 /// Iterates over every [`SyntaxLayer`] in the buffer.
3141 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3142 self.syntax
3143 .layers_for_range(0..self.len(), &self.text, true)
3144 }
3145
3146 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3147 let offset = position.to_offset(self);
3148 self.syntax
3149 .layers_for_range(offset..offset, &self.text, false)
3150 .filter(|l| l.node().end_byte() > offset)
3151 .last()
3152 }
3153
3154 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3155 &self,
3156 range: Range<D>,
3157 ) -> Option<SyntaxLayer> {
3158 let range = range.to_offset(self);
3159 return self
3160 .syntax
3161 .layers_for_range(range, &self.text, false)
3162 .max_by(|a, b| {
3163 if a.depth != b.depth {
3164 a.depth.cmp(&b.depth)
3165 } else if a.offset.0 != b.offset.0 {
3166 a.offset.0.cmp(&b.offset.0)
3167 } else {
3168 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3169 }
3170 });
3171 }
3172
3173 /// Returns the main [`Language`].
3174 pub fn language(&self) -> Option<&Arc<Language>> {
3175 self.language.as_ref()
3176 }
3177
3178 /// Returns the [`Language`] at the given location.
3179 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3180 self.syntax_layer_at(position)
3181 .map(|info| info.language)
3182 .or(self.language.as_ref())
3183 }
3184
3185 /// Returns the settings for the language at the given location.
3186 pub fn settings_at<'a, D: ToOffset>(
3187 &'a self,
3188 position: D,
3189 cx: &'a App,
3190 ) -> Cow<'a, LanguageSettings> {
3191 language_settings(
3192 self.language_at(position).map(|l| l.name()),
3193 self.file.as_ref(),
3194 cx,
3195 )
3196 }
3197
3198 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3199 CharClassifier::new(self.language_scope_at(point))
3200 }
3201
3202 /// Returns the [`LanguageScope`] at the given location.
3203 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3204 let offset = position.to_offset(self);
3205 let mut scope = None;
3206 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3207
3208 // Use the layer that has the smallest node intersecting the given point.
3209 for layer in self
3210 .syntax
3211 .layers_for_range(offset..offset, &self.text, false)
3212 {
3213 let mut cursor = layer.node().walk();
3214
3215 let mut range = None;
3216 loop {
3217 let child_range = cursor.node().byte_range();
3218 if !child_range.contains(&offset) {
3219 break;
3220 }
3221
3222 range = Some(child_range);
3223 if cursor.goto_first_child_for_byte(offset).is_none() {
3224 break;
3225 }
3226 }
3227
3228 if let Some(range) = range {
3229 if smallest_range_and_depth.as_ref().map_or(
3230 true,
3231 |(smallest_range, smallest_range_depth)| {
3232 if layer.depth > *smallest_range_depth {
3233 true
3234 } else if layer.depth == *smallest_range_depth {
3235 range.len() < smallest_range.len()
3236 } else {
3237 false
3238 }
3239 },
3240 ) {
3241 smallest_range_and_depth = Some((range, layer.depth));
3242 scope = Some(LanguageScope {
3243 language: layer.language.clone(),
3244 override_id: layer.override_id(offset, &self.text),
3245 });
3246 }
3247 }
3248 }
3249
3250 scope.or_else(|| {
3251 self.language.clone().map(|language| LanguageScope {
3252 language,
3253 override_id: None,
3254 })
3255 })
3256 }
3257
3258 /// Returns a tuple of the range and character kind of the word
3259 /// surrounding the given position.
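    ///
    /// For example (illustrative sketch, assuming the buffer contains
    /// `let foo_bar = 1;` and the default word classifier):
    ///
    /// ```ignore
    /// // An offset inside `foo_bar` yields that word's byte range.
    /// let (range, kind) = snapshot.surrounding_word(6);
    /// assert_eq!(range, 4..11);
    /// assert!(kind.is_some());
    /// ```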
3260 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3261 let mut start = start.to_offset(self);
3262 let mut end = start;
3263 let mut next_chars = self.chars_at(start).peekable();
3264 let mut prev_chars = self.reversed_chars_at(start).peekable();
3265
3266 let classifier = self.char_classifier_at(start);
3267 let word_kind = cmp::max(
3268 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3269 next_chars.peek().copied().map(|c| classifier.kind(c)),
3270 );
3271
3272 for ch in prev_chars {
3273 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3274 start -= ch.len_utf8();
3275 } else {
3276 break;
3277 }
3278 }
3279
3280 for ch in next_chars {
3281 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3282 end += ch.len_utf8();
3283 } else {
3284 break;
3285 }
3286 }
3287
3288 (start..end, word_kind)
3289 }
3290
3291 /// Returns the closest syntax node enclosing the given range.
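    ///
    /// For example (illustrative sketch; `selection_range` is any `Range<usize>`
    /// within the buffer):
    ///
    /// ```ignore
    /// // Grow a selection outward to the smallest enclosing syntax node.
    /// if let Some(node) = snapshot.syntax_ancestor(selection_range.clone()) {
    ///     // `node.kind()` names the grammar rule, e.g. "block" in Rust.
    ///     let expanded_range = node.byte_range();
    /// }
    /// ```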
3292 pub fn syntax_ancestor<'a, T: ToOffset>(
3293 &'a self,
3294 range: Range<T>,
3295 ) -> Option<tree_sitter::Node<'a>> {
3296 let range = range.start.to_offset(self)..range.end.to_offset(self);
3297 let mut result: Option<tree_sitter::Node<'a>> = None;
3298 'outer: for layer in self
3299 .syntax
3300 .layers_for_range(range.clone(), &self.text, true)
3301 {
3302 let mut cursor = layer.node().walk();
3303
3304 // Descend to the first leaf that touches the start of the range.
3305 //
3306 // If the range is non-empty and the current node ends exactly at the start,
3307 // move to the next sibling to find a node that extends beyond the start.
3308 //
3309 // If the range is empty and the current node starts after the range position,
3310 // move to the previous sibling to find the node that contains the position.
3311 while cursor.goto_first_child_for_byte(range.start).is_some() {
3312 if !range.is_empty() && cursor.node().end_byte() == range.start {
3313 cursor.goto_next_sibling();
3314 }
3315 if range.is_empty() && cursor.node().start_byte() > range.start {
3316 cursor.goto_previous_sibling();
3317 }
3318 }
3319
3320 // Ascend to the smallest ancestor that strictly contains the range.
3321 loop {
3322 let node_range = cursor.node().byte_range();
3323 if node_range.start <= range.start
3324 && node_range.end >= range.end
3325 && node_range.len() > range.len()
3326 {
3327 break;
3328 }
3329 if !cursor.goto_parent() {
3330 continue 'outer;
3331 }
3332 }
3333
3334 let left_node = cursor.node();
3335 let mut layer_result = left_node;
3336
3337 // For an empty range, try to find another node immediately to the right of the range.
3338 if left_node.end_byte() == range.start {
3339 let mut right_node = None;
3340 while !cursor.goto_next_sibling() {
3341 if !cursor.goto_parent() {
3342 break;
3343 }
3344 }
3345
3346 while cursor.node().start_byte() == range.start {
3347 right_node = Some(cursor.node());
3348 if !cursor.goto_first_child() {
3349 break;
3350 }
3351 }
3352
3353 // If there is a candidate node on both sides of the (empty) range, then
3354 // decide between the two by favoring a named node over an anonymous token.
3355 // If both nodes are the same in that regard, favor the right one.
3356 if let Some(right_node) = right_node {
3357 if right_node.is_named() || !left_node.is_named() {
3358 layer_result = right_node;
3359 }
3360 }
3361 }
3362
3363 if let Some(previous_result) = &result {
3364 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3365 continue;
3366 }
3367 }
3368 result = Some(layer_result);
3369 }
3370
3371 result
3372 }
3373
3374 /// Returns the root syntax node within the given row
3375 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node> {
3376 let start_offset = position.to_offset(self);
3377
3378 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3379
3380 let layer = self
3381 .syntax
3382 .layers_for_range(start_offset..start_offset, &self.text, true)
3383 .next()?;
3384
3385 let mut cursor = layer.node().walk();
3386
3387 // Descend to the first leaf that touches the start of the range.
3388 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3389 if cursor.node().end_byte() == start_offset {
3390 cursor.goto_next_sibling();
3391 }
3392 }
3393
3394 // Ascend to the root node within the same row.
3395 while cursor.goto_parent() {
3396 if cursor.node().start_position().row != row {
3397 break;
3398 }
3399 }
3400
3401 return Some(cursor.node());
3402 }
3403
3404 /// Returns the outline for the buffer.
3405 ///
3406 /// This method allows passing an optional [`SyntaxTheme`] to
3407 /// syntax-highlight the returned symbols.
3408 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3409 self.outline_items_containing(0..self.len(), true, theme)
3410 .map(Outline::new)
3411 }
3412
3413 /// Returns all the symbols that contain the given position.
3414 ///
3415 /// This method allows passing an optional [`SyntaxTheme`] to
3416 /// syntax-highlight the returned symbols.
3417 pub fn symbols_containing<T: ToOffset>(
3418 &self,
3419 position: T,
3420 theme: Option<&SyntaxTheme>,
3421 ) -> Option<Vec<OutlineItem<Anchor>>> {
3422 let position = position.to_offset(self);
3423 let mut items = self.outline_items_containing(
3424 position.saturating_sub(1)..self.len().min(position + 1),
3425 false,
3426 theme,
3427 )?;
3428 let mut prev_depth = None;
3429 items.retain(|item| {
3430 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3431 prev_depth = Some(item.depth);
3432 result
3433 });
3434 Some(items)
3435 }
3436
3437 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3438 let range = range.to_offset(self);
3439 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3440 grammar.outline_config.as_ref().map(|c| &c.query)
3441 });
3442 let configs = matches
3443 .grammars()
3444 .iter()
3445 .map(|g| g.outline_config.as_ref().unwrap())
3446 .collect::<Vec<_>>();
3447
3448 while let Some(mat) = matches.peek() {
3449 let config = &configs[mat.grammar_index];
3450 let containing_item_node = maybe!({
3451 let item_node = mat.captures.iter().find_map(|cap| {
3452 if cap.index == config.item_capture_ix {
3453 Some(cap.node)
3454 } else {
3455 None
3456 }
3457 })?;
3458
3459 let item_byte_range = item_node.byte_range();
3460 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3461 None
3462 } else {
3463 Some(item_node)
3464 }
3465 });
3466
3467 if let Some(item_node) = containing_item_node {
3468 return Some(
3469 Point::from_ts_point(item_node.start_position())
3470 ..Point::from_ts_point(item_node.end_position()),
3471 );
3472 }
3473
3474 matches.advance();
3475 }
3476 None
3477 }
3478
3479 pub fn outline_items_containing<T: ToOffset>(
3480 &self,
3481 range: Range<T>,
3482 include_extra_context: bool,
3483 theme: Option<&SyntaxTheme>,
3484 ) -> Option<Vec<OutlineItem<Anchor>>> {
3485 let range = range.to_offset(self);
3486 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3487 grammar.outline_config.as_ref().map(|c| &c.query)
3488 });
3489 let configs = matches
3490 .grammars()
3491 .iter()
3492 .map(|g| g.outline_config.as_ref().unwrap())
3493 .collect::<Vec<_>>();
3494
3495 let mut items = Vec::new();
3496 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3497 while let Some(mat) = matches.peek() {
3498 let config = &configs[mat.grammar_index];
3499 if let Some(item) =
3500 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3501 {
3502 items.push(item);
3503 } else if let Some(capture) = mat
3504 .captures
3505 .iter()
3506 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3507 {
3508 let capture_range = capture.node.start_position()..capture.node.end_position();
3509 let mut capture_row_range =
3510 capture_range.start.row as u32..capture_range.end.row as u32;
3511 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3512 {
3513 capture_row_range.end -= 1;
3514 }
3515 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3516 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3517 last_row_range.end = capture_row_range.end;
3518 } else {
3519 annotation_row_ranges.push(capture_row_range);
3520 }
3521 } else {
3522 annotation_row_ranges.push(capture_row_range);
3523 }
3524 }
3525 matches.advance();
3526 }
3527
3528 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3529
3530 // Assign depths based on containment relationships and convert to anchors.
3531 let mut item_ends_stack = Vec::<Point>::new();
3532 let mut anchor_items = Vec::new();
3533 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3534 for item in items {
3535 while let Some(last_end) = item_ends_stack.last().copied() {
3536 if last_end < item.range.end {
3537 item_ends_stack.pop();
3538 } else {
3539 break;
3540 }
3541 }
3542
3543 let mut annotation_row_range = None;
3544 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3545 let row_preceding_item = item.range.start.row.saturating_sub(1);
3546 if next_annotation_row_range.end < row_preceding_item {
3547 annotation_row_ranges.next();
3548 } else {
3549 if next_annotation_row_range.end == row_preceding_item {
3550 annotation_row_range = Some(next_annotation_row_range.clone());
3551 annotation_row_ranges.next();
3552 }
3553 break;
3554 }
3555 }
3556
3557 anchor_items.push(OutlineItem {
3558 depth: item_ends_stack.len(),
3559 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3560 text: item.text,
3561 highlight_ranges: item.highlight_ranges,
3562 name_ranges: item.name_ranges,
3563 body_range: item.body_range.map(|body_range| {
3564 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3565 }),
3566 annotation_range: annotation_row_range.map(|annotation_range| {
3567 self.anchor_after(Point::new(annotation_range.start, 0))
3568 ..self.anchor_before(Point::new(
3569 annotation_range.end,
3570 self.line_len(annotation_range.end),
3571 ))
3572 }),
3573 });
3574 item_ends_stack.push(item.range.end);
3575 }
3576
3577 Some(anchor_items)
3578 }
3579
3580 fn next_outline_item(
3581 &self,
3582 config: &OutlineConfig,
3583 mat: &SyntaxMapMatch,
3584 range: &Range<usize>,
3585 include_extra_context: bool,
3586 theme: Option<&SyntaxTheme>,
3587 ) -> Option<OutlineItem<Point>> {
3588 let item_node = mat.captures.iter().find_map(|cap| {
3589 if cap.index == config.item_capture_ix {
3590 Some(cap.node)
3591 } else {
3592 None
3593 }
3594 })?;
3595
3596 let item_byte_range = item_node.byte_range();
3597 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3598 return None;
3599 }
3600 let item_point_range = Point::from_ts_point(item_node.start_position())
3601 ..Point::from_ts_point(item_node.end_position());
3602
3603 let mut open_point = None;
3604 let mut close_point = None;
3605 let mut buffer_ranges = Vec::new();
3606 for capture in mat.captures {
3607 let node_is_name;
3608 if capture.index == config.name_capture_ix {
3609 node_is_name = true;
3610 } else if Some(capture.index) == config.context_capture_ix
3611 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3612 {
3613 node_is_name = false;
3614 } else {
3615 if Some(capture.index) == config.open_capture_ix {
3616 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3617 } else if Some(capture.index) == config.close_capture_ix {
3618 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3619 }
3620
3621 continue;
3622 }
3623
3624 let mut range = capture.node.start_byte()..capture.node.end_byte();
3625 let start = capture.node.start_position();
3626 if capture.node.end_position().row > start.row {
3627 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3628 }
3629
3630 if !range.is_empty() {
3631 buffer_ranges.push((range, node_is_name));
3632 }
3633 }
3634 if buffer_ranges.is_empty() {
3635 return None;
3636 }
3637 let mut text = String::new();
3638 let mut highlight_ranges = Vec::new();
3639 let mut name_ranges = Vec::new();
3640 let mut chunks = self.chunks(
3641 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3642 true,
3643 );
3644 let mut last_buffer_range_end = 0;
3645
3646 for (buffer_range, is_name) in buffer_ranges {
3647 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3648 if space_added {
3649 text.push(' ');
3650 }
3651 let before_append_len = text.len();
3652 let mut offset = buffer_range.start;
3653 chunks.seek(buffer_range.clone());
3654 for mut chunk in chunks.by_ref() {
3655 if chunk.text.len() > buffer_range.end - offset {
3656 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3657 offset = buffer_range.end;
3658 } else {
3659 offset += chunk.text.len();
3660 }
3661 let style = chunk
3662 .syntax_highlight_id
3663 .zip(theme)
3664 .and_then(|(highlight, theme)| highlight.style(theme));
3665 if let Some(style) = style {
3666 let start = text.len();
3667 let end = start + chunk.text.len();
3668 highlight_ranges.push((start..end, style));
3669 }
3670 text.push_str(chunk.text);
3671 if offset >= buffer_range.end {
3672 break;
3673 }
3674 }
3675 if is_name {
3676 let after_append_len = text.len();
3677 let start = if space_added && !name_ranges.is_empty() {
3678 before_append_len - 1
3679 } else {
3680 before_append_len
3681 };
3682 name_ranges.push(start..after_append_len);
3683 }
3684 last_buffer_range_end = buffer_range.end;
3685 }
3686
3687 Some(OutlineItem {
3688 depth: 0, // We'll calculate the depth later
3689 range: item_point_range,
3690 text,
3691 highlight_ranges,
3692 name_ranges,
3693 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3694 annotation_range: None,
3695 })
3696 }
3697
    /// Returns the byte ranges of function bodies within the given range,
    /// suitable for folding.
    pub fn function_body_fold_ranges<T: ToOffset>(
3699 &self,
3700 within: Range<T>,
3701 ) -> impl Iterator<Item = Range<usize>> + '_ {
3702 self.text_object_ranges(within, TreeSitterOptions::default())
3703 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3704 }
3705
3706 /// For each grammar in the language, runs the provided
3707 /// [`tree_sitter::Query`] against the given range.
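    ///
    /// A minimal usage sketch (hypothetical `snapshot`; not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures`, then move on to the next match.
    ///     matches.advance();
    /// }
    /// ```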
3708 pub fn matches(
3709 &self,
3710 range: Range<usize>,
3711 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3712 ) -> SyntaxMapMatches {
3713 self.syntax.matches(range, self, query)
3714 }
3715
    /// Returns all bracket pairs overlapping the given range, including pairs
    /// that the brackets query marks as newline-only.
    pub fn all_bracket_ranges(
3717 &self,
3718 range: Range<usize>,
3719 ) -> impl Iterator<Item = BracketMatch> + '_ {
3720 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3721 grammar.brackets_config.as_ref().map(|c| &c.query)
3722 });
3723 let configs = matches
3724 .grammars()
3725 .iter()
3726 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3727 .collect::<Vec<_>>();
3728
3729 iter::from_fn(move || {
3730 while let Some(mat) = matches.peek() {
3731 let mut open = None;
3732 let mut close = None;
3733 let config = &configs[mat.grammar_index];
3734 let pattern = &config.patterns[mat.pattern_index];
3735 for capture in mat.captures {
3736 if capture.index == config.open_capture_ix {
3737 open = Some(capture.node.byte_range());
3738 } else if capture.index == config.close_capture_ix {
3739 close = Some(capture.node.byte_range());
3740 }
3741 }
3742
3743 matches.advance();
3744
3745 let Some((open_range, close_range)) = open.zip(close) else {
3746 continue;
3747 };
3748
3749 let bracket_range = open_range.start..=close_range.end;
3750 if !bracket_range.overlaps(&range) {
3751 continue;
3752 }
3753
3754 return Some(BracketMatch {
3755 open_range,
3756 close_range,
3757 newline_only: pattern.newline_only,
3758 });
3759 }
3760 None
3761 })
3762 }
3763
    /// Returns bracket range pairs overlapping or adjacent to `range`.
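    ///
    /// A minimal usage sketch (hypothetical `snapshot` and `cursor_offset`; not compiled as a doctest):
    ///
    /// ```ignore
    /// // Find the innermost bracket pair around the cursor, if any.
    /// let pair = snapshot
    ///     .bracket_ranges(cursor_offset..cursor_offset)
    ///     .min_by_key(|pair| pair.close_range.end - pair.open_range.start);
    /// ```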
3765 pub fn bracket_ranges<T: ToOffset>(
3766 &self,
3767 range: Range<T>,
3768 ) -> impl Iterator<Item = BracketMatch> + '_ {
3769 // Find bracket pairs that *inclusively* contain the given range.
3770 let range = range.start.to_offset(self).saturating_sub(1)
3771 ..self.len().min(range.end.to_offset(self) + 1);
3772 self.all_bracket_ranges(range)
3773 .filter(|pair| !pair.newline_only)
3774 }
3775
    /// Returns the text object ranges (as defined by each language's text object query)
    /// that overlap the given range, along with the [`TextObject`] kind of each.
    pub fn text_object_ranges<T: ToOffset>(
3777 &self,
3778 range: Range<T>,
3779 options: TreeSitterOptions,
3780 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3781 let range = range.start.to_offset(self).saturating_sub(1)
3782 ..self.len().min(range.end.to_offset(self) + 1);
3783
3784 let mut matches =
3785 self.syntax
3786 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3787 grammar.text_object_config.as_ref().map(|c| &c.query)
3788 });
3789
3790 let configs = matches
3791 .grammars()
3792 .iter()
3793 .map(|grammar| grammar.text_object_config.as_ref())
3794 .collect::<Vec<_>>();
3795
3796 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3797
3798 iter::from_fn(move || {
3799 loop {
3800 while let Some(capture) = captures.pop() {
3801 if capture.0.overlaps(&range) {
3802 return Some(capture);
3803 }
3804 }
3805
3806 let mat = matches.peek()?;
3807
3808 let Some(config) = configs[mat.grammar_index].as_ref() else {
3809 matches.advance();
3810 continue;
3811 };
3812
3813 for capture in mat.captures {
3814 let Some(ix) = config
3815 .text_objects_by_capture_ix
3816 .binary_search_by_key(&capture.index, |e| e.0)
3817 .ok()
3818 else {
3819 continue;
3820 };
3821 let text_object = config.text_objects_by_capture_ix[ix].1;
3822 let byte_range = capture.node.byte_range();
3823
3824 let mut found = false;
3825 for (range, existing) in captures.iter_mut() {
3826 if existing == &text_object {
3827 range.start = range.start.min(byte_range.start);
3828 range.end = range.end.max(byte_range.end);
3829 found = true;
3830 break;
3831 }
3832 }
3833
3834 if !found {
3835 captures.push((byte_range, text_object));
3836 }
3837 }
3838
3839 matches.advance();
3840 }
3841 })
3842 }
3843
    /// Returns the enclosing bracket range pairs that contain the given range.
3845 pub fn enclosing_bracket_ranges<T: ToOffset>(
3846 &self,
3847 range: Range<T>,
3848 ) -> impl Iterator<Item = BracketMatch> + '_ {
3849 let range = range.start.to_offset(self)..range.end.to_offset(self);
3850
3851 self.bracket_ranges(range.clone()).filter(move |pair| {
3852 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3853 })
3854 }
3855
    /// Returns the smallest enclosing pair of bracket ranges that contains the given range,
    /// or `None` if no bracket pair contains it.
    ///
    /// An optional `range_filter` can be passed to restrict which bracket pairs are considered.
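    ///
    /// A minimal usage sketch (hypothetical `snapshot` and `selection` offsets; not compiled as a doctest):
    ///
    /// ```ignore
    /// // Only consider pairs whose brackets are not directly adjacent.
    /// let filter = |open: Range<usize>, close: Range<usize>| open.end < close.start;
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(selection.clone(), Some(&filter))
    /// {
    ///     // `open` and `close` are the byte ranges of the bracket tokens themselves.
    /// }
    /// ```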
3859 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3860 &self,
3861 range: Range<T>,
3862 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3863 ) -> Option<(Range<usize>, Range<usize>)> {
3864 let range = range.start.to_offset(self)..range.end.to_offset(self);
3865
3866 // Get the ranges of the innermost pair of brackets.
3867 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3868
3869 for pair in self.enclosing_bracket_ranges(range.clone()) {
3870 if let Some(range_filter) = range_filter {
3871 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3872 continue;
3873 }
3874 }
3875
3876 let len = pair.close_range.end - pair.open_range.start;
3877
3878 if let Some((existing_open, existing_close)) = &result {
3879 let existing_len = existing_close.end - existing_open.start;
3880 if len > existing_len {
3881 continue;
3882 }
3883 }
3884
3885 result = Some((pair.open_range, pair.close_range));
3886 }
3887
3888 result
3889 }
3890
    /// Returns the byte ranges of any matches of the redaction query.
    ///
    /// The buffer can be associated with multiple languages, and the redaction query associated with each
    /// will be run on the relevant section of the buffer.
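    ///
    /// A minimal usage sketch (hypothetical `snapshot`; not compiled as a doctest):
    ///
    /// ```ignore
    /// // Collect the ranges that should be masked before rendering or sharing the text.
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```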
3894 pub fn redacted_ranges<T: ToOffset>(
3895 &self,
3896 range: Range<T>,
3897 ) -> impl Iterator<Item = Range<usize>> + '_ {
3898 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3899 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3900 grammar
3901 .redactions_config
3902 .as_ref()
3903 .map(|config| &config.query)
3904 });
3905
3906 let configs = syntax_matches
3907 .grammars()
3908 .iter()
3909 .map(|grammar| grammar.redactions_config.as_ref())
3910 .collect::<Vec<_>>();
3911
3912 iter::from_fn(move || {
3913 let redacted_range = syntax_matches
3914 .peek()
3915 .and_then(|mat| {
3916 configs[mat.grammar_index].and_then(|config| {
3917 mat.captures
3918 .iter()
3919 .find(|capture| capture.index == config.redaction_capture_ix)
3920 })
3921 })
3922 .map(|mat| mat.node.byte_range());
3923 syntax_matches.advance();
3924 redacted_range
3925 })
3926 }
3927
    /// Returns the ranges of language injections intersecting the given range,
    /// along with the injected [`Language`] for each.
    pub fn injections_intersecting_range<T: ToOffset>(
3929 &self,
3930 range: Range<T>,
3931 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3932 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3933
3934 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3935 grammar
3936 .injection_config
3937 .as_ref()
3938 .map(|config| &config.query)
3939 });
3940
3941 let configs = syntax_matches
3942 .grammars()
3943 .iter()
3944 .map(|grammar| grammar.injection_config.as_ref())
3945 .collect::<Vec<_>>();
3946
3947 iter::from_fn(move || {
3948 let ranges = syntax_matches.peek().and_then(|mat| {
3949 let config = &configs[mat.grammar_index]?;
3950 let content_capture_range = mat.captures.iter().find_map(|capture| {
3951 if capture.index == config.content_capture_ix {
3952 Some(capture.node.byte_range())
3953 } else {
3954 None
3955 }
3956 })?;
3957 let language = self.language_at(content_capture_range.start)?;
3958 Some((content_capture_range, language))
3959 });
3960 syntax_matches.advance();
3961 ranges
3962 })
3963 }
3964
    /// Returns the runnable ranges (as defined by each language's runnables query)
    /// intersecting the given offset range.
    pub fn runnable_ranges(
3966 &self,
3967 offset_range: Range<usize>,
3968 ) -> impl Iterator<Item = RunnableRange> + '_ {
3969 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3970 grammar.runnable_config.as_ref().map(|config| &config.query)
3971 });
3972
3973 let test_configs = syntax_matches
3974 .grammars()
3975 .iter()
3976 .map(|grammar| grammar.runnable_config.as_ref())
3977 .collect::<Vec<_>>();
3978
3979 iter::from_fn(move || {
3980 loop {
3981 let mat = syntax_matches.peek()?;
3982
3983 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3984 let mut run_range = None;
3985 let full_range = mat.captures.iter().fold(
3986 Range {
3987 start: usize::MAX,
3988 end: 0,
3989 },
3990 |mut acc, next| {
3991 let byte_range = next.node.byte_range();
3992 if acc.start > byte_range.start {
3993 acc.start = byte_range.start;
3994 }
3995 if acc.end < byte_range.end {
3996 acc.end = byte_range.end;
3997 }
3998 acc
3999 },
4000 );
4001 if full_range.start > full_range.end {
4002 // We did not find a full spanning range of this match.
4003 return None;
4004 }
4005 let extra_captures: SmallVec<[_; 1]> =
4006 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4007 test_configs
4008 .extra_captures
4009 .get(capture.index as usize)
4010 .cloned()
4011 .and_then(|tag_name| match tag_name {
4012 RunnableCapture::Named(name) => {
4013 Some((capture.node.byte_range(), name))
4014 }
4015 RunnableCapture::Run => {
4016 let _ = run_range.insert(capture.node.byte_range());
4017 None
4018 }
4019 })
4020 }));
4021 let run_range = run_range?;
4022 let tags = test_configs
4023 .query
4024 .property_settings(mat.pattern_index)
4025 .iter()
4026 .filter_map(|property| {
4027 if *property.key == *"tag" {
4028 property
4029 .value
4030 .as_ref()
4031 .map(|value| RunnableTag(value.to_string().into()))
4032 } else {
4033 None
4034 }
4035 })
4036 .collect();
4037 let extra_captures = extra_captures
4038 .into_iter()
4039 .map(|(range, name)| {
4040 (
4041 name.to_string(),
4042 self.text_for_range(range.clone()).collect::<String>(),
4043 )
4044 })
4045 .collect();
4046 // All tags should have the same range.
4047 Some(RunnableRange {
4048 run_range,
4049 full_range,
4050 runnable: Runnable {
4051 tags,
4052 language: mat.language,
4053 buffer: self.remote_id(),
4054 },
4055 extra_captures,
4056 buffer_id: self.remote_id(),
4057 })
4058 });
4059
4060 syntax_matches.advance();
4061 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. However, we don't want
                    // to end this iterator just because a match lacked a run marker, so in that case we
                    // loop around and try the next match.
4064 return test_range;
4065 }
4066 }
4067 })
4068 }
4069
    /// Returns the selections of peers intersecting the given range, optionally
    /// including this replica's own selections when `include_local` is true.
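    ///
    /// A minimal usage sketch (hypothetical `snapshot`; assumes `Anchor::MIN..Anchor::MAX` spans the whole buffer; not compiled as a doctest):
    ///
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     for selection in selections {
    ///         // `selection.start` and `selection.end` are anchors into this buffer.
    ///     }
    /// }
    /// ```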
4071 #[allow(clippy::type_complexity)]
4072 pub fn selections_in_range(
4073 &self,
4074 range: Range<Anchor>,
4075 include_local: bool,
4076 ) -> impl Iterator<
4077 Item = (
4078 ReplicaId,
4079 bool,
4080 CursorShape,
4081 impl Iterator<Item = &Selection<Anchor>> + '_,
4082 ),
4083 > + '_ {
4084 self.remote_selections
4085 .iter()
4086 .filter(move |(replica_id, set)| {
4087 (include_local || **replica_id != self.text.replica_id())
4088 && !set.selections.is_empty()
4089 })
4090 .map(move |(replica_id, set)| {
4091 let start_ix = match set.selections.binary_search_by(|probe| {
4092 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4093 }) {
4094 Ok(ix) | Err(ix) => ix,
4095 };
4096 let end_ix = match set.selections.binary_search_by(|probe| {
4097 probe.start.cmp(&range.end, self).then(Ordering::Less)
4098 }) {
4099 Ok(ix) | Err(ix) => ix,
4100 };
4101
4102 (
4103 *replica_id,
4104 set.line_mode,
4105 set.cursor_shape,
4106 set.selections[start_ix..end_ix].iter(),
4107 )
4108 })
4109 }
4110
    /// Returns whether the buffer contains any diagnostics.
4112 pub fn has_diagnostics(&self) -> bool {
4113 !self.diagnostics.is_empty()
4114 }
4115
4116 /// Returns all the diagnostics intersecting the given range.
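    ///
    /// A minimal usage sketch (hypothetical `snapshot`; not compiled as a doctest):
    ///
    /// ```ignore
    /// // Count the errors within a byte range of the buffer.
    /// let error_count = snapshot
    ///     .diagnostics_in_range::<_, usize>(0..snapshot.len(), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .count();
    /// ```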
4117 pub fn diagnostics_in_range<'a, T, O>(
4118 &'a self,
4119 search_range: Range<T>,
4120 reversed: bool,
4121 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4122 where
4123 T: 'a + Clone + ToOffset,
4124 O: 'a + FromAnchor,
4125 {
4126 let mut iterators: Vec<_> = self
4127 .diagnostics
4128 .iter()
4129 .map(|(_, collection)| {
4130 collection
4131 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4132 .peekable()
4133 })
4134 .collect();
4135
4136 std::iter::from_fn(move || {
4137 let (next_ix, _) = iterators
4138 .iter_mut()
4139 .enumerate()
4140 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4141 .min_by(|(_, a), (_, b)| {
4142 let cmp = a
4143 .range
4144 .start
4145 .cmp(&b.range.start, self)
4146 // when range is equal, sort by diagnostic severity
4147 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4148 // and stabilize order with group_id
4149 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4150 if reversed { cmp.reverse() } else { cmp }
4151 })?;
4152 iterators[next_ix]
4153 .next()
4154 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4155 diagnostic,
4156 range: FromAnchor::from_anchor(&range.start, self)
4157 ..FromAnchor::from_anchor(&range.end, self),
4158 })
4159 })
4160 }
4161
4162 /// Returns all the diagnostic groups associated with the given
4163 /// language server ID. If no language server ID is provided,
4164 /// all diagnostics groups are returned.
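    ///
    /// A minimal usage sketch (hypothetical `snapshot`; not compiled as a doctest):
    ///
    /// ```ignore
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     // `primary.range` and `primary.diagnostic.message` describe the group's main entry.
    /// }
    /// ```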
4165 pub fn diagnostic_groups(
4166 &self,
4167 language_server_id: Option<LanguageServerId>,
4168 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4169 let mut groups = Vec::new();
4170
4171 if let Some(language_server_id) = language_server_id {
4172 if let Ok(ix) = self
4173 .diagnostics
4174 .binary_search_by_key(&language_server_id, |e| e.0)
4175 {
4176 self.diagnostics[ix]
4177 .1
4178 .groups(language_server_id, &mut groups, self);
4179 }
4180 } else {
4181 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4182 diagnostics.groups(*language_server_id, &mut groups, self);
4183 }
4184 }
4185
4186 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4187 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4188 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4189 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4190 });
4191
4192 groups
4193 }
4194
4195 /// Returns an iterator over the diagnostics for the given group.
4196 pub fn diagnostic_group<O>(
4197 &self,
4198 group_id: usize,
4199 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4200 where
4201 O: FromAnchor + 'static,
4202 {
4203 self.diagnostics
4204 .iter()
4205 .flat_map(move |(_, set)| set.group(group_id, self))
4206 }
4207
4208 /// An integer version number that accounts for all updates besides
4209 /// the buffer's text itself (which is versioned via a version vector).
4210 pub fn non_text_state_update_count(&self) -> usize {
4211 self.non_text_state_update_count
4212 }
4213
    /// Returns a snapshot of the underlying file.
4215 pub fn file(&self) -> Option<&Arc<dyn File>> {
4216 self.file.as_ref()
4217 }
4218
    /// Resolves the path of the underlying file, relative to its worktree root,
    /// or including the worktree root name when `include_root` is true.
4220 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4221 if let Some(file) = self.file() {
4222 if file.path().file_name().is_none() || include_root {
4223 Some(file.full_path(cx))
4224 } else {
4225 Some(file.path().to_path_buf())
4226 }
4227 } else {
4228 None
4229 }
4230 }
4231
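    /// Returns the words in the given buffer offset range, keyed by their text and mapped
    /// to their anchor ranges, optionally filtered by a fuzzy query (see [`WordsQuery`]).
    ///
    /// A minimal usage sketch (hypothetical `snapshot`; not compiled as a doctest):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, anchor_range) in words {
    ///     // Each `word` matches the fuzzy query; `anchor_range` locates it in the buffer.
    /// }
    /// ```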
4232 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4233 let query_str = query.fuzzy_contents;
4234 if query_str.map_or(false, |query| query.is_empty()) {
4235 return BTreeMap::default();
4236 }
4237
4238 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4239 language,
4240 override_id: None,
4241 }));
4242
4243 let mut query_ix = 0;
4244 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4245 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4246
4247 let mut words = BTreeMap::default();
4248 let mut current_word_start_ix = None;
4249 let mut chunk_ix = query.range.start;
4250 for chunk in self.chunks(query.range, false) {
4251 for (i, c) in chunk.text.char_indices() {
4252 let ix = chunk_ix + i;
4253 if classifier.is_word(c) {
4254 if current_word_start_ix.is_none() {
4255 current_word_start_ix = Some(ix);
4256 }
4257
4258 if let Some(query_chars) = &query_chars {
4259 if query_ix < query_len {
4260 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4261 query_ix += 1;
4262 }
4263 }
4264 }
4265 continue;
4266 } else if let Some(word_start) = current_word_start_ix.take() {
4267 if query_ix == query_len {
4268 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4269 let mut word_text = self.text_for_range(word_start..ix).peekable();
4270 let first_char = word_text
4271 .peek()
4272 .and_then(|first_chunk| first_chunk.chars().next());
                        // As a heuristic to reduce useless completions, skip empty words and
                        // "words" that start with a digit.
4274 if !query.skip_digits
4275 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4276 {
4277 words.insert(word_text.collect(), word_range);
4278 }
4279 }
4280 }
4281 query_ix = 0;
4282 }
4283 chunk_ix += chunk.text.len();
4284 }
4285
4286 words
4287 }
4288}
4289
4290pub struct WordsQuery<'a> {
    /// Only return words that contain all of this fuzzy string's characters, in order (case-insensitive).
4292 pub fuzzy_contents: Option<&'a str>,
4293 /// Skips words that start with a digit.
4294 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4296 pub range: Range<usize>,
4297}
4298
4299fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4300 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4301}
4302
4303fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4304 let mut result = IndentSize::spaces(0);
4305 for c in text {
4306 let kind = match c {
4307 ' ' => IndentKind::Space,
4308 '\t' => IndentKind::Tab,
4309 _ => break,
4310 };
4311 if result.len == 0 {
4312 result.kind = kind;
4313 }
4314 result.len += 1;
4315 }
4316 result
4317}
4318
4319impl Clone for BufferSnapshot {
4320 fn clone(&self) -> Self {
4321 Self {
4322 text: self.text.clone(),
4323 syntax: self.syntax.clone(),
4324 file: self.file.clone(),
4325 remote_selections: self.remote_selections.clone(),
4326 diagnostics: self.diagnostics.clone(),
4327 language: self.language.clone(),
4328 non_text_state_update_count: self.non_text_state_update_count,
4329 }
4330 }
4331}
4332
4333impl Deref for BufferSnapshot {
4334 type Target = text::BufferSnapshot;
4335
4336 fn deref(&self) -> &Self::Target {
4337 &self.text
4338 }
4339}
4340
4341unsafe impl Send for BufferChunks<'_> {}
4342
4343impl<'a> BufferChunks<'a> {
4344 pub(crate) fn new(
4345 text: &'a Rope,
4346 range: Range<usize>,
4347 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4348 diagnostics: bool,
4349 buffer_snapshot: Option<&'a BufferSnapshot>,
4350 ) -> Self {
4351 let mut highlights = None;
4352 if let Some((captures, highlight_maps)) = syntax {
4353 highlights = Some(BufferChunkHighlights {
4354 captures,
4355 next_capture: None,
4356 stack: Default::default(),
4357 highlight_maps,
4358 })
4359 }
4360
4361 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4362 let chunks = text.chunks_in_range(range.clone());
4363
4364 let mut this = BufferChunks {
4365 range,
4366 buffer_snapshot,
4367 chunks,
4368 diagnostic_endpoints,
4369 error_depth: 0,
4370 warning_depth: 0,
4371 information_depth: 0,
4372 hint_depth: 0,
4373 unnecessary_depth: 0,
4374 highlights,
4375 };
4376 this.initialize_diagnostic_endpoints();
4377 this
4378 }
4379
    /// Seeks to the given byte range in the buffer.
4381 pub fn seek(&mut self, range: Range<usize>) {
4382 let old_range = std::mem::replace(&mut self.range, range.clone());
4383 self.chunks.set_range(self.range.clone());
4384 if let Some(highlights) = self.highlights.as_mut() {
4385 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4386 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4387 highlights
4388 .stack
4389 .retain(|(end_offset, _)| *end_offset > range.start);
4390 if let Some(capture) = &highlights.next_capture {
4391 if range.start >= capture.node.start_byte() {
4392 let next_capture_end = capture.node.end_byte();
4393 if range.start < next_capture_end {
4394 highlights.stack.push((
4395 next_capture_end,
4396 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4397 ));
4398 }
4399 highlights.next_capture.take();
4400 }
4401 }
4402 } else if let Some(snapshot) = self.buffer_snapshot {
4403 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4404 *highlights = BufferChunkHighlights {
4405 captures,
4406 next_capture: None,
4407 stack: Default::default(),
4408 highlight_maps,
4409 };
4410 } else {
4411 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4412 // Seeking such BufferChunks is not supported.
4413 debug_assert!(
4414 false,
4415 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4416 );
4417 }
4418
4419 highlights.captures.set_byte_range(self.range.clone());
4420 self.initialize_diagnostic_endpoints();
4421 }
4422 }
4423
4424 fn initialize_diagnostic_endpoints(&mut self) {
4425 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4426 if let Some(buffer) = self.buffer_snapshot {
4427 let mut diagnostic_endpoints = Vec::new();
4428 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4429 diagnostic_endpoints.push(DiagnosticEndpoint {
4430 offset: entry.range.start,
4431 is_start: true,
4432 severity: entry.diagnostic.severity,
4433 is_unnecessary: entry.diagnostic.is_unnecessary,
4434 });
4435 diagnostic_endpoints.push(DiagnosticEndpoint {
4436 offset: entry.range.end,
4437 is_start: false,
4438 severity: entry.diagnostic.severity,
4439 is_unnecessary: entry.diagnostic.is_unnecessary,
4440 });
4441 }
4442 diagnostic_endpoints
4443 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4444 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4445 self.hint_depth = 0;
4446 self.error_depth = 0;
4447 self.warning_depth = 0;
4448 self.information_depth = 0;
4449 }
4450 }
4451 }
4452
4453 /// The current byte offset in the buffer.
4454 pub fn offset(&self) -> usize {
4455 self.range.start
4456 }
4457
4458 pub fn range(&self) -> Range<usize> {
4459 self.range.clone()
4460 }
4461
4462 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4463 let depth = match endpoint.severity {
4464 DiagnosticSeverity::ERROR => &mut self.error_depth,
4465 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4466 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4467 DiagnosticSeverity::HINT => &mut self.hint_depth,
4468 _ => return,
4469 };
4470 if endpoint.is_start {
4471 *depth += 1;
4472 } else {
4473 *depth -= 1;
4474 }
4475
4476 if endpoint.is_unnecessary {
4477 if endpoint.is_start {
4478 self.unnecessary_depth += 1;
4479 } else {
4480 self.unnecessary_depth -= 1;
4481 }
4482 }
4483 }
4484
4485 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4486 if self.error_depth > 0 {
4487 Some(DiagnosticSeverity::ERROR)
4488 } else if self.warning_depth > 0 {
4489 Some(DiagnosticSeverity::WARNING)
4490 } else if self.information_depth > 0 {
4491 Some(DiagnosticSeverity::INFORMATION)
4492 } else if self.hint_depth > 0 {
4493 Some(DiagnosticSeverity::HINT)
4494 } else {
4495 None
4496 }
4497 }
4498
4499 fn current_code_is_unnecessary(&self) -> bool {
4500 self.unnecessary_depth > 0
4501 }
4502}
4503
4504impl<'a> Iterator for BufferChunks<'a> {
4505 type Item = Chunk<'a>;
4506
4507 fn next(&mut self) -> Option<Self::Item> {
4508 let mut next_capture_start = usize::MAX;
4509 let mut next_diagnostic_endpoint = usize::MAX;
4510
4511 if let Some(highlights) = self.highlights.as_mut() {
4512 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4513 if *parent_capture_end <= self.range.start {
4514 highlights.stack.pop();
4515 } else {
4516 break;
4517 }
4518 }
4519
4520 if highlights.next_capture.is_none() {
4521 highlights.next_capture = highlights.captures.next();
4522 }
4523
4524 while let Some(capture) = highlights.next_capture.as_ref() {
4525 if self.range.start < capture.node.start_byte() {
4526 next_capture_start = capture.node.start_byte();
4527 break;
4528 } else {
4529 let highlight_id =
4530 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4531 highlights
4532 .stack
4533 .push((capture.node.end_byte(), highlight_id));
4534 highlights.next_capture = highlights.captures.next();
4535 }
4536 }
4537 }
4538
4539 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4540 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4541 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4542 if endpoint.offset <= self.range.start {
4543 self.update_diagnostic_depths(endpoint);
4544 diagnostic_endpoints.next();
4545 } else {
4546 next_diagnostic_endpoint = endpoint.offset;
4547 break;
4548 }
4549 }
4550 }
4551 self.diagnostic_endpoints = diagnostic_endpoints;
4552
4553 if let Some(chunk) = self.chunks.peek() {
4554 let chunk_start = self.range.start;
4555 let mut chunk_end = (self.chunks.offset() + chunk.len())
4556 .min(next_capture_start)
4557 .min(next_diagnostic_endpoint);
4558 let mut highlight_id = None;
4559 if let Some(highlights) = self.highlights.as_ref() {
4560 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4561 chunk_end = chunk_end.min(*parent_capture_end);
4562 highlight_id = Some(*parent_highlight_id);
4563 }
4564 }
4565
4566 let slice =
4567 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4568 self.range.start = chunk_end;
4569 if self.range.start == self.chunks.offset() + chunk.len() {
4570 self.chunks.next().unwrap();
4571 }
4572
4573 Some(Chunk {
4574 text: slice,
4575 syntax_highlight_id: highlight_id,
4576 diagnostic_severity: self.current_diagnostic_severity(),
4577 is_unnecessary: self.current_code_is_unnecessary(),
4578 ..Default::default()
4579 })
4580 } else {
4581 None
4582 }
4583 }
4584}
4585
4586impl operation_queue::Operation for Operation {
4587 fn lamport_timestamp(&self) -> clock::Lamport {
4588 match self {
4589 Operation::Buffer(_) => {
4590 unreachable!("buffer operations should never be deferred at this layer")
4591 }
4592 Operation::UpdateDiagnostics {
4593 lamport_timestamp, ..
4594 }
4595 | Operation::UpdateSelections {
4596 lamport_timestamp, ..
4597 }
4598 | Operation::UpdateCompletionTriggers {
4599 lamport_timestamp, ..
4600 } => *lamport_timestamp,
4601 }
4602 }
4603}
4604
4605impl Default for Diagnostic {
4606 fn default() -> Self {
4607 Self {
4608 source: Default::default(),
4609 code: None,
4610 code_description: None,
4611 severity: DiagnosticSeverity::ERROR,
4612 message: Default::default(),
4613 markdown: None,
4614 group_id: 0,
4615 is_primary: false,
4616 is_disk_based: false,
4617 is_unnecessary: false,
4618 data: None,
4619 }
4620 }
4621}
4622
4623impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4625 pub fn spaces(len: u32) -> Self {
4626 Self {
4627 len,
4628 kind: IndentKind::Space,
4629 }
4630 }
4631
4632 /// Returns an [`IndentSize`] representing a tab.
4633 pub fn tab() -> Self {
4634 Self {
4635 len: 1,
4636 kind: IndentKind::Tab,
4637 }
4638 }
4639
4640 /// An iterator over the characters represented by this [`IndentSize`].
4641 pub fn chars(&self) -> impl Iterator<Item = char> {
4642 iter::repeat(self.char()).take(self.len as usize)
4643 }
4644
4645 /// The character representation of this [`IndentSize`].
4646 pub fn char(&self) -> char {
4647 match self.kind {
4648 IndentKind::Space => ' ',
4649 IndentKind::Tab => '\t',
4650 }
4651 }
4652
4653 /// Consumes the current [`IndentSize`] and returns a new one that has
4654 /// been shrunk or enlarged by the given size along the given direction.
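    ///
    /// A minimal sketch of the expected behavior (not compiled as a doctest):
    ///
    /// ```ignore
    /// let indented = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indented.len, 8);
    /// let outdented = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(4));
    /// assert_eq!(outdented.len, 0);
    /// ```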
4655 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4656 match direction {
4657 Ordering::Less => {
4658 if self.kind == size.kind && self.len >= size.len {
4659 self.len -= size.len;
4660 }
4661 }
4662 Ordering::Equal => {}
4663 Ordering::Greater => {
4664 if self.len == 0 {
4665 self = size;
4666 } else if self.kind == size.kind {
4667 self.len += size.len;
4668 }
4669 }
4670 }
4671 self
4672 }
4673
    /// Returns the length of this indent in columns, with tabs expanded to the given tab size.
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4675 match self.kind {
4676 IndentKind::Space => self.len as usize,
4677 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4678 }
4679 }
4680}
4681
4682#[cfg(any(test, feature = "test-support"))]
4683pub struct TestFile {
4684 pub path: Arc<Path>,
4685 pub root_name: String,
4686 pub local_root: Option<PathBuf>,
4687}
4688
4689#[cfg(any(test, feature = "test-support"))]
4690impl File for TestFile {
4691 fn path(&self) -> &Arc<Path> {
4692 &self.path
4693 }
4694
4695 fn full_path(&self, _: &gpui::App) -> PathBuf {
4696 PathBuf::from(&self.root_name).join(self.path.as_ref())
4697 }
4698
4699 fn as_local(&self) -> Option<&dyn LocalFile> {
4700 if self.local_root.is_some() {
4701 Some(self)
4702 } else {
4703 None
4704 }
4705 }
4706
4707 fn disk_state(&self) -> DiskState {
4708 unimplemented!()
4709 }
4710
4711 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4712 self.path().file_name().unwrap_or(self.root_name.as_ref())
4713 }
4714
4715 fn worktree_id(&self, _: &App) -> WorktreeId {
4716 WorktreeId::from_usize(0)
4717 }
4718
4719 fn to_proto(&self, _: &App) -> rpc::proto::File {
4720 unimplemented!()
4721 }
4722
4723 fn is_private(&self) -> bool {
4724 false
4725 }
4726}
4727
4728#[cfg(any(test, feature = "test-support"))]
4729impl LocalFile for TestFile {
4730 fn abs_path(&self, _cx: &App) -> PathBuf {
4731 PathBuf::from(self.local_root.as_ref().unwrap())
4732 .join(&self.root_name)
4733 .join(self.path.as_ref())
4734 }
4735
4736 fn load(&self, _cx: &App) -> Task<Result<String>> {
4737 unimplemented!()
4738 }
4739
4740 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4741 unimplemented!()
4742 }
4743}
4744
/// Groups an ascending sequence of values into contiguous ranges, each containing
/// at most `max_len` values. For example, `[1, 2, 3, 5, 6]` with `max_len = 2`
/// yields `1..3`, `3..4`, and `5..7`.
pub(crate) fn contiguous_ranges(
4746 values: impl Iterator<Item = u32>,
4747 max_len: usize,
4748) -> impl Iterator<Item = Range<u32>> {
4749 let mut values = values;
4750 let mut current_range: Option<Range<u32>> = None;
4751 std::iter::from_fn(move || {
4752 loop {
4753 if let Some(value) = values.next() {
4754 if let Some(range) = &mut current_range {
4755 if value == range.end && range.len() < max_len {
4756 range.end += 1;
4757 continue;
4758 }
4759 }
4760
4761 let prev_range = current_range.clone();
4762 current_range = Some(value..(value + 1));
4763 if prev_range.is_some() {
4764 return prev_range;
4765 }
4766 } else {
4767 return current_range.take();
4768 }
4769 }
4770 })
4771}
4772
/// Classifies characters as word, whitespace, or punctuation characters, taking into
/// account the word characters configured for an optional [`LanguageScope`].
#[derive(Default, Debug)]
4774pub struct CharClassifier {
4775 scope: Option<LanguageScope>,
4776 for_completion: bool,
4777 ignore_punctuation: bool,
4778}
4779
4780impl CharClassifier {
4781 pub fn new(scope: Option<LanguageScope>) -> Self {
4782 Self {
4783 scope,
4784 for_completion: false,
4785 ignore_punctuation: false,
4786 }
4787 }
4788
4789 pub fn for_completion(self, for_completion: bool) -> Self {
4790 Self {
4791 for_completion,
4792 ..self
4793 }
4794 }
4795
4796 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4797 Self {
4798 ignore_punctuation,
4799 ..self
4800 }
4801 }
4802
4803 pub fn is_whitespace(&self, c: char) -> bool {
4804 self.kind(c) == CharKind::Whitespace
4805 }
4806
4807 pub fn is_word(&self, c: char) -> bool {
4808 self.kind(c) == CharKind::Word
4809 }
4810
4811 pub fn is_punctuation(&self, c: char) -> bool {
4812 self.kind(c) == CharKind::Punctuation
4813 }
4814
    /// Returns the [`CharKind`] of the given character, treating punctuation as
    /// word characters when `ignore_punctuation` is true.
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4816 if c.is_alphanumeric() || c == '_' {
4817 return CharKind::Word;
4818 }
4819
4820 if let Some(scope) = &self.scope {
4821 let characters = if self.for_completion {
4822 scope.completion_query_characters()
4823 } else {
4824 scope.word_characters()
4825 };
4826 if let Some(characters) = characters {
4827 if characters.contains(&c) {
4828 return CharKind::Word;
4829 }
4830 }
4831 }
4832
4833 if c.is_whitespace() {
4834 return CharKind::Whitespace;
4835 }
4836
4837 if ignore_punctuation {
4838 CharKind::Word
4839 } else {
4840 CharKind::Punctuation
4841 }
4842 }
4843
4844 pub fn kind(&self, c: char) -> CharKind {
4845 self.kind_with(c, self.ignore_punctuation)
4846 }
4847}
4848
4849/// Find all of the ranges of whitespace that occur at the ends of lines
4850/// in the given rope.
4851///
4852/// This could also be done with a regex search, but this implementation
4853/// avoids copying text.
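///
/// A minimal usage sketch (not compiled as a doctest; assumes `Rope` implements `From<&str>`):
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n    let x = 1;\t\n}\n");
/// let ranges = trailing_whitespace_ranges(&rope);
/// // `ranges` covers the spaces after `{` and the tab after `1;`.
/// ```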
4854pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4855 let mut ranges = Vec::new();
4856
4857 let mut offset = 0;
4858 let mut prev_chunk_trailing_whitespace_range = 0..0;
4859 for chunk in rope.chunks() {
4860 let mut prev_line_trailing_whitespace_range = 0..0;
4861 for (i, line) in chunk.split('\n').enumerate() {
4862 let line_end_offset = offset + line.len();
4863 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4864 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4865
4866 if i == 0 && trimmed_line_len == 0 {
4867 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4868 }
4869 if !prev_line_trailing_whitespace_range.is_empty() {
4870 ranges.push(prev_line_trailing_whitespace_range);
4871 }
4872
4873 offset = line_end_offset + 1;
4874 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4875 }
4876
4877 offset -= 1;
4878 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4879 }
4880
4881 if !prev_chunk_trailing_whitespace_range.is_empty() {
4882 ranges.push(prev_chunk_trailing_whitespace_range);
4883 }
4884
4885 ranges
4886}