1pub use crate::{
2 Grammar, Language, LanguageRegistry,
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto,
6};
7use crate::{
8 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
9 TreeSitterOptions,
10 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
11 language_settings::{LanguageSettings, language_settings},
12 outline::OutlineItem,
13 syntax_map::{
14 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
15 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
16 },
17 task_context::RunnableRange,
18 text_diff::text_diff,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] has permission to edit.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 pending_autoindent: Option<Task<()>>,
110 sync_parse_timeout: Duration,
111 syntax_map: Mutex<SyntaxMap>,
112 reparse: Option<Task<()>>,
113 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
114 non_text_state_update_count: usize,
115 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
116 remote_selections: TreeMap<ReplicaId, SelectionSet>,
117 diagnostics_timestamp: clock::Lamport,
118 completion_triggers: BTreeSet<String>,
119 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
120 completion_triggers_timestamp: clock::Lamport,
121 deferred_ops: OperationQueue<Operation>,
122 capability: Capability,
123 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
126 has_unsaved_edits: Cell<(clock::Global, bool)>,
127 change_bits: Vec<rc::Weak<Cell<bool>>>,
128 _subscriptions: Vec<gpui::Subscription>,
129}
130
131#[derive(Copy, Clone, Debug, PartialEq, Eq)]
132pub enum ParseStatus {
133 Idle,
134 Parsing,
135}
136
137struct BufferBranchState {
138 base_buffer: Entity<Buffer>,
139 merged_operations: Vec<Lamport>,
140}
141
142/// An immutable, cheaply cloneable representation of a fixed
143/// state of a buffer.
144pub struct BufferSnapshot {
145 pub text: text::BufferSnapshot,
146 pub(crate) syntax: SyntaxSnapshot,
147 file: Option<Arc<dyn File>>,
148 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
149 remote_selections: TreeMap<ReplicaId, SelectionSet>,
150 language: Option<Arc<Language>>,
151 non_text_state_update_count: usize,
152}
153
/// The kind and amount of indentation in a particular line. For now, this
/// assumes that all of a line's indentation uses the same character.
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
157pub struct IndentSize {
158 /// The number of bytes that comprise the indentation.
159 pub len: u32,
160 /// The kind of whitespace used for indentation.
161 pub kind: IndentKind,
162}
163
164/// A whitespace character that's used for indentation.
165#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
166pub enum IndentKind {
167 /// An ASCII space character.
168 #[default]
169 Space,
170 /// An ASCII tab character.
171 Tab,
172}
173
174/// The shape of a selection cursor.
175#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
176#[serde(rename_all = "snake_case")]
177pub enum CursorShape {
178 /// A vertical bar
179 #[default]
180 Bar,
181 /// A block that surrounds the following character
182 Block,
183 /// An underline that runs along the following character
184 Underline,
185 /// A box drawn around the following character
186 Hollow,
187}
188
189#[derive(Clone, Debug)]
190struct SelectionSet {
191 line_mode: bool,
192 cursor_shape: CursorShape,
193 selections: Arc<[Selection<Anchor>]>,
194 lamport_timestamp: clock::Lamport,
195}
196
197/// A diagnostic associated with a certain range of a buffer.
198#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
199pub struct Diagnostic {
200 /// The name of the service that produced this diagnostic.
201 pub source: Option<String>,
202 /// A machine-readable code that identifies this diagnostic.
203 pub code: Option<NumberOrString>,
204 pub code_description: Option<lsp::Url>,
205 /// Whether this diagnostic is a hint, warning, or error.
206 pub severity: DiagnosticSeverity,
207 /// The human-readable message associated with this diagnostic.
208 pub message: String,
    /// The human-readable message, in Markdown format, if any.
210 pub markdown: Option<String>,
211 /// An id that identifies the group to which this diagnostic belongs.
212 ///
213 /// When a language server produces a diagnostic with
214 /// one or more associated diagnostics, those diagnostics are all
215 /// assigned a single group ID.
216 pub group_id: usize,
217 /// Whether this diagnostic is the primary diagnostic for its group.
218 ///
219 /// In a given group, the primary diagnostic is the top-level diagnostic
220 /// returned by the language server. The non-primary diagnostics are the
221 /// associated diagnostics.
222 pub is_primary: bool,
223 /// Whether this diagnostic is considered to originate from an analysis of
224 /// files on disk, as opposed to any unsaved buffer contents. This is a
225 /// property of a given diagnostic source, and is configured for a given
226 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
227 /// for the language server.
228 pub is_disk_based: bool,
229 /// Whether this diagnostic marks unnecessary code.
230 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic. Passed back
    /// to the language server when code actions are requested for this diagnostic.
232 pub data: Option<Value>,
233 /// Whether to underline the corresponding text range in the editor.
234 pub underline: bool,
235}
236
237/// An operation used to synchronize this buffer with its other replicas.
238#[derive(Clone, Debug, PartialEq)]
239pub enum Operation {
240 /// A text operation.
241 Buffer(text::Operation),
242
243 /// An update to the buffer's diagnostics.
244 UpdateDiagnostics {
245 /// The id of the language server that produced the new diagnostics.
246 server_id: LanguageServerId,
247 /// The diagnostics.
248 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
249 /// The buffer's lamport timestamp.
250 lamport_timestamp: clock::Lamport,
251 },
252
253 /// An update to the most recent selections in this buffer.
254 UpdateSelections {
255 /// The selections.
256 selections: Arc<[Selection<Anchor>]>,
257 /// The buffer's lamport timestamp.
258 lamport_timestamp: clock::Lamport,
259 /// Whether the selections are in 'line mode'.
260 line_mode: bool,
261 /// The [`CursorShape`] associated with these selections.
262 cursor_shape: CursorShape,
263 },
264
265 /// An update to the characters that should trigger autocompletion
266 /// for this buffer.
267 UpdateCompletionTriggers {
268 /// The characters that trigger autocompletion.
269 triggers: Vec<String>,
270 /// The buffer's lamport timestamp.
271 lamport_timestamp: clock::Lamport,
272 /// The language server ID.
273 server_id: LanguageServerId,
274 },
275}
276
277/// An event that occurs in a buffer.
278#[derive(Clone, Debug, PartialEq)]
279pub enum BufferEvent {
280 /// The buffer was changed in a way that must be
281 /// propagated to its other replicas.
282 Operation {
283 operation: Operation,
284 is_local: bool,
285 },
286 /// The buffer was edited.
287 Edited,
288 /// The buffer's `dirty` bit changed.
289 DirtyChanged,
290 /// The buffer was saved.
291 Saved,
292 /// The buffer's file was changed on disk.
293 FileHandleChanged,
294 /// The buffer was reloaded.
295 Reloaded,
    /// The buffer needs to be reloaded.
297 ReloadNeeded,
298 /// The buffer's language was changed.
299 LanguageChanged,
300 /// The buffer's syntax trees were updated.
301 Reparsed,
302 /// The buffer's diagnostics were updated.
303 DiagnosticsUpdated,
304 /// The buffer gained or lost editing capabilities.
305 CapabilityChanged,
306 /// The buffer was explicitly requested to close.
307 Closed,
308 /// The buffer was discarded when closing.
309 Discarded,
310}
311
312/// The file associated with a buffer.
313pub trait File: Send + Sync + Any {
314 /// Returns the [`LocalFile`] associated with this file, if the
315 /// file is local.
316 fn as_local(&self) -> Option<&dyn LocalFile>;
317
318 /// Returns whether this file is local.
319 fn is_local(&self) -> bool {
320 self.as_local().is_some()
321 }
322
323 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
324 /// only available in some states, such as modification time.
325 fn disk_state(&self) -> DiskState;
326
327 /// Returns the path of this file relative to the worktree's root directory.
328 fn path(&self) -> &Arc<Path>;
329
330 /// Returns the path of this file relative to the worktree's parent directory (this means it
331 /// includes the name of the worktree's root folder).
332 fn full_path(&self, cx: &App) -> PathBuf;
333
334 /// Returns the last component of this handle's absolute path. If this handle refers to the root
335 /// of its worktree, then this method will return the name of the worktree itself.
336 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
337
338 /// Returns the id of the worktree to which this file belongs.
339 ///
340 /// This is needed for looking up project-specific settings.
341 fn worktree_id(&self, cx: &App) -> WorktreeId;
342
343 /// Converts this file into a protobuf message.
344 fn to_proto(&self, cx: &App) -> rpc::proto::File;
345
    /// Returns whether Zed considers this to be a private file.
347 fn is_private(&self) -> bool;
348}
349
350/// The file's storage status - whether it's stored (`Present`), and if so when it was last
351/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
352/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
353/// indicator for new files.
354#[derive(Copy, Clone, Debug, PartialEq)]
355pub enum DiskState {
356 /// File created in Zed that has not been saved.
357 New,
358 /// File present on the filesystem.
359 Present { mtime: MTime },
360 /// Deleted file that was previously present.
361 Deleted,
362}
363
364impl DiskState {
365 /// Returns the file's last known modification time on disk.
366 pub fn mtime(self) -> Option<MTime> {
367 match self {
368 DiskState::New => None,
369 DiskState::Present { mtime } => Some(mtime),
370 DiskState::Deleted => None,
371 }
372 }
373
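    /// Returns whether the file currently exists on disk.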
374 pub fn exists(&self) -> bool {
375 match self {
376 DiskState::New => false,
377 DiskState::Present { .. } => true,
378 DiskState::Deleted => false,
379 }
380 }
381}
382
383/// The file associated with a buffer, in the case where the file is on the local disk.
384pub trait LocalFile: File {
    /// Returns the absolute path of this file.
386 fn abs_path(&self, cx: &App) -> PathBuf;
387
388 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
389 fn load(&self, cx: &App) -> Task<Result<String>>;
390
391 /// Loads the file's contents from disk.
392 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
393}
394
395/// The auto-indent behavior associated with an editing operation.
396/// For some editing operations, each affected line of text has its
397/// indentation recomputed. For other operations, the entire block
398/// of edited text is adjusted uniformly.
399#[derive(Clone, Debug)]
400pub enum AutoindentMode {
401 /// Indent each line of inserted text.
402 EachLine,
403 /// Apply the same indentation adjustment to all of the lines
404 /// in a given insertion.
405 Block {
406 /// The original indentation column of the first line of each
407 /// insertion, if it has been copied.
408 ///
409 /// Knowing this makes it possible to preserve the relative indentation
410 /// of every line in the insertion from when it was copied.
411 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is then auto-indented to column `b`, then every other line
        /// of the insertion will have its indentation adjusted by `b - a` columns.
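        ///
        /// For example, if a copied block's first line originally sat at column 4
        /// (`a = 4`) and auto-indent places that first line at column 8 (`b = 8`),
        /// then every other line of the insertion is shifted right by `b - a = 4`
        /// columns, preserving the block's internal structure.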
415 original_indent_columns: Vec<Option<u32>>,
416 },
417}
418
419#[derive(Clone)]
420struct AutoindentRequest {
421 before_edit: BufferSnapshot,
422 entries: Vec<AutoindentRequestEntry>,
423 is_block_mode: bool,
424 ignore_empty_lines: bool,
425}
426
427#[derive(Debug, Clone)]
428struct AutoindentRequestEntry {
429 /// A range of the buffer whose indentation should be adjusted.
430 range: Range<Anchor>,
431 /// Whether or not these lines should be considered brand new, for the
432 /// purpose of auto-indent. When text is not new, its indentation will
433 /// only be adjusted if the suggested indentation level has *changed*
434 /// since the edit was made.
435 first_line_is_new: bool,
436 indent_size: IndentSize,
437 original_indent_column: Option<u32>,
438}
439
440#[derive(Debug)]
441struct IndentSuggestion {
442 basis_row: u32,
443 delta: Ordering,
444 within_error: bool,
445}
446
447struct BufferChunkHighlights<'a> {
448 captures: SyntaxMapCaptures<'a>,
449 next_capture: Option<SyntaxMapCapture<'a>>,
450 stack: Vec<(usize, HighlightId)>,
451 highlight_maps: Vec<HighlightMap>,
452}
453
454/// An iterator that yields chunks of a buffer's text, along with their
455/// syntax highlights and diagnostic status.
456pub struct BufferChunks<'a> {
457 buffer_snapshot: Option<&'a BufferSnapshot>,
458 range: Range<usize>,
459 chunks: text::Chunks<'a>,
460 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
461 error_depth: usize,
462 warning_depth: usize,
463 information_depth: usize,
464 hint_depth: usize,
465 unnecessary_depth: usize,
466 underline: bool,
467 highlights: Option<BufferChunkHighlights<'a>>,
468}
469
470/// A chunk of a buffer's text, along with its syntax highlight and
471/// diagnostic status.
472#[derive(Clone, Debug, Default)]
473pub struct Chunk<'a> {
474 /// The text of the chunk.
475 pub text: &'a str,
476 /// The syntax highlighting style of the chunk.
477 pub syntax_highlight_id: Option<HighlightId>,
478 /// The highlight style that has been applied to this chunk in
479 /// the editor.
480 pub highlight_style: Option<HighlightStyle>,
    /// The severity of the diagnostic associated with this chunk, if any.
482 pub diagnostic_severity: Option<DiagnosticSeverity>,
483 /// Whether this chunk of text is marked as unnecessary.
484 pub is_unnecessary: bool,
485 /// Whether this chunk of text was originally a tab character.
486 pub is_tab: bool,
487 /// Whether to underline the corresponding text range in the editor.
488 pub underline: bool,
489}
490
491/// A set of edits to a given version of a buffer, computed asynchronously.
492#[derive(Debug)]
493pub struct Diff {
494 pub base_version: clock::Global,
495 pub line_ending: LineEnding,
496 pub edits: Vec<(Range<usize>, Arc<str>)>,
497}
498
499#[derive(Debug, Clone, Copy)]
500pub(crate) struct DiagnosticEndpoint {
501 offset: usize,
502 is_start: bool,
503 underline: bool,
504 severity: DiagnosticSeverity,
505 is_unnecessary: bool,
506}
507
508/// A class of characters, used for characterizing a run of text.
509#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
510pub enum CharKind {
511 /// Whitespace.
512 Whitespace,
513 /// Punctuation.
514 Punctuation,
515 /// Word.
516 Word,
517}
518
/// A runnable is a set of data about a region that can be resolved into a task.
520pub struct Runnable {
521 pub tags: SmallVec<[RunnableTag; 1]>,
522 pub language: Arc<Language>,
523 pub buffer: BufferId,
524}
525
526#[derive(Default, Clone, Debug)]
527pub struct HighlightedText {
528 pub text: SharedString,
529 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
530}
531
532#[derive(Default, Debug)]
533struct HighlightedTextBuilder {
534 pub text: String,
535 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
536}
537
538impl HighlightedText {
539 pub fn from_buffer_range<T: ToOffset>(
540 range: Range<T>,
541 snapshot: &text::BufferSnapshot,
542 syntax_snapshot: &SyntaxSnapshot,
543 override_style: Option<HighlightStyle>,
544 syntax_theme: &SyntaxTheme,
545 ) -> Self {
546 let mut highlighted_text = HighlightedTextBuilder::default();
547 highlighted_text.add_text_from_buffer_range(
548 range,
549 snapshot,
550 syntax_snapshot,
551 override_style,
552 syntax_theme,
553 );
554 highlighted_text.build()
555 }
556
557 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
558 gpui::StyledText::new(self.text.clone())
559 .with_default_highlights(default_style, self.highlights.iter().cloned())
560 }
561
    /// Returns a preview of the first line, trimming leading whitespace up to the
    /// first highlight (if any), along with a boolean indicating whether more lines follow.
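    ///
    /// For example, given the text `"    let x = 1;\nlet y = 2;"` with no
    /// highlights, this returns a preview containing `"let x = 1;"` together
    /// with `true`, since another line follows.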
564 pub fn first_line_preview(self) -> (Self, bool) {
565 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
566 let first_line = &self.text[..newline_ix];
567
568 // Trim leading whitespace, unless an edit starts prior to it.
569 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
570 if let Some((first_highlight_range, _)) = self.highlights.first() {
571 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
572 }
573
574 let preview_text = &first_line[preview_start_ix..];
575 let preview_highlights = self
576 .highlights
577 .into_iter()
578 .take_while(|(range, _)| range.start < newline_ix)
579 .filter_map(|(mut range, highlight)| {
580 range.start = range.start.saturating_sub(preview_start_ix);
581 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
582 if range.is_empty() {
583 None
584 } else {
585 Some((range, highlight))
586 }
587 });
588
589 let preview = Self {
590 text: SharedString::new(preview_text),
591 highlights: preview_highlights.collect(),
592 };
593
594 (preview, self.text.len() > newline_ix)
595 }
596}
597
598impl HighlightedTextBuilder {
599 pub fn build(self) -> HighlightedText {
600 HighlightedText {
601 text: self.text.into(),
602 highlights: self.highlights,
603 }
604 }
605
606 pub fn add_text_from_buffer_range<T: ToOffset>(
607 &mut self,
608 range: Range<T>,
609 snapshot: &text::BufferSnapshot,
610 syntax_snapshot: &SyntaxSnapshot,
611 override_style: Option<HighlightStyle>,
612 syntax_theme: &SyntaxTheme,
613 ) {
614 let range = range.to_offset(snapshot);
615 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
616 let start = self.text.len();
617 self.text.push_str(chunk.text);
618 let end = self.text.len();
619
620 if let Some(mut highlight_style) = chunk
621 .syntax_highlight_id
622 .and_then(|id| id.style(syntax_theme))
623 {
624 if let Some(override_style) = override_style {
625 highlight_style.highlight(override_style);
626 }
627 self.highlights.push((start..end, highlight_style));
628 } else if let Some(override_style) = override_style {
629 self.highlights.push((start..end, override_style));
630 }
631 }
632 }
633
634 fn highlighted_chunks<'a>(
635 range: Range<usize>,
636 snapshot: &'a text::BufferSnapshot,
637 syntax_snapshot: &'a SyntaxSnapshot,
638 ) -> BufferChunks<'a> {
639 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
640 grammar.highlights_query.as_ref()
641 });
642
643 let highlight_maps = captures
644 .grammars()
645 .iter()
646 .map(|grammar| grammar.highlight_map())
647 .collect();
648
649 BufferChunks::new(
650 snapshot.as_rope(),
651 range,
652 Some((captures, highlight_maps)),
653 false,
654 None,
655 )
656 }
657}
658
659#[derive(Clone)]
660pub struct EditPreview {
661 old_snapshot: text::BufferSnapshot,
662 applied_edits_snapshot: text::BufferSnapshot,
663 syntax_snapshot: SyntaxSnapshot,
664}
665
666impl EditPreview {
667 pub fn highlight_edits(
668 &self,
669 current_snapshot: &BufferSnapshot,
670 edits: &[(Range<Anchor>, String)],
671 include_deletions: bool,
672 cx: &App,
673 ) -> HighlightedText {
674 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
675 return HighlightedText::default();
676 };
677
678 let mut highlighted_text = HighlightedTextBuilder::default();
679
680 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
681
682 let insertion_highlight_style = HighlightStyle {
683 background_color: Some(cx.theme().status().created_background),
684 ..Default::default()
685 };
686 let deletion_highlight_style = HighlightStyle {
687 background_color: Some(cx.theme().status().deleted_background),
688 ..Default::default()
689 };
690 let syntax_theme = cx.theme().syntax();
691
692 for (range, edit_text) in edits {
693 let edit_new_end_in_preview_snapshot = range
694 .end
695 .bias_right(&self.old_snapshot)
696 .to_offset(&self.applied_edits_snapshot);
697 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
698
699 let unchanged_range_in_preview_snapshot =
700 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
701 if !unchanged_range_in_preview_snapshot.is_empty() {
702 highlighted_text.add_text_from_buffer_range(
703 unchanged_range_in_preview_snapshot,
704 &self.applied_edits_snapshot,
705 &self.syntax_snapshot,
706 None,
707 &syntax_theme,
708 );
709 }
710
711 let range_in_current_snapshot = range.to_offset(current_snapshot);
712 if include_deletions && !range_in_current_snapshot.is_empty() {
713 highlighted_text.add_text_from_buffer_range(
714 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
717 Some(deletion_highlight_style),
718 &syntax_theme,
719 );
720 }
721
722 if !edit_text.is_empty() {
723 highlighted_text.add_text_from_buffer_range(
724 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
725 &self.applied_edits_snapshot,
726 &self.syntax_snapshot,
727 Some(insertion_highlight_style),
728 &syntax_theme,
729 );
730 }
731
732 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
733 }
734
735 highlighted_text.add_text_from_buffer_range(
736 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
737 &self.applied_edits_snapshot,
738 &self.syntax_snapshot,
739 None,
740 &syntax_theme,
741 );
742
743 highlighted_text.build()
744 }
745
746 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
747 let (first, _) = edits.first()?;
748 let (last, _) = edits.last()?;
749
750 let start = first
751 .start
752 .bias_left(&self.old_snapshot)
753 .to_point(&self.applied_edits_snapshot);
754 let end = last
755 .end
756 .bias_right(&self.old_snapshot)
757 .to_point(&self.applied_edits_snapshot);
758
759 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
760 let range = Point::new(start.row, 0)
761 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
762
763 Some(range.to_offset(&self.applied_edits_snapshot))
764 }
765}
766
767#[derive(Clone, Debug, PartialEq, Eq)]
768pub struct BracketMatch {
769 pub open_range: Range<usize>,
770 pub close_range: Range<usize>,
771 pub newline_only: bool,
772}
773
774impl Buffer {
775 /// Create a new buffer with the given base text.
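    ///
    /// A minimal usage sketch (assuming a `gpui` app context `cx` that can
    /// create entities is in scope):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```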
776 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
777 Self::build(
778 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
779 None,
780 Capability::ReadWrite,
781 )
782 }
783
784 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
785 pub fn local_normalized(
786 base_text_normalized: Rope,
787 line_ending: LineEnding,
788 cx: &Context<Self>,
789 ) -> Self {
790 Self::build(
791 TextBuffer::new_normalized(
792 0,
793 cx.entity_id().as_non_zero_u64().into(),
794 line_ending,
795 base_text_normalized,
796 ),
797 None,
798 Capability::ReadWrite,
799 )
800 }
801
802 /// Create a new buffer that is a replica of a remote buffer.
803 pub fn remote(
804 remote_id: BufferId,
805 replica_id: ReplicaId,
806 capability: Capability,
807 base_text: impl Into<String>,
808 ) -> Self {
809 Self::build(
810 TextBuffer::new(replica_id, remote_id, base_text.into()),
811 None,
812 capability,
813 )
814 }
815
816 /// Create a new buffer that is a replica of a remote buffer, populating its
817 /// state from the given protobuf message.
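    ///
    /// A hedged sketch of reconstructing a replica from another buffer's
    /// serialized state (assuming an async, test-like context; error handling
    /// is elided):
    ///
    /// ```ignore
    /// let state = buffer.read(cx).to_proto(cx);
    /// let ops = buffer.read(cx).serialize_ops(None, cx).await;
    /// let replica = cx.new(|cx| {
    ///     let mut buffer = Buffer::from_proto(1, Capability::ReadWrite, state, None).unwrap();
    ///     buffer.apply_ops(
    ///         ops.into_iter()
    ///             .map(|op| proto::deserialize_operation(op).unwrap()),
    ///         cx,
    ///     );
    ///     buffer
    /// });
    /// ```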
818 pub fn from_proto(
819 replica_id: ReplicaId,
820 capability: Capability,
821 message: proto::BufferState,
822 file: Option<Arc<dyn File>>,
823 ) -> Result<Self> {
824 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
825 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
826 let mut this = Self::build(buffer, file, capability);
827 this.text.set_line_ending(proto::deserialize_line_ending(
828 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
829 ));
830 this.saved_version = proto::deserialize_version(&message.saved_version);
831 this.saved_mtime = message.saved_mtime.map(|time| time.into());
832 Ok(this)
833 }
834
835 /// Serialize the buffer's state to a protobuf message.
836 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
837 proto::BufferState {
838 id: self.remote_id().into(),
839 file: self.file.as_ref().map(|f| f.to_proto(cx)),
840 base_text: self.base_text().to_string(),
841 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
842 saved_version: proto::serialize_version(&self.saved_version),
843 saved_mtime: self.saved_mtime.map(|time| time.into()),
844 }
845 }
846
847 /// Serialize as protobufs all of the changes to the buffer since the given version.
848 pub fn serialize_ops(
849 &self,
850 since: Option<clock::Global>,
851 cx: &App,
852 ) -> Task<Vec<proto::Operation>> {
853 let mut operations = Vec::new();
854 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
855
856 operations.extend(self.remote_selections.iter().map(|(_, set)| {
857 proto::serialize_operation(&Operation::UpdateSelections {
858 selections: set.selections.clone(),
859 lamport_timestamp: set.lamport_timestamp,
860 line_mode: set.line_mode,
861 cursor_shape: set.cursor_shape,
862 })
863 }));
864
865 for (server_id, diagnostics) in &self.diagnostics {
866 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
867 lamport_timestamp: self.diagnostics_timestamp,
868 server_id: *server_id,
869 diagnostics: diagnostics.iter().cloned().collect(),
870 }));
871 }
872
873 for (server_id, completions) in &self.completion_triggers_per_language_server {
874 operations.push(proto::serialize_operation(
875 &Operation::UpdateCompletionTriggers {
876 triggers: completions.iter().cloned().collect(),
877 lamport_timestamp: self.completion_triggers_timestamp,
878 server_id: *server_id,
879 },
880 ));
881 }
882
883 let text_operations = self.text.operations().clone();
884 cx.background_spawn(async move {
885 let since = since.unwrap_or_default();
886 operations.extend(
887 text_operations
888 .iter()
889 .filter(|(_, op)| !since.observed(op.timestamp()))
890 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
891 );
892 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
893 operations
894 })
895 }
896
897 /// Assign a language to the buffer, returning the buffer.
898 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
899 self.set_language(Some(language), cx);
900 self
901 }
902
903 /// Returns the [`Capability`] of this buffer.
904 pub fn capability(&self) -> Capability {
905 self.capability
906 }
907
908 /// Whether this buffer can only be read.
909 pub fn read_only(&self) -> bool {
910 self.capability == Capability::ReadOnly
911 }
912
913 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
914 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
915 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
916 let snapshot = buffer.snapshot();
917 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
918 Self {
919 saved_mtime,
920 saved_version: buffer.version(),
921 preview_version: buffer.version(),
922 reload_task: None,
923 transaction_depth: 0,
924 was_dirty_before_starting_transaction: None,
925 has_unsaved_edits: Cell::new((buffer.version(), false)),
926 text: buffer,
927 branch_state: None,
928 file,
929 capability,
930 syntax_map,
931 reparse: None,
932 non_text_state_update_count: 0,
933 sync_parse_timeout: Duration::from_millis(1),
934 parse_status: watch::channel(ParseStatus::Idle),
935 autoindent_requests: Default::default(),
936 pending_autoindent: Default::default(),
937 language: None,
938 remote_selections: Default::default(),
939 diagnostics: Default::default(),
940 diagnostics_timestamp: Default::default(),
941 completion_triggers: Default::default(),
942 completion_triggers_per_language_server: Default::default(),
943 completion_triggers_timestamp: Default::default(),
944 deferred_ops: OperationQueue::new(),
945 has_conflict: false,
946 change_bits: Default::default(),
947 _subscriptions: Vec::new(),
948 }
949 }
950
951 pub fn build_snapshot(
952 text: Rope,
953 language: Option<Arc<Language>>,
954 language_registry: Option<Arc<LanguageRegistry>>,
955 cx: &mut App,
956 ) -> impl Future<Output = BufferSnapshot> + use<> {
957 let entity_id = cx.reserve_entity::<Self>().entity_id();
958 let buffer_id = entity_id.as_non_zero_u64().into();
959 async move {
960 let text =
961 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
962 let mut syntax = SyntaxMap::new(&text).snapshot();
963 if let Some(language) = language.clone() {
964 let text = text.clone();
965 let language = language.clone();
966 let language_registry = language_registry.clone();
967 syntax.reparse(&text, language_registry, language);
968 }
969 BufferSnapshot {
970 text,
971 syntax,
972 file: None,
973 diagnostics: Default::default(),
974 remote_selections: Default::default(),
975 language,
976 non_text_state_update_count: 0,
977 }
978 }
979 }
980
981 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
982 let entity_id = cx.reserve_entity::<Self>().entity_id();
983 let buffer_id = entity_id.as_non_zero_u64().into();
984 let text =
985 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
986 let syntax = SyntaxMap::new(&text).snapshot();
987 BufferSnapshot {
988 text,
989 syntax,
990 file: None,
991 diagnostics: Default::default(),
992 remote_selections: Default::default(),
993 language: None,
994 non_text_state_update_count: 0,
995 }
996 }
997
998 #[cfg(any(test, feature = "test-support"))]
999 pub fn build_snapshot_sync(
1000 text: Rope,
1001 language: Option<Arc<Language>>,
1002 language_registry: Option<Arc<LanguageRegistry>>,
1003 cx: &mut App,
1004 ) -> BufferSnapshot {
1005 let entity_id = cx.reserve_entity::<Self>().entity_id();
1006 let buffer_id = entity_id.as_non_zero_u64().into();
1007 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1008 let mut syntax = SyntaxMap::new(&text).snapshot();
1009 if let Some(language) = language.clone() {
1010 let text = text.clone();
1011 let language = language.clone();
1012 let language_registry = language_registry.clone();
1013 syntax.reparse(&text, language_registry, language);
1014 }
1015 BufferSnapshot {
1016 text,
1017 syntax,
1018 file: None,
1019 diagnostics: Default::default(),
1020 remote_selections: Default::default(),
1021 language,
1022 non_text_state_update_count: 0,
1023 }
1024 }
1025
1026 /// Retrieve a snapshot of the buffer's current state. This is computationally
1027 /// cheap, and allows reading from the buffer on a background thread.
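    ///
    /// A hedged sketch of reading on a background thread (assuming `buffer` is
    /// an `Entity<Buffer>` and a `gpui` app context `cx` is in scope):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is an immutable copy that can safely be moved off
    ///     // the main thread.
    ///     let line_count = snapshot.max_point().row + 1;
    ///     // ...
    /// })
    /// .detach();
    /// ```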
1028 pub fn snapshot(&self) -> BufferSnapshot {
1029 let text = self.text.snapshot();
1030 let mut syntax_map = self.syntax_map.lock();
1031 syntax_map.interpolate(&text);
1032 let syntax = syntax_map.snapshot();
1033
1034 BufferSnapshot {
1035 text,
1036 syntax,
1037 file: self.file.clone(),
1038 remote_selections: self.remote_selections.clone(),
1039 diagnostics: self.diagnostics.clone(),
1040 language: self.language.clone(),
1041 non_text_state_update_count: self.non_text_state_update_count,
1042 }
1043 }
1044
1045 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1046 let this = cx.entity();
1047 cx.new(|cx| {
1048 let mut branch = Self {
1049 branch_state: Some(BufferBranchState {
1050 base_buffer: this.clone(),
1051 merged_operations: Default::default(),
1052 }),
1053 language: self.language.clone(),
1054 has_conflict: self.has_conflict,
1055 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1056 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1057 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1058 };
1059 if let Some(language_registry) = self.language_registry() {
1060 branch.set_language_registry(language_registry);
1061 }
1062
1063 // Reparse the branch buffer so that we get syntax highlighting immediately.
1064 branch.reparse(cx);
1065
1066 branch
1067 })
1068 }
1069
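    /// Computes an [`EditPreview`] for the given edits on a background thread.
    /// The result can be rendered via [`EditPreview::highlight_edits`].
    ///
    /// A hedged usage sketch, assuming an async context where `edits` is an
    /// `Arc<[(Range<Anchor>, String)]>` and `snapshot` is the buffer's current
    /// [`BufferSnapshot`]:
    ///
    /// ```ignore
    /// let preview = buffer.read(cx).preview_edits(edits.clone(), cx).await;
    /// let highlighted = preview.highlight_edits(&snapshot, &edits, true, cx);
    /// ```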
1070 pub fn preview_edits(
1071 &self,
1072 edits: Arc<[(Range<Anchor>, String)]>,
1073 cx: &App,
1074 ) -> Task<EditPreview> {
1075 let registry = self.language_registry();
1076 let language = self.language().cloned();
1077 let old_snapshot = self.text.snapshot();
1078 let mut branch_buffer = self.text.branch();
1079 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1080 cx.background_spawn(async move {
1081 if !edits.is_empty() {
1082 if let Some(language) = language.clone() {
1083 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1084 }
1085
1086 branch_buffer.edit(edits.iter().cloned());
1087 let snapshot = branch_buffer.snapshot();
1088 syntax_snapshot.interpolate(&snapshot);
1089
1090 if let Some(language) = language {
1091 syntax_snapshot.reparse(&snapshot, registry, language);
1092 }
1093 }
1094 EditPreview {
1095 old_snapshot,
1096 applied_edits_snapshot: branch_buffer.snapshot(),
1097 syntax_snapshot,
1098 }
1099 })
1100 }
1101
1102 /// Applies all of the changes in this buffer that intersect any of the
1103 /// given `ranges` to its base buffer.
1104 ///
1105 /// If `ranges` is empty, then all changes will be applied. This buffer must
1106 /// be a branch buffer to call this method.
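    ///
    /// A hedged usage sketch (assuming `buffer` is an `Entity<Buffer>`):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// new header\n")], None, cx);
    ///     // Merge every change back into the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```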
1107 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1108 let Some(base_buffer) = self.base_buffer() else {
1109 debug_panic!("not a branch buffer");
1110 return;
1111 };
1112
1113 let mut ranges = if ranges.is_empty() {
1114 &[0..usize::MAX]
1115 } else {
1116 ranges.as_slice()
1117 }
1118 .into_iter()
1119 .peekable();
1120
1121 let mut edits = Vec::new();
1122 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1123 let mut is_included = false;
1124 while let Some(range) = ranges.peek() {
1125 if range.end < edit.new.start {
1126 ranges.next().unwrap();
1127 } else {
1128 if range.start <= edit.new.end {
1129 is_included = true;
1130 }
1131 break;
1132 }
1133 }
1134
1135 if is_included {
1136 edits.push((
1137 edit.old.clone(),
1138 self.text_for_range(edit.new.clone()).collect::<String>(),
1139 ));
1140 }
1141 }
1142
1143 let operation = base_buffer.update(cx, |base_buffer, cx| {
1144 // cx.emit(BufferEvent::DiffBaseChanged);
1145 base_buffer.edit(edits, None, cx)
1146 });
1147
1148 if let Some(operation) = operation {
1149 if let Some(BufferBranchState {
1150 merged_operations, ..
1151 }) = &mut self.branch_state
1152 {
1153 merged_operations.push(operation);
1154 }
1155 }
1156 }
1157
1158 fn on_base_buffer_event(
1159 &mut self,
1160 _: Entity<Buffer>,
1161 event: &BufferEvent,
1162 cx: &mut Context<Self>,
1163 ) {
1164 let BufferEvent::Operation { operation, .. } = event else {
1165 return;
1166 };
1167 let Some(BufferBranchState {
1168 merged_operations, ..
1169 }) = &mut self.branch_state
1170 else {
1171 return;
1172 };
1173
1174 let mut operation_to_undo = None;
1175 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1176 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1177 merged_operations.remove(ix);
1178 operation_to_undo = Some(operation.timestamp);
1179 }
1180 }
1181
1182 self.apply_ops([operation.clone()], cx);
1183
1184 if let Some(timestamp) = operation_to_undo {
1185 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1186 self.undo_operations(counts, cx);
1187 }
1188 }
1189
1190 #[cfg(test)]
1191 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1192 &self.text
1193 }
1194
1195 /// Retrieve a snapshot of the buffer's raw text, without any
1196 /// language-related state like the syntax tree or diagnostics.
1197 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1198 self.text.snapshot()
1199 }
1200
1201 /// The file associated with the buffer, if any.
1202 pub fn file(&self) -> Option<&Arc<dyn File>> {
1203 self.file.as_ref()
1204 }
1205
1206 /// The version of the buffer that was last saved or reloaded from disk.
1207 pub fn saved_version(&self) -> &clock::Global {
1208 &self.saved_version
1209 }
1210
1211 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1212 pub fn saved_mtime(&self) -> Option<MTime> {
1213 self.saved_mtime
1214 }
1215
1216 /// Assign a language to the buffer.
1217 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1218 self.non_text_state_update_count += 1;
1219 self.syntax_map.lock().clear(&self.text);
1220 self.language = language;
1221 self.was_changed();
1222 self.reparse(cx);
1223 cx.emit(BufferEvent::LanguageChanged);
1224 }
1225
1226 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1227 /// other languages if parts of the buffer are written in different languages.
1228 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1229 self.syntax_map
1230 .lock()
1231 .set_language_registry(language_registry);
1232 }
1233
1234 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1235 self.syntax_map.lock().language_registry()
1236 }
1237
1238 /// Assign the buffer a new [`Capability`].
1239 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1240 self.capability = capability;
1241 cx.emit(BufferEvent::CapabilityChanged)
1242 }
1243
1244 /// This method is called to signal that the buffer has been saved.
1245 pub fn did_save(
1246 &mut self,
1247 version: clock::Global,
1248 mtime: Option<MTime>,
1249 cx: &mut Context<Self>,
1250 ) {
1251 self.saved_version = version;
1252 self.has_unsaved_edits
1253 .set((self.saved_version().clone(), false));
1254 self.has_conflict = false;
1255 self.saved_mtime = mtime;
1256 self.was_changed();
1257 cx.emit(BufferEvent::Saved);
1258 cx.notify();
1259 }
1260
1261 /// This method is called to signal that the buffer has been discarded.
1262 pub fn discarded(&self, cx: &mut Context<Self>) {
1263 cx.emit(BufferEvent::Discarded);
1264 cx.notify();
1265 }
1266
1267 /// Reloads the contents of the buffer from disk.
1268 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1269 let (tx, rx) = futures::channel::oneshot::channel();
1270 let prev_version = self.text.version();
1271 self.reload_task = Some(cx.spawn(async move |this, cx| {
1272 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1273 let file = this.file.as_ref()?.as_local()?;
1274
1275 Some((file.disk_state().mtime(), file.load(cx)))
1276 })?
1277 else {
1278 return Ok(());
1279 };
1280
1281 let new_text = new_text.await?;
1282 let diff = this
1283 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1284 .await;
1285 this.update(cx, |this, cx| {
1286 if this.version() == diff.base_version {
1287 this.finalize_last_transaction();
1288 this.apply_diff(diff, cx);
1289 tx.send(this.finalize_last_transaction().cloned()).ok();
1290 this.has_conflict = false;
1291 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1292 } else {
1293 if !diff.edits.is_empty()
1294 || this
1295 .edits_since::<usize>(&diff.base_version)
1296 .next()
1297 .is_some()
1298 {
1299 this.has_conflict = true;
1300 }
1301
1302 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1303 }
1304
1305 this.reload_task.take();
1306 })
1307 }));
1308 rx
1309 }
1310
1311 /// This method is called to signal that the buffer has been reloaded.
1312 pub fn did_reload(
1313 &mut self,
1314 version: clock::Global,
1315 line_ending: LineEnding,
1316 mtime: Option<MTime>,
1317 cx: &mut Context<Self>,
1318 ) {
1319 self.saved_version = version;
1320 self.has_unsaved_edits
1321 .set((self.saved_version.clone(), false));
1322 self.text.set_line_ending(line_ending);
1323 self.saved_mtime = mtime;
1324 cx.emit(BufferEvent::Reloaded);
1325 cx.notify();
1326 }
1327
1328 /// Updates the [`File`] backing this buffer. This should be called when
1329 /// the file has changed or has been deleted.
1330 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1331 let was_dirty = self.is_dirty();
1332 let mut file_changed = false;
1333
1334 if let Some(old_file) = self.file.as_ref() {
1335 if new_file.path() != old_file.path() {
1336 file_changed = true;
1337 }
1338
1339 let old_state = old_file.disk_state();
1340 let new_state = new_file.disk_state();
1341 if old_state != new_state {
1342 file_changed = true;
1343 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1344 cx.emit(BufferEvent::ReloadNeeded)
1345 }
1346 }
1347 } else {
1348 file_changed = true;
1349 };
1350
1351 self.file = Some(new_file);
1352 if file_changed {
1353 self.was_changed();
1354 self.non_text_state_update_count += 1;
1355 if was_dirty != self.is_dirty() {
1356 cx.emit(BufferEvent::DirtyChanged);
1357 }
1358 cx.emit(BufferEvent::FileHandleChanged);
1359 cx.notify();
1360 }
1361 }
1362
1363 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1364 Some(self.branch_state.as_ref()?.base_buffer.clone())
1365 }
1366
1367 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1368 pub fn language(&self) -> Option<&Arc<Language>> {
1369 self.language.as_ref()
1370 }
1371
1372 /// Returns the [`Language`] at the given location.
1373 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1374 let offset = position.to_offset(self);
1375 self.syntax_map
1376 .lock()
1377 .layers_for_range(offset..offset, &self.text, false)
1378 .last()
1379 .map(|info| info.language.clone())
1380 .or_else(|| self.language.clone())
1381 }
1382
1383 /// Returns each [`Language`] for the active syntax layers at the given location.
1384 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1385 let offset = position.to_offset(self);
1386 let mut languages: Vec<Arc<Language>> = self
1387 .syntax_map
1388 .lock()
1389 .layers_for_range(offset..offset, &self.text, false)
1390 .map(|info| info.language.clone())
1391 .collect();
1392
1393 if languages.is_empty() {
1394 if let Some(buffer_language) = self.language() {
1395 languages.push(buffer_language.clone());
1396 }
1397 }
1398
1399 languages
1400 }
1401
1402 /// An integer version number that accounts for all updates besides
1403 /// the buffer's text itself (which is versioned via a version vector).
1404 pub fn non_text_state_update_count(&self) -> usize {
1405 self.non_text_state_update_count
1406 }
1407
1408 /// Whether the buffer is being parsed in the background.
1409 #[cfg(any(test, feature = "test-support"))]
1410 pub fn is_parsing(&self) -> bool {
1411 self.reparse.is_some()
1412 }
1413
1414 /// Indicates whether the buffer contains any regions that may be
1415 /// written in a language that hasn't been loaded yet.
1416 pub fn contains_unknown_injections(&self) -> bool {
1417 self.syntax_map.lock().contains_unknown_injections()
1418 }
1419
1420 #[cfg(test)]
1421 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1422 self.sync_parse_timeout = timeout;
1423 }
1424
1425 /// Called after an edit to synchronize the buffer's main parse tree with
1426 /// the buffer's new underlying state.
1427 ///
1428 /// Locks the syntax map and interpolates the edits since the last reparse
1429 /// into the foreground syntax tree.
1430 ///
1431 /// Then takes a stable snapshot of the syntax map before unlocking it.
1432 /// The snapshot with the interpolated edits is sent to a background thread,
1433 /// where we ask Tree-sitter to perform an incremental parse.
1434 ///
1435 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1436 /// waiting on the parse to complete. As soon as it completes, we proceed
1437 /// synchronously, unless a 1ms timeout elapses.
1438 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, we call back
    /// into the main thread and assign the foreground parse state.
1443 ///
1444 /// If the buffer or grammar changed since the start of the background parse,
1445 /// initiate an additional reparse recursively. To avoid concurrent parses
1446 /// for the same buffer, we only initiate a new parse if we are not already
1447 /// parsing in the background.
1448 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1449 if self.reparse.is_some() {
1450 return;
1451 }
1452 let language = if let Some(language) = self.language.clone() {
1453 language
1454 } else {
1455 return;
1456 };
1457
1458 let text = self.text_snapshot();
1459 let parsed_version = self.version();
1460
1461 let mut syntax_map = self.syntax_map.lock();
1462 syntax_map.interpolate(&text);
1463 let language_registry = syntax_map.language_registry();
1464 let mut syntax_snapshot = syntax_map.snapshot();
1465 drop(syntax_map);
1466
1467 let parse_task = cx.background_spawn({
1468 let language = language.clone();
1469 let language_registry = language_registry.clone();
1470 async move {
1471 syntax_snapshot.reparse(&text, language_registry, language);
1472 syntax_snapshot
1473 }
1474 });
1475
1476 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1477 match cx
1478 .background_executor()
1479 .block_with_timeout(self.sync_parse_timeout, parse_task)
1480 {
1481 Ok(new_syntax_snapshot) => {
1482 self.did_finish_parsing(new_syntax_snapshot, cx);
1483 self.reparse = None;
1484 }
1485 Err(parse_task) => {
1486 self.reparse = Some(cx.spawn(async move |this, cx| {
1487 let new_syntax_map = parse_task.await;
1488 this.update(cx, move |this, cx| {
1489 let grammar_changed =
1490 this.language.as_ref().map_or(true, |current_language| {
1491 !Arc::ptr_eq(&language, current_language)
1492 });
1493 let language_registry_changed = new_syntax_map
1494 .contains_unknown_injections()
1495 && language_registry.map_or(false, |registry| {
1496 registry.version() != new_syntax_map.language_registry_version()
1497 });
1498 let parse_again = language_registry_changed
1499 || grammar_changed
1500 || this.version.changed_since(&parsed_version);
1501 this.did_finish_parsing(new_syntax_map, cx);
1502 this.reparse = None;
1503 if parse_again {
1504 this.reparse(cx);
1505 }
1506 })
1507 .ok();
1508 }));
1509 }
1510 }
1511 }
1512
1513 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1514 self.was_changed();
1515 self.non_text_state_update_count += 1;
1516 self.syntax_map.lock().did_parse(syntax_snapshot);
1517 self.request_autoindent(cx);
1518 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1519 cx.emit(BufferEvent::Reparsed);
1520 cx.notify();
1521 }
1522
1523 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1524 self.parse_status.1.clone()
1525 }
1526
1527 /// Assign to the buffer a set of diagnostics created by a given language server.
1528 pub fn update_diagnostics(
1529 &mut self,
1530 server_id: LanguageServerId,
1531 diagnostics: DiagnosticSet,
1532 cx: &mut Context<Self>,
1533 ) {
1534 let lamport_timestamp = self.text.lamport_clock.tick();
1535 let op = Operation::UpdateDiagnostics {
1536 server_id,
1537 diagnostics: diagnostics.iter().cloned().collect(),
1538 lamport_timestamp,
1539 };
1540 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1541 self.send_operation(op, true, cx);
1542 }
1543
1544 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1545 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1546 return None;
1547 };
1548 Some(&self.diagnostics[idx].1)
1549 }
1550
1551 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1552 if let Some(indent_sizes) = self.compute_autoindents() {
1553 let indent_sizes = cx.background_spawn(indent_sizes);
1554 match cx
1555 .background_executor()
1556 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1557 {
1558 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1559 Err(indent_sizes) => {
1560 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1561 let indent_sizes = indent_sizes.await;
1562 this.update(cx, |this, cx| {
1563 this.apply_autoindents(indent_sizes, cx);
1564 })
1565 .ok();
1566 }));
1567 }
1568 }
1569 } else {
1570 self.autoindent_requests.clear();
1571 }
1572 }
1573
1574 fn compute_autoindents(
1575 &self,
1576 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1577 let max_rows_between_yields = 100;
1578 let snapshot = self.snapshot();
1579 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1580 return None;
1581 }
1582
1583 let autoindent_requests = self.autoindent_requests.clone();
1584 Some(async move {
1585 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1586 for request in autoindent_requests {
1587 // Resolve each edited range to its row in the current buffer and in the
1588 // buffer before this batch of edits.
1589 let mut row_ranges = Vec::new();
1590 let mut old_to_new_rows = BTreeMap::new();
1591 let mut language_indent_sizes_by_new_row = Vec::new();
1592 for entry in &request.entries {
1593 let position = entry.range.start;
1594 let new_row = position.to_point(&snapshot).row;
1595 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1596 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1597
1598 if !entry.first_line_is_new {
1599 let old_row = position.to_point(&request.before_edit).row;
1600 old_to_new_rows.insert(old_row, new_row);
1601 }
1602 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1603 }
1604
1605 // Build a map containing the suggested indentation for each of the edited lines
1606 // with respect to the state of the buffer before these edits. This map is keyed
1607 // by the rows for these lines in the current state of the buffer.
1608 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1609 let old_edited_ranges =
1610 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1611 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1612 let mut language_indent_size = IndentSize::default();
1613 for old_edited_range in old_edited_ranges {
1614 let suggestions = request
1615 .before_edit
1616 .suggest_autoindents(old_edited_range.clone())
1617 .into_iter()
1618 .flatten();
1619 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1620 if let Some(suggestion) = suggestion {
1621 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1622
1623 // Find the indent size based on the language for this row.
1624 while let Some((row, size)) = language_indent_sizes.peek() {
1625 if *row > new_row {
1626 break;
1627 }
1628 language_indent_size = *size;
1629 language_indent_sizes.next();
1630 }
1631
1632 let suggested_indent = old_to_new_rows
1633 .get(&suggestion.basis_row)
1634 .and_then(|from_row| {
1635 Some(old_suggestions.get(from_row).copied()?.0)
1636 })
1637 .unwrap_or_else(|| {
1638 request
1639 .before_edit
1640 .indent_size_for_line(suggestion.basis_row)
1641 })
1642 .with_delta(suggestion.delta, language_indent_size);
1643 old_suggestions
1644 .insert(new_row, (suggested_indent, suggestion.within_error));
1645 }
1646 }
1647 yield_now().await;
1648 }
1649
1650 // Compute new suggestions for each line, but only include them in the result
1651 // if they differ from the old suggestion for that line.
1652 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1653 let mut language_indent_size = IndentSize::default();
1654 for (row_range, original_indent_column) in row_ranges {
1655 let new_edited_row_range = if request.is_block_mode {
1656 row_range.start..row_range.start + 1
1657 } else {
1658 row_range.clone()
1659 };
1660
1661 let suggestions = snapshot
1662 .suggest_autoindents(new_edited_row_range.clone())
1663 .into_iter()
1664 .flatten();
1665 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1666 if let Some(suggestion) = suggestion {
1667 // Find the indent size based on the language for this row.
1668 while let Some((row, size)) = language_indent_sizes.peek() {
1669 if *row > new_row {
1670 break;
1671 }
1672 language_indent_size = *size;
1673 language_indent_sizes.next();
1674 }
1675
1676 let suggested_indent = indent_sizes
1677 .get(&suggestion.basis_row)
1678 .copied()
1679 .map(|e| e.0)
1680 .unwrap_or_else(|| {
1681 snapshot.indent_size_for_line(suggestion.basis_row)
1682 })
1683 .with_delta(suggestion.delta, language_indent_size);
1684
1685 if old_suggestions.get(&new_row).map_or(
1686 true,
1687 |(old_indentation, was_within_error)| {
1688 suggested_indent != *old_indentation
1689 && (!suggestion.within_error || *was_within_error)
1690 },
1691 ) {
1692 indent_sizes.insert(
1693 new_row,
1694 (suggested_indent, request.ignore_empty_lines),
1695 );
1696 }
1697 }
1698 }
1699
1700 if let (true, Some(original_indent_column)) =
1701 (request.is_block_mode, original_indent_column)
1702 {
1703 let new_indent =
1704 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1705 *indent
1706 } else {
1707 snapshot.indent_size_for_line(row_range.start)
1708 };
1709 let delta = new_indent.len as i64 - original_indent_column as i64;
1710 if delta != 0 {
1711 for row in row_range.skip(1) {
1712 indent_sizes.entry(row).or_insert_with(|| {
1713 let mut size = snapshot.indent_size_for_line(row);
1714 if size.kind == new_indent.kind {
1715 match delta.cmp(&0) {
1716 Ordering::Greater => size.len += delta as u32,
1717 Ordering::Less => {
1718 size.len = size.len.saturating_sub(-delta as u32)
1719 }
1720 Ordering::Equal => {}
1721 }
1722 }
1723 (size, request.ignore_empty_lines)
1724 });
1725 }
1726 }
1727 }
1728
1729 yield_now().await;
1730 }
1731 }
1732
1733 indent_sizes
1734 .into_iter()
1735 .filter_map(|(row, (indent, ignore_empty_lines))| {
1736 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1737 None
1738 } else {
1739 Some((row, indent))
1740 }
1741 })
1742 .collect()
1743 })
1744 }
1745
1746 fn apply_autoindents(
1747 &mut self,
1748 indent_sizes: BTreeMap<u32, IndentSize>,
1749 cx: &mut Context<Self>,
1750 ) {
1751 self.autoindent_requests.clear();
1752
1753 let edits: Vec<_> = indent_sizes
1754 .into_iter()
1755 .filter_map(|(row, indent_size)| {
1756 let current_size = indent_size_for_line(self, row);
1757 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1758 })
1759 .collect();
1760
1761 let preserve_preview = self.preserve_preview();
1762 self.edit(edits, None, cx);
1763 if preserve_preview {
1764 self.refresh_preview();
1765 }
1766 }
1767
1768 /// Create a minimal edit that will cause the given row to be indented
1769 /// with the given size. After applying this edit, the length of the line
1770 /// will always be at least `new_size.len`.
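    ///
    /// A minimal illustrative sketch (marked `ignore`, not wired up as a doctest); it
    /// assumes `IndentSize::spaces`, as used elsewhere in this module:
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces inserts two spaces at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```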
1771 pub fn edit_for_indent_size_adjustment(
1772 row: u32,
1773 current_size: IndentSize,
1774 new_size: IndentSize,
1775 ) -> Option<(Range<Point>, String)> {
1776 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1778 Ordering::Greater => {
1779 let point = Point::new(row, 0);
1780 Some((
1781 point..point,
1782 iter::repeat(new_size.char())
1783 .take((new_size.len - current_size.len) as usize)
1784 .collect::<String>(),
1785 ))
1786 }
1787
1788 Ordering::Less => Some((
1789 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1790 String::new(),
1791 )),
1792
1793 Ordering::Equal => None,
1794 }
1795 } else {
1796 Some((
1797 Point::new(row, 0)..Point::new(row, current_size.len),
1798 iter::repeat(new_size.char())
1799 .take(new_size.len as usize)
1800 .collect::<String>(),
1801 ))
1802 }
1803 }
1804
1805 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1806 /// and the given new text.
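    ///
    /// A hypothetical usage sketch (not a doctest); it assumes an `Entity<Buffer>` named
    /// `buffer`, an app context `cx`, and an async scope in which the task can be awaited:
    ///
    /// ```ignore
    /// let task = buffer.read(cx).diff(new_text, cx);
    /// let diff = task.await;
    /// // The resulting `Diff` can then be applied with `Buffer::apply_diff` (see below).
    /// ```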
1807 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1808 let old_text = self.as_rope().clone();
1809 let base_version = self.version();
1810 cx.background_executor()
1811 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1812 let old_text = old_text.to_string();
1813 let line_ending = LineEnding::detect(&new_text);
1814 LineEnding::normalize(&mut new_text);
1815 let edits = text_diff(&old_text, &new_text);
1816 Diff {
1817 base_version,
1818 line_ending,
1819 edits,
1820 }
1821 })
1822 }
1823
1824 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1826 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1827 let old_text = self.as_rope().clone();
1828 let line_ending = self.line_ending();
1829 let base_version = self.version();
1830 cx.background_spawn(async move {
1831 let ranges = trailing_whitespace_ranges(&old_text);
1832 let empty = Arc::<str>::from("");
1833 Diff {
1834 base_version,
1835 line_ending,
1836 edits: ranges
1837 .into_iter()
1838 .map(|range| (range, empty.clone()))
1839 .collect(),
1840 }
1841 })
1842 }
1843
1844 /// Ensures that the buffer ends with a single newline character, and
1845 /// no other whitespace.
1846 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1847 let len = self.len();
1848 let mut offset = len;
1849 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1850 let non_whitespace_len = chunk
1851 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1852 .len();
1853 offset -= chunk.len();
1854 offset += non_whitespace_len;
1855 if non_whitespace_len != 0 {
1856 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1857 return;
1858 }
1859 break;
1860 }
1861 }
1862 self.edit([(offset..len, "\n")], None, cx);
1863 }
1864
1865 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1866 /// calculated, then adjust the diff to account for those changes, and discard any
1867 /// parts of the diff that conflict with those changes.
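    ///
    /// A hedged sketch of the synchronous half of the flow (not a doctest), assuming a
    /// `diff` previously produced by [`Buffer::diff`] or [`Buffer::remove_trailing_whitespace`]
    /// and an `Entity<Buffer>` named `buffer`:
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     // Hunks that conflict with edits made since `diff.base_version` are dropped.
    ///     if let Some(transaction_id) = buffer.apply_diff(diff, cx) {
    ///         // All applied hunks land in a single undoable transaction.
    ///         let _ = transaction_id;
    ///     }
    /// });
    /// ```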
1868 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1869 let snapshot = self.snapshot();
1870 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1871 let mut delta = 0;
1872 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1873 while let Some(edit_since) = edits_since.peek() {
1874 // If the edit occurs after a diff hunk, then it does not
1875 // affect that hunk.
1876 if edit_since.old.start > range.end {
1877 break;
1878 }
1879 // If the edit precedes the diff hunk, then adjust the hunk
1880 // to reflect the edit.
1881 else if edit_since.old.end < range.start {
1882 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1883 edits_since.next();
1884 }
1885 // If the edit intersects a diff hunk, then discard that hunk.
1886 else {
1887 return None;
1888 }
1889 }
1890
1891 let start = (range.start as i64 + delta) as usize;
1892 let end = (range.end as i64 + delta) as usize;
1893 Some((start..end, new_text))
1894 });
1895
1896 self.start_transaction();
1897 self.text.set_line_ending(diff.line_ending);
1898 self.edit(adjusted_edits, None, cx);
1899 self.end_transaction(cx)
1900 }
1901
1902 fn has_unsaved_edits(&self) -> bool {
1903 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1904
1905 if last_version == self.version {
1906 self.has_unsaved_edits
1907 .set((last_version, has_unsaved_edits));
1908 return has_unsaved_edits;
1909 }
1910
1911 let has_edits = self.has_edits_since(&self.saved_version);
1912 self.has_unsaved_edits
1913 .set((self.version.clone(), has_edits));
1914 has_edits
1915 }
1916
1917 /// Checks if the buffer has unsaved changes.
1918 pub fn is_dirty(&self) -> bool {
1919 if self.capability == Capability::ReadOnly {
1920 return false;
1921 }
1922 if self.has_conflict {
1923 return true;
1924 }
1925 match self.file.as_ref().map(|f| f.disk_state()) {
1926 Some(DiskState::New) | Some(DiskState::Deleted) => {
1927 !self.is_empty() && self.has_unsaved_edits()
1928 }
1929 _ => self.has_unsaved_edits(),
1930 }
1931 }
1932
1933 /// Checks if the buffer and its file have both changed since the buffer
1934 /// was last saved or reloaded.
1935 pub fn has_conflict(&self) -> bool {
1936 if self.has_conflict {
1937 return true;
1938 }
1939 let Some(file) = self.file.as_ref() else {
1940 return false;
1941 };
1942 match file.disk_state() {
1943 DiskState::New => false,
1944 DiskState::Present { mtime } => match self.saved_mtime {
1945 Some(saved_mtime) => {
1946 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1947 }
1948 None => true,
1949 },
1950 DiskState::Deleted => false,
1951 }
1952 }
1953
1954 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1955 pub fn subscribe(&mut self) -> Subscription {
1956 self.text.subscribe()
1957 }
1958
1959 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1960 ///
1961 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
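    ///
    /// A small illustrative sketch (not a doctest):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// // Register a flag that flips to true whenever the buffer's text changes.
    /// let change_flag = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&change_flag));
    ///
    /// // Later, without waiting for an effect cycle:
    /// if change_flag.take() {
    ///     // React to the change; `take` also resets the flag to `false`.
    /// }
    /// ```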
1963 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
1964 if let Err(ix) = self
1965 .change_bits
1966 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
1967 {
1968 self.change_bits.insert(ix, bit);
1969 }
1970 }
1971
1972 fn was_changed(&mut self) {
1973 self.change_bits.retain(|change_bit| {
1974 change_bit.upgrade().map_or(false, |bit| {
1975 bit.replace(true);
1976 true
1977 })
1978 });
1979 }
1980
1981 /// Starts a transaction, if one is not already in-progress. When undoing or
1982 /// redoing edits, all of the edits performed within a transaction are undone
1983 /// or redone together.
1984 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1985 self.start_transaction_at(Instant::now())
1986 }
1987
1988 /// Starts a transaction, providing the current time. Subsequent transactions
1989 /// that occur within a short period of time will be grouped together. This
1990 /// is controlled by the buffer's undo grouping duration.
1991 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1992 self.transaction_depth += 1;
1993 if self.was_dirty_before_starting_transaction.is_none() {
1994 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1995 }
1996 self.text.start_transaction_at(now)
1997 }
1998
1999 /// Terminates the current transaction, if this is the outermost transaction.
2000 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2001 self.end_transaction_at(Instant::now(), cx)
2002 }
2003
2004 /// Terminates the current transaction, providing the current time. Subsequent transactions
2005 /// that occur within a short period of time will be grouped together. This
2006 /// is controlled by the buffer's undo grouping duration.
2007 pub fn end_transaction_at(
2008 &mut self,
2009 now: Instant,
2010 cx: &mut Context<Self>,
2011 ) -> Option<TransactionId> {
2012 assert!(self.transaction_depth > 0);
2013 self.transaction_depth -= 1;
2014 let was_dirty = if self.transaction_depth == 0 {
2015 self.was_dirty_before_starting_transaction.take().unwrap()
2016 } else {
2017 false
2018 };
2019 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2020 self.did_edit(&start_version, was_dirty, cx);
2021 Some(transaction_id)
2022 } else {
2023 None
2024 }
2025 }
2026
2027 /// Manually add a transaction to the buffer's undo history.
2028 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2029 self.text.push_transaction(transaction, now);
2030 }
2031
2032 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2034 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2035 self.text.finalize_last_transaction()
2036 }
2037
2038 /// Manually group all changes since a given transaction.
2039 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2040 self.text.group_until_transaction(transaction_id);
2041 }
2042
    /// Manually remove a transaction from the buffer's undo history.
2044 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2045 self.text.forget_transaction(transaction_id)
2046 }
2047
    /// Retrieve a transaction from the buffer's undo history.
2049 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2050 self.text.get_transaction(transaction_id)
2051 }
2052
2053 /// Manually merge two transactions in the buffer's undo history.
2054 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2055 self.text.merge_transactions(transaction, destination);
2056 }
2057
2058 /// Waits for the buffer to receive operations with the given timestamps.
2059 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2060 &mut self,
2061 edit_ids: It,
2062 ) -> impl Future<Output = Result<()>> + use<It> {
2063 self.text.wait_for_edits(edit_ids)
2064 }
2065
2066 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2067 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2068 &mut self,
2069 anchors: It,
2070 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2071 self.text.wait_for_anchors(anchors)
2072 }
2073
2074 /// Waits for the buffer to receive operations up to the given version.
2075 pub fn wait_for_version(
2076 &mut self,
2077 version: clock::Global,
2078 ) -> impl Future<Output = Result<()>> + use<> {
2079 self.text.wait_for_version(version)
2080 }
2081
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2084 pub fn give_up_waiting(&mut self) {
2085 self.text.give_up_waiting();
2086 }
2087
2088 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2089 pub fn set_active_selections(
2090 &mut self,
2091 selections: Arc<[Selection<Anchor>]>,
2092 line_mode: bool,
2093 cursor_shape: CursorShape,
2094 cx: &mut Context<Self>,
2095 ) {
2096 let lamport_timestamp = self.text.lamport_clock.tick();
2097 self.remote_selections.insert(
2098 self.text.replica_id(),
2099 SelectionSet {
2100 selections: selections.clone(),
2101 lamport_timestamp,
2102 line_mode,
2103 cursor_shape,
2104 },
2105 );
2106 self.send_operation(
2107 Operation::UpdateSelections {
2108 selections,
2109 line_mode,
2110 lamport_timestamp,
2111 cursor_shape,
2112 },
2113 true,
2114 cx,
2115 );
2116 self.non_text_state_update_count += 1;
2117 cx.notify();
2118 }
2119
2120 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2121 /// this replica.
2122 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2123 if self
2124 .remote_selections
2125 .get(&self.text.replica_id())
2126 .map_or(true, |set| !set.selections.is_empty())
2127 {
2128 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2129 }
2130 }
2131
2132 pub fn set_agent_selections(
2133 &mut self,
2134 selections: Arc<[Selection<Anchor>]>,
2135 line_mode: bool,
2136 cursor_shape: CursorShape,
2137 cx: &mut Context<Self>,
2138 ) {
2139 let lamport_timestamp = self.text.lamport_clock.tick();
2140 self.remote_selections.insert(
2141 AGENT_REPLICA_ID,
2142 SelectionSet {
2143 selections: selections.clone(),
2144 lamport_timestamp,
2145 line_mode,
2146 cursor_shape,
2147 },
2148 );
2149 self.non_text_state_update_count += 1;
2150 cx.notify();
2151 }
2152
2153 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2154 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2155 }
2156
2157 /// Replaces the buffer's entire text.
2158 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2159 where
2160 T: Into<Arc<str>>,
2161 {
2162 self.autoindent_requests.clear();
2163 self.edit([(0..self.len(), text)], None, cx)
2164 }
2165
2166 /// Appends the given text to the end of the buffer.
2167 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2168 where
2169 T: Into<Arc<str>>,
2170 {
2171 self.edit([(self.len()..self.len(), text)], None, cx)
2172 }
2173
2174 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2175 /// delete, and a string of text to insert at that location.
2176 ///
2177 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2178 /// request for the edited ranges, which will be processed when the buffer finishes
2179 /// parsing.
2180 ///
    /// Parsing takes place at the end of a transaction, and may be performed synchronously
    /// or asynchronously, depending on the changes.
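    ///
    /// A hypothetical call (not a doctest), e.g. from inside a `buffer.update(cx, ..)` closure:
    ///
    /// ```ignore
    /// // Replace the first three bytes and append a closing brace at the end of the
    /// // buffer, re-indenting each edited line afterwards.
    /// buffer.edit(
    ///     [(0..3, "fn "), (buffer.len()..buffer.len(), "\n}")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```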
2183 pub fn edit<I, S, T>(
2184 &mut self,
2185 edits_iter: I,
2186 autoindent_mode: Option<AutoindentMode>,
2187 cx: &mut Context<Self>,
2188 ) -> Option<clock::Lamport>
2189 where
2190 I: IntoIterator<Item = (Range<S>, T)>,
2191 S: ToOffset,
2192 T: Into<Arc<str>>,
2193 {
2194 // Skip invalid edits and coalesce contiguous ones.
2195 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2196
2197 for (range, new_text) in edits_iter {
2198 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2199
2200 if range.start > range.end {
2201 mem::swap(&mut range.start, &mut range.end);
2202 }
2203 let new_text = new_text.into();
2204 if !new_text.is_empty() || !range.is_empty() {
2205 if let Some((prev_range, prev_text)) = edits.last_mut() {
2206 if prev_range.end >= range.start {
2207 prev_range.end = cmp::max(prev_range.end, range.end);
2208 *prev_text = format!("{prev_text}{new_text}").into();
2209 } else {
2210 edits.push((range, new_text));
2211 }
2212 } else {
2213 edits.push((range, new_text));
2214 }
2215 }
2216 }
2217 if edits.is_empty() {
2218 return None;
2219 }
2220
2221 self.start_transaction();
2222 self.pending_autoindent.take();
2223 let autoindent_request = autoindent_mode
2224 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2225
2226 let edit_operation = self.text.edit(edits.iter().cloned());
2227 let edit_id = edit_operation.timestamp();
2228
2229 if let Some((before_edit, mode)) = autoindent_request {
2230 let mut delta = 0isize;
2231 let entries = edits
2232 .into_iter()
2233 .enumerate()
2234 .zip(&edit_operation.as_edit().unwrap().new_text)
2235 .map(|((ix, (range, _)), new_text)| {
2236 let new_text_length = new_text.len();
2237 let old_start = range.start.to_point(&before_edit);
2238 let new_start = (delta + range.start as isize) as usize;
2239 let range_len = range.end - range.start;
2240 delta += new_text_length as isize - range_len as isize;
2241
2242 // Decide what range of the insertion to auto-indent, and whether
2243 // the first line of the insertion should be considered a newly-inserted line
2244 // or an edit to an existing line.
2245 let mut range_of_insertion_to_indent = 0..new_text_length;
2246 let mut first_line_is_new = true;
2247
2248 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2249 let old_line_end = before_edit.line_len(old_start.row);
2250
2251 if old_start.column > old_line_start {
2252 first_line_is_new = false;
2253 }
2254
2255 if !new_text.contains('\n')
2256 && (old_start.column + (range_len as u32) < old_line_end
2257 || old_line_end == old_line_start)
2258 {
2259 first_line_is_new = false;
2260 }
2261
2262 // When inserting text starting with a newline, avoid auto-indenting the
2263 // previous line.
2264 if new_text.starts_with('\n') {
2265 range_of_insertion_to_indent.start += 1;
2266 first_line_is_new = true;
2267 }
2268
2269 let mut original_indent_column = None;
2270 if let AutoindentMode::Block {
2271 original_indent_columns,
2272 } = &mode
2273 {
2274 original_indent_column = Some(if new_text.starts_with('\n') {
2275 indent_size_for_text(
2276 new_text[range_of_insertion_to_indent.clone()].chars(),
2277 )
2278 .len
2279 } else {
2280 original_indent_columns
2281 .get(ix)
2282 .copied()
2283 .flatten()
2284 .unwrap_or_else(|| {
2285 indent_size_for_text(
2286 new_text[range_of_insertion_to_indent.clone()].chars(),
2287 )
2288 .len
2289 })
2290 });
2291
2292 // Avoid auto-indenting the line after the edit.
2293 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2294 range_of_insertion_to_indent.end -= 1;
2295 }
2296 }
2297
2298 AutoindentRequestEntry {
2299 first_line_is_new,
2300 original_indent_column,
2301 indent_size: before_edit.language_indent_size_at(range.start, cx),
2302 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2303 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2304 }
2305 })
2306 .collect();
2307
2308 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2309 before_edit,
2310 entries,
2311 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2312 ignore_empty_lines: false,
2313 }));
2314 }
2315
2316 self.end_transaction(cx);
2317 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2318 Some(edit_id)
2319 }
2320
2321 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2322 self.was_changed();
2323
2324 if self.edits_since::<usize>(old_version).next().is_none() {
2325 return;
2326 }
2327
2328 self.reparse(cx);
2329 cx.emit(BufferEvent::Edited);
2330 if was_dirty != self.is_dirty() {
2331 cx.emit(BufferEvent::DirtyChanged);
2332 }
2333 cx.notify();
2334 }
2335
2336 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2337 where
2338 I: IntoIterator<Item = Range<T>>,
2339 T: ToOffset + Copy,
2340 {
2341 let before_edit = self.snapshot();
2342 let entries = ranges
2343 .into_iter()
2344 .map(|range| AutoindentRequestEntry {
2345 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2346 first_line_is_new: true,
2347 indent_size: before_edit.language_indent_size_at(range.start, cx),
2348 original_indent_column: None,
2349 })
2350 .collect();
2351 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2352 before_edit,
2353 entries,
2354 is_block_mode: false,
2355 ignore_empty_lines: true,
2356 }));
2357 self.request_autoindent(cx);
2358 }
2359
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
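    ///
    /// A hedged usage sketch (not a doctest), e.g. from inside a `buffer.update(cx, ..)` closure;
    /// `cursor_point` stands for any value implementing [`ToPoint`]:
    ///
    /// ```ignore
    /// // Open an empty line at the cursor and pad it with blank lines above and below.
    /// let new_line_start = buffer.insert_empty_line(cursor_point, true, true, cx);
    /// ```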
2362 pub fn insert_empty_line(
2363 &mut self,
2364 position: impl ToPoint,
2365 space_above: bool,
2366 space_below: bool,
2367 cx: &mut Context<Self>,
2368 ) -> Point {
2369 let mut position = position.to_point(self);
2370
2371 self.start_transaction();
2372
2373 self.edit(
2374 [(position..position, "\n")],
2375 Some(AutoindentMode::EachLine),
2376 cx,
2377 );
2378
2379 if position.column > 0 {
2380 position += Point::new(1, 0);
2381 }
2382
2383 if !self.is_line_blank(position.row) {
2384 self.edit(
2385 [(position..position, "\n")],
2386 Some(AutoindentMode::EachLine),
2387 cx,
2388 );
2389 }
2390
2391 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2392 self.edit(
2393 [(position..position, "\n")],
2394 Some(AutoindentMode::EachLine),
2395 cx,
2396 );
2397 position.row += 1;
2398 }
2399
2400 if space_below
2401 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2402 {
2403 self.edit(
2404 [(position..position, "\n")],
2405 Some(AutoindentMode::EachLine),
2406 cx,
2407 );
2408 }
2409
2410 self.end_transaction(cx);
2411
2412 position
2413 }
2414
2415 /// Applies the given remote operations to the buffer.
2416 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2417 self.pending_autoindent.take();
2418 let was_dirty = self.is_dirty();
2419 let old_version = self.version.clone();
2420 let mut deferred_ops = Vec::new();
2421 let buffer_ops = ops
2422 .into_iter()
2423 .filter_map(|op| match op {
2424 Operation::Buffer(op) => Some(op),
2425 _ => {
2426 if self.can_apply_op(&op) {
2427 self.apply_op(op, cx);
2428 } else {
2429 deferred_ops.push(op);
2430 }
2431 None
2432 }
2433 })
2434 .collect::<Vec<_>>();
2435 for operation in buffer_ops.iter() {
2436 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2437 }
2438 self.text.apply_ops(buffer_ops);
2439 self.deferred_ops.insert(deferred_ops);
2440 self.flush_deferred_ops(cx);
2441 self.did_edit(&old_version, was_dirty, cx);
        // Notify independently of whether the buffer was edited, as the operations could
        // include a selection update.
2444 cx.notify();
2445 }
2446
2447 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2448 let mut deferred_ops = Vec::new();
2449 for op in self.deferred_ops.drain().iter().cloned() {
2450 if self.can_apply_op(&op) {
2451 self.apply_op(op, cx);
2452 } else {
2453 deferred_ops.push(op);
2454 }
2455 }
2456 self.deferred_ops.insert(deferred_ops);
2457 }
2458
2459 pub fn has_deferred_ops(&self) -> bool {
2460 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2461 }
2462
2463 fn can_apply_op(&self, operation: &Operation) -> bool {
2464 match operation {
2465 Operation::Buffer(_) => {
2466 unreachable!("buffer operations should never be applied at this layer")
2467 }
2468 Operation::UpdateDiagnostics {
2469 diagnostics: diagnostic_set,
2470 ..
2471 } => diagnostic_set.iter().all(|diagnostic| {
2472 self.text.can_resolve(&diagnostic.range.start)
2473 && self.text.can_resolve(&diagnostic.range.end)
2474 }),
2475 Operation::UpdateSelections { selections, .. } => selections
2476 .iter()
2477 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2478 Operation::UpdateCompletionTriggers { .. } => true,
2479 }
2480 }
2481
2482 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2483 match operation {
2484 Operation::Buffer(_) => {
2485 unreachable!("buffer operations should never be applied at this layer")
2486 }
2487 Operation::UpdateDiagnostics {
2488 server_id,
2489 diagnostics: diagnostic_set,
2490 lamport_timestamp,
2491 } => {
2492 let snapshot = self.snapshot();
2493 self.apply_diagnostic_update(
2494 server_id,
2495 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2496 lamport_timestamp,
2497 cx,
2498 );
2499 }
2500 Operation::UpdateSelections {
2501 selections,
2502 lamport_timestamp,
2503 line_mode,
2504 cursor_shape,
2505 } => {
2506 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2507 if set.lamport_timestamp > lamport_timestamp {
2508 return;
2509 }
2510 }
2511
2512 self.remote_selections.insert(
2513 lamport_timestamp.replica_id,
2514 SelectionSet {
2515 selections,
2516 lamport_timestamp,
2517 line_mode,
2518 cursor_shape,
2519 },
2520 );
2521 self.text.lamport_clock.observe(lamport_timestamp);
2522 self.non_text_state_update_count += 1;
2523 }
2524 Operation::UpdateCompletionTriggers {
2525 triggers,
2526 lamport_timestamp,
2527 server_id,
2528 } => {
2529 if triggers.is_empty() {
2530 self.completion_triggers_per_language_server
2531 .remove(&server_id);
2532 self.completion_triggers = self
2533 .completion_triggers_per_language_server
2534 .values()
2535 .flat_map(|triggers| triggers.into_iter().cloned())
2536 .collect();
2537 } else {
2538 self.completion_triggers_per_language_server
2539 .insert(server_id, triggers.iter().cloned().collect());
2540 self.completion_triggers.extend(triggers);
2541 }
2542 self.text.lamport_clock.observe(lamport_timestamp);
2543 }
2544 }
2545 }
2546
2547 fn apply_diagnostic_update(
2548 &mut self,
2549 server_id: LanguageServerId,
2550 diagnostics: DiagnosticSet,
2551 lamport_timestamp: clock::Lamport,
2552 cx: &mut Context<Self>,
2553 ) {
2554 if lamport_timestamp > self.diagnostics_timestamp {
2555 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2556 if diagnostics.is_empty() {
2557 if let Ok(ix) = ix {
2558 self.diagnostics.remove(ix);
2559 }
2560 } else {
2561 match ix {
2562 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2563 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2564 };
2565 }
2566 self.diagnostics_timestamp = lamport_timestamp;
2567 self.non_text_state_update_count += 1;
2568 self.text.lamport_clock.observe(lamport_timestamp);
2569 cx.notify();
2570 cx.emit(BufferEvent::DiagnosticsUpdated);
2571 }
2572 }
2573
2574 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2575 self.was_changed();
2576 cx.emit(BufferEvent::Operation {
2577 operation,
2578 is_local,
2579 });
2580 }
2581
2582 /// Removes the selections for a given peer.
2583 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2584 self.remote_selections.remove(&replica_id);
2585 cx.notify();
2586 }
2587
2588 /// Undoes the most recent transaction.
2589 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2590 let was_dirty = self.is_dirty();
2591 let old_version = self.version.clone();
2592
2593 if let Some((transaction_id, operation)) = self.text.undo() {
2594 self.send_operation(Operation::Buffer(operation), true, cx);
2595 self.did_edit(&old_version, was_dirty, cx);
2596 Some(transaction_id)
2597 } else {
2598 None
2599 }
2600 }
2601
2602 /// Manually undoes a specific transaction in the buffer's undo history.
2603 pub fn undo_transaction(
2604 &mut self,
2605 transaction_id: TransactionId,
2606 cx: &mut Context<Self>,
2607 ) -> bool {
2608 let was_dirty = self.is_dirty();
2609 let old_version = self.version.clone();
2610 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2611 self.send_operation(Operation::Buffer(operation), true, cx);
2612 self.did_edit(&old_version, was_dirty, cx);
2613 true
2614 } else {
2615 false
2616 }
2617 }
2618
2619 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2620 pub fn undo_to_transaction(
2621 &mut self,
2622 transaction_id: TransactionId,
2623 cx: &mut Context<Self>,
2624 ) -> bool {
2625 let was_dirty = self.is_dirty();
2626 let old_version = self.version.clone();
2627
2628 let operations = self.text.undo_to_transaction(transaction_id);
2629 let undone = !operations.is_empty();
2630 for operation in operations {
2631 self.send_operation(Operation::Buffer(operation), true, cx);
2632 }
2633 if undone {
2634 self.did_edit(&old_version, was_dirty, cx)
2635 }
2636 undone
2637 }
2638
2639 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2640 let was_dirty = self.is_dirty();
2641 let operation = self.text.undo_operations(counts);
2642 let old_version = self.version.clone();
2643 self.send_operation(Operation::Buffer(operation), true, cx);
2644 self.did_edit(&old_version, was_dirty, cx);
2645 }
2646
    /// Redoes the most recently undone transaction.
2648 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2649 let was_dirty = self.is_dirty();
2650 let old_version = self.version.clone();
2651
2652 if let Some((transaction_id, operation)) = self.text.redo() {
2653 self.send_operation(Operation::Buffer(operation), true, cx);
2654 self.did_edit(&old_version, was_dirty, cx);
2655 Some(transaction_id)
2656 } else {
2657 None
2658 }
2659 }
2660
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2662 pub fn redo_to_transaction(
2663 &mut self,
2664 transaction_id: TransactionId,
2665 cx: &mut Context<Self>,
2666 ) -> bool {
2667 let was_dirty = self.is_dirty();
2668 let old_version = self.version.clone();
2669
2670 let operations = self.text.redo_to_transaction(transaction_id);
2671 let redone = !operations.is_empty();
2672 for operation in operations {
2673 self.send_operation(Operation::Buffer(operation), true, cx);
2674 }
2675 if redone {
2676 self.did_edit(&old_version, was_dirty, cx)
2677 }
2678 redone
2679 }
2680
2681 /// Override current completion triggers with the user-provided completion triggers.
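    ///
    /// A hypothetical call (not a doctest); `server_id` stands for the relevant
    /// [`LanguageServerId`]:
    ///
    /// ```ignore
    /// buffer.set_completion_triggers(
    ///     server_id,
    ///     BTreeSet::from([".".to_string(), "::".to_string()]),
    ///     cx,
    /// );
    /// ```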
2682 pub fn set_completion_triggers(
2683 &mut self,
2684 server_id: LanguageServerId,
2685 triggers: BTreeSet<String>,
2686 cx: &mut Context<Self>,
2687 ) {
2688 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2689 if triggers.is_empty() {
2690 self.completion_triggers_per_language_server
2691 .remove(&server_id);
2692 self.completion_triggers = self
2693 .completion_triggers_per_language_server
2694 .values()
2695 .flat_map(|triggers| triggers.into_iter().cloned())
2696 .collect();
2697 } else {
2698 self.completion_triggers_per_language_server
2699 .insert(server_id, triggers.clone());
2700 self.completion_triggers.extend(triggers.iter().cloned());
2701 }
2702 self.send_operation(
2703 Operation::UpdateCompletionTriggers {
2704 triggers: triggers.into_iter().collect(),
2705 lamport_timestamp: self.completion_triggers_timestamp,
2706 server_id,
2707 },
2708 true,
2709 cx,
2710 );
2711 cx.notify();
2712 }
2713
2714 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2716 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2717 &self.completion_triggers
2718 }
2719
2720 /// Call this directly after performing edits to prevent the preview tab
2721 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2722 /// to return false until there are additional edits.
2723 pub fn refresh_preview(&mut self) {
2724 self.preview_version = self.version.clone();
2725 }
2726
2727 /// Whether we should preserve the preview status of a tab containing this buffer.
2728 pub fn preserve_preview(&self) -> bool {
2729 !self.has_edits_since(&self.preview_version)
2730 }
2731}
2732
2733#[doc(hidden)]
2734#[cfg(any(test, feature = "test-support"))]
2735impl Buffer {
2736 pub fn edit_via_marked_text(
2737 &mut self,
2738 marked_string: &str,
2739 autoindent_mode: Option<AutoindentMode>,
2740 cx: &mut Context<Self>,
2741 ) {
2742 let edits = self.edits_for_marked_text(marked_string);
2743 self.edit(edits, autoindent_mode, cx);
2744 }
2745
2746 pub fn set_group_interval(&mut self, group_interval: Duration) {
2747 self.text.set_group_interval(group_interval);
2748 }
2749
2750 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2751 where
2752 T: rand::Rng,
2753 {
2754 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2755 let mut last_end = None;
2756 for _ in 0..old_range_count {
2757 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2758 break;
2759 }
2760
2761 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2762 let mut range = self.random_byte_range(new_start, rng);
2763 if rng.gen_bool(0.2) {
2764 mem::swap(&mut range.start, &mut range.end);
2765 }
2766 last_end = Some(range.end);
2767
2768 let new_text_len = rng.gen_range(0..10);
2769 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2770 new_text = new_text.to_uppercase();
2771
2772 edits.push((range, new_text));
2773 }
2774 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2775 self.edit(edits, None, cx);
2776 }
2777
2778 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2779 let was_dirty = self.is_dirty();
2780 let old_version = self.version.clone();
2781
2782 let ops = self.text.randomly_undo_redo(rng);
2783 if !ops.is_empty() {
2784 for op in ops {
2785 self.send_operation(Operation::Buffer(op), true, cx);
2786 self.did_edit(&old_version, was_dirty, cx);
2787 }
2788 }
2789 }
2790}
2791
2792impl EventEmitter<BufferEvent> for Buffer {}
2793
2794impl Deref for Buffer {
2795 type Target = TextBuffer;
2796
2797 fn deref(&self) -> &Self::Target {
2798 &self.text
2799 }
2800}
2801
2802impl BufferSnapshot {
2803 /// Returns [`IndentSize`] for a given line that respects user settings and
2804 /// language preferences.
2805 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2806 indent_size_for_line(self, row)
2807 }
2808
2809 /// Returns [`IndentSize`] for a given position that respects user settings
2810 /// and language preferences.
2811 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2812 let settings = language_settings(
2813 self.language_at(position).map(|l| l.name()),
2814 self.file(),
2815 cx,
2816 );
2817 if settings.hard_tabs {
2818 IndentSize::tab()
2819 } else {
2820 IndentSize::spaces(settings.tab_size.get())
2821 }
2822 }
2823
2824 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2825 /// is passed in as `single_indent_size`.
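    ///
    /// A small illustrative sketch (not a doctest), assuming a `BufferSnapshot` named `snapshot`:
    ///
    /// ```ignore
    /// // Suggest indents for rows 10..=12 using a four-space unit of indentation.
    /// let suggestions = snapshot.suggested_indents(10..13, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row} -> {} columns", indent.len);
    /// }
    /// ```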
2826 pub fn suggested_indents(
2827 &self,
2828 rows: impl Iterator<Item = u32>,
2829 single_indent_size: IndentSize,
2830 ) -> BTreeMap<u32, IndentSize> {
2831 let mut result = BTreeMap::new();
2832
2833 for row_range in contiguous_ranges(rows, 10) {
2834 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2835 Some(suggestions) => suggestions,
2836 _ => break,
2837 };
2838
2839 for (row, suggestion) in row_range.zip(suggestions) {
2840 let indent_size = if let Some(suggestion) = suggestion {
2841 result
2842 .get(&suggestion.basis_row)
2843 .copied()
2844 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2845 .with_delta(suggestion.delta, single_indent_size)
2846 } else {
2847 self.indent_size_for_line(row)
2848 };
2849
2850 result.insert(row, indent_size);
2851 }
2852 }
2853
2854 result
2855 }
2856
2857 fn suggest_autoindents(
2858 &self,
2859 row_range: Range<u32>,
2860 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2861 let config = &self.language.as_ref()?.config;
2862 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2863 let significant_indentation = config.significant_indentation;
2864
2865 // Find the suggested indentation ranges based on the syntax tree.
2866 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2867 let end = Point::new(row_range.end, 0);
2868 let range = (start..end).to_offset(&self.text);
2869 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2870 Some(&grammar.indents_config.as_ref()?.query)
2871 });
2872 let indent_configs = matches
2873 .grammars()
2874 .iter()
2875 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2876 .collect::<Vec<_>>();
2877
2878 let mut indent_ranges = Vec::<Range<Point>>::new();
2879 let mut outdent_positions = Vec::<Point>::new();
2880 while let Some(mat) = matches.peek() {
2881 let mut start: Option<Point> = None;
2882 let mut end: Option<Point> = None;
2883 let mut outdent: Option<Point> = None;
2884
2885 let config = &indent_configs[mat.grammar_index];
2886 for capture in mat.captures {
2887 if capture.index == config.indent_capture_ix {
2888 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2889 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2890 } else if Some(capture.index) == config.start_capture_ix {
2891 start = Some(Point::from_ts_point(capture.node.end_position()));
2892 } else if Some(capture.index) == config.end_capture_ix {
2893 end = Some(Point::from_ts_point(capture.node.start_position()));
2894 } else if Some(capture.index) == config.outdent_capture_ix {
2895 let point = Point::from_ts_point(capture.node.start_position());
2896 outdent.get_or_insert(point);
2897 outdent_positions.push(point);
2898 }
2899 }
2900
2901 matches.advance();
            // In the case of significant indentation, expand the end to the outdent position.
2903 let end = if significant_indentation {
2904 outdent.or(end)
2905 } else {
2906 end
2907 };
2908 if let Some((start, end)) = start.zip(end) {
2909 if start.row == end.row && (!significant_indentation || start.column < end.column) {
2910 continue;
2911 }
2912 let range = start..end;
2913 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2914 Err(ix) => indent_ranges.insert(ix, range),
2915 Ok(ix) => {
2916 let prev_range = &mut indent_ranges[ix];
2917 prev_range.end = prev_range.end.max(range.end);
2918 }
2919 }
2920 }
2921 }
2922
2923 let mut error_ranges = Vec::<Range<Point>>::new();
2924 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2925 grammar.error_query.as_ref()
2926 });
2927 while let Some(mat) = matches.peek() {
2928 let node = mat.captures[0].node;
2929 let start = Point::from_ts_point(node.start_position());
2930 let end = Point::from_ts_point(node.end_position());
2931 let range = start..end;
2932 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2933 Ok(ix) | Err(ix) => ix,
2934 };
2935 let mut end_ix = ix;
2936 while let Some(existing_range) = error_ranges.get(end_ix) {
2937 if existing_range.end < end {
2938 end_ix += 1;
2939 } else {
2940 break;
2941 }
2942 }
2943 error_ranges.splice(ix..end_ix, [range]);
2944 matches.advance();
2945 }
2946
        // We don't use outdent positions to truncate in the case of significant indentation;
        // rather, we use them to expand the indent ranges (handled above).
2949 if !significant_indentation {
2950 outdent_positions.sort();
2951 for outdent_position in outdent_positions {
                // Find the innermost indent range containing this outdent position,
                // and set its end to the outdent position.
2954 if let Some(range_to_truncate) = indent_ranges
2955 .iter_mut()
2956 .filter(|indent_range| indent_range.contains(&outdent_position))
2957 .next_back()
2958 {
2959 range_to_truncate.end = outdent_position;
2960 }
2961 }
2962 }
2963
        // Find the suggested indentation increases and decreases based on regexes.
2965 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2966 self.for_each_line(
2967 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2968 ..Point::new(row_range.end, 0),
2969 |row, line| {
2970 if config
2971 .decrease_indent_pattern
2972 .as_ref()
2973 .map_or(false, |regex| regex.is_match(line))
2974 {
2975 indent_change_rows.push((row, Ordering::Less));
2976 }
2977 if config
2978 .increase_indent_pattern
2979 .as_ref()
2980 .map_or(false, |regex| regex.is_match(line))
2981 {
2982 indent_change_rows.push((row + 1, Ordering::Greater));
2983 }
2984 },
2985 );
2986
2987 let mut indent_changes = indent_change_rows.into_iter().peekable();
2988 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2989 prev_non_blank_row.unwrap_or(0)
2990 } else {
2991 row_range.start.saturating_sub(1)
2992 };
2993 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2994 Some(row_range.map(move |row| {
2995 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2996
2997 let mut indent_from_prev_row = false;
2998 let mut outdent_from_prev_row = false;
2999 let mut outdent_to_row = u32::MAX;
3000 let mut from_regex = false;
3001
3002 while let Some((indent_row, delta)) = indent_changes.peek() {
3003 match indent_row.cmp(&row) {
3004 Ordering::Equal => match delta {
3005 Ordering::Less => {
3006 from_regex = true;
3007 outdent_from_prev_row = true
3008 }
3009 Ordering::Greater => {
3010 indent_from_prev_row = true;
3011 from_regex = true
3012 }
3013 _ => {}
3014 },
3015
3016 Ordering::Greater => break,
3017 Ordering::Less => {}
3018 }
3019
3020 indent_changes.next();
3021 }
3022
3023 for range in &indent_ranges {
3024 if range.start.row >= row {
3025 break;
3026 }
3027 if range.start.row == prev_row && range.end > row_start {
3028 indent_from_prev_row = true;
3029 }
3030 if significant_indentation && self.is_line_blank(row) && range.start.row == prev_row
3031 {
3032 indent_from_prev_row = true;
3033 }
3034 if !significant_indentation || !self.is_line_blank(row) {
3035 if range.end > prev_row_start && range.end <= row_start {
3036 outdent_to_row = outdent_to_row.min(range.start.row);
3037 }
3038 }
3039 }
3040
3041 let within_error = error_ranges
3042 .iter()
3043 .any(|e| e.start.row < row && e.end > row_start);
3044
3045 let suggestion = if outdent_to_row == prev_row
3046 || (outdent_from_prev_row && indent_from_prev_row)
3047 {
3048 Some(IndentSuggestion {
3049 basis_row: prev_row,
3050 delta: Ordering::Equal,
3051 within_error: within_error && !from_regex,
3052 })
3053 } else if indent_from_prev_row {
3054 Some(IndentSuggestion {
3055 basis_row: prev_row,
3056 delta: Ordering::Greater,
3057 within_error: within_error && !from_regex,
3058 })
3059 } else if outdent_to_row < prev_row {
3060 Some(IndentSuggestion {
3061 basis_row: outdent_to_row,
3062 delta: Ordering::Equal,
3063 within_error: within_error && !from_regex,
3064 })
3065 } else if outdent_from_prev_row {
3066 Some(IndentSuggestion {
3067 basis_row: prev_row,
3068 delta: Ordering::Less,
3069 within_error: within_error && !from_regex,
3070 })
3071 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3072 {
3073 Some(IndentSuggestion {
3074 basis_row: prev_row,
3075 delta: Ordering::Equal,
3076 within_error: within_error && !from_regex,
3077 })
3078 } else {
3079 None
3080 };
3081
3082 prev_row = row;
3083 prev_row_start = row_start;
3084 suggestion
3085 }))
3086 }
3087
3088 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3089 while row > 0 {
3090 row -= 1;
3091 if !self.is_line_blank(row) {
3092 return Some(row);
3093 }
3094 }
3095 None
3096 }
3097
3098 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3099 let captures = self.syntax.captures(range, &self.text, |grammar| {
3100 grammar.highlights_query.as_ref()
3101 });
3102 let highlight_maps = captures
3103 .grammars()
3104 .iter()
3105 .map(|grammar| grammar.highlight_map())
3106 .collect();
3107 (captures, highlight_maps)
3108 }
3109
3110 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3111 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3112 /// returned in chunks where each chunk has a single syntax highlighting style and
3113 /// diagnostic status.
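    ///
    /// A hedged sketch (not a doctest), assuming each yielded chunk exposes its `text`:
    ///
    /// ```ignore
    /// // Reassemble the language-aware chunks of the first line back into a string.
    /// let mut line = String::new();
    /// let range = Point::new(0, 0)..Point::new(0, snapshot.line_len(0));
    /// for chunk in snapshot.chunks(range, true) {
    ///     line.push_str(chunk.text);
    /// }
    /// ```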
3114 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3115 let range = range.start.to_offset(self)..range.end.to_offset(self);
3116
3117 let mut syntax = None;
3118 if language_aware {
3119 syntax = Some(self.get_highlights(range.clone()));
3120 }
3121 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3122 let diagnostics = language_aware;
3123 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3124 }
3125
3126 pub fn highlighted_text_for_range<T: ToOffset>(
3127 &self,
3128 range: Range<T>,
3129 override_style: Option<HighlightStyle>,
3130 syntax_theme: &SyntaxTheme,
3131 ) -> HighlightedText {
3132 HighlightedText::from_buffer_range(
3133 range,
3134 &self.text,
3135 &self.syntax,
3136 override_style,
3137 syntax_theme,
3138 )
3139 }
3140
3141 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3143 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3144 let mut line = String::new();
3145 let mut row = range.start.row;
3146 for chunk in self
3147 .as_rope()
3148 .chunks_in_range(range.to_offset(self))
3149 .chain(["\n"])
3150 {
3151 for (newline_ix, text) in chunk.split('\n').enumerate() {
3152 if newline_ix > 0 {
3153 callback(row, &line);
3154 row += 1;
3155 line.clear();
3156 }
3157 line.push_str(text);
3158 }
3159 }
3160 }
3161
3162 /// Iterates over every [`SyntaxLayer`] in the buffer.
3163 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3164 self.syntax
3165 .layers_for_range(0..self.len(), &self.text, true)
3166 }
3167
3168 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3169 let offset = position.to_offset(self);
3170 self.syntax
3171 .layers_for_range(offset..offset, &self.text, false)
3172 .filter(|l| l.node().end_byte() > offset)
3173 .last()
3174 }
3175
3176 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3177 &self,
3178 range: Range<D>,
3179 ) -> Option<SyntaxLayer> {
3180 let range = range.to_offset(self);
3181 return self
3182 .syntax
3183 .layers_for_range(range, &self.text, false)
3184 .max_by(|a, b| {
3185 if a.depth != b.depth {
3186 a.depth.cmp(&b.depth)
3187 } else if a.offset.0 != b.offset.0 {
3188 a.offset.0.cmp(&b.offset.0)
3189 } else {
3190 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3191 }
3192 });
3193 }
3194
3195 /// Returns the main [`Language`].
3196 pub fn language(&self) -> Option<&Arc<Language>> {
3197 self.language.as_ref()
3198 }
3199
3200 /// Returns the [`Language`] at the given location.
3201 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3202 self.syntax_layer_at(position)
3203 .map(|info| info.language)
3204 .or(self.language.as_ref())
3205 }
3206
3207 /// Returns the settings for the language at the given location.
3208 pub fn settings_at<'a, D: ToOffset>(
3209 &'a self,
3210 position: D,
3211 cx: &'a App,
3212 ) -> Cow<'a, LanguageSettings> {
3213 language_settings(
3214 self.language_at(position).map(|l| l.name()),
3215 self.file.as_ref(),
3216 cx,
3217 )
3218 }
3219
3220 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3221 CharClassifier::new(self.language_scope_at(point))
3222 }
3223
3224 /// Returns the [`LanguageScope`] at the given location.
3225 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3226 let offset = position.to_offset(self);
3227 let mut scope = None;
3228 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3229
3230 // Use the layer that has the smallest node intersecting the given point.
3231 for layer in self
3232 .syntax
3233 .layers_for_range(offset..offset, &self.text, false)
3234 {
3235 let mut cursor = layer.node().walk();
3236
3237 let mut range = None;
3238 loop {
3239 let child_range = cursor.node().byte_range();
3240 if !child_range.contains(&offset) {
3241 break;
3242 }
3243
3244 range = Some(child_range);
3245 if cursor.goto_first_child_for_byte(offset).is_none() {
3246 break;
3247 }
3248 }
3249
3250 if let Some(range) = range {
3251 if smallest_range_and_depth.as_ref().map_or(
3252 true,
3253 |(smallest_range, smallest_range_depth)| {
3254 if layer.depth > *smallest_range_depth {
3255 true
3256 } else if layer.depth == *smallest_range_depth {
3257 range.len() < smallest_range.len()
3258 } else {
3259 false
3260 }
3261 },
3262 ) {
3263 smallest_range_and_depth = Some((range, layer.depth));
3264 scope = Some(LanguageScope {
3265 language: layer.language.clone(),
3266 override_id: layer.override_id(offset, &self.text),
3267 });
3268 }
3269 }
3270 }
3271
3272 scope.or_else(|| {
3273 self.language.clone().map(|language| LanguageScope {
3274 language,
3275 override_id: None,
3276 })
3277 })
3278 }
3279
3280 /// Returns a tuple of the range and character kind of the word
3281 /// surrounding the given position.
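    ///
    /// A hypothetical usage sketch (not a doctest); `cursor_offset` stands for any
    /// `ToOffset` position, and `CharKind::Word` / `text_for_range` are assumed to be
    /// available as used elsewhere in the crate:
    ///
    /// ```ignore
    /// // Grab the word under the cursor, if any, and materialize it as a `String`.
    /// let (range, kind) = snapshot.surrounding_word(cursor_offset);
    /// if kind == Some(CharKind::Word) {
    ///     let word: String = snapshot.text_for_range(range).collect();
    /// }
    /// ```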
3282 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3283 let mut start = start.to_offset(self);
3284 let mut end = start;
3285 let mut next_chars = self.chars_at(start).take(128).peekable();
3286 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3287
3288 let classifier = self.char_classifier_at(start);
3289 let word_kind = cmp::max(
3290 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3291 next_chars.peek().copied().map(|c| classifier.kind(c)),
3292 );
3293
3294 for ch in prev_chars {
3295 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3296 start -= ch.len_utf8();
3297 } else {
3298 break;
3299 }
3300 }
3301
3302 for ch in next_chars {
3303 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3304 end += ch.len_utf8();
3305 } else {
3306 break;
3307 }
3308 }
3309
3310 (start..end, word_kind)
3311 }
3312
3313 /// Returns the closest syntax node enclosing the given range.
3314 pub fn syntax_ancestor<'a, T: ToOffset>(
3315 &'a self,
3316 range: Range<T>,
3317 ) -> Option<tree_sitter::Node<'a>> {
3318 let range = range.start.to_offset(self)..range.end.to_offset(self);
3319 let mut result: Option<tree_sitter::Node<'a>> = None;
3320 'outer: for layer in self
3321 .syntax
3322 .layers_for_range(range.clone(), &self.text, true)
3323 {
3324 let mut cursor = layer.node().walk();
3325
3326 // Descend to the first leaf that touches the start of the range.
3327 //
3328 // If the range is non-empty and the current node ends exactly at the start,
3329 // move to the next sibling to find a node that extends beyond the start.
3330 //
3331 // If the range is empty and the current node starts after the range position,
3332 // move to the previous sibling to find the node that contains the position.
3333 while cursor.goto_first_child_for_byte(range.start).is_some() {
3334 if !range.is_empty() && cursor.node().end_byte() == range.start {
3335 cursor.goto_next_sibling();
3336 }
3337 if range.is_empty() && cursor.node().start_byte() > range.start {
3338 cursor.goto_previous_sibling();
3339 }
3340 }
3341
3342 // Ascend to the smallest ancestor that strictly contains the range.
3343 loop {
3344 let node_range = cursor.node().byte_range();
3345 if node_range.start <= range.start
3346 && node_range.end >= range.end
3347 && node_range.len() > range.len()
3348 {
3349 break;
3350 }
3351 if !cursor.goto_parent() {
3352 continue 'outer;
3353 }
3354 }
3355
3356 let left_node = cursor.node();
3357 let mut layer_result = left_node;
3358
3359 // For an empty range, try to find another node immediately to the right of the range.
3360 if left_node.end_byte() == range.start {
3361 let mut right_node = None;
3362 while !cursor.goto_next_sibling() {
3363 if !cursor.goto_parent() {
3364 break;
3365 }
3366 }
3367
3368 while cursor.node().start_byte() == range.start {
3369 right_node = Some(cursor.node());
3370 if !cursor.goto_first_child() {
3371 break;
3372 }
3373 }
3374
3375 // If there is a candidate node on both sides of the (empty) range, then
3376 // decide between the two by favoring a named node over an anonymous token.
3377 // If both nodes are the same in that regard, favor the right one.
3378 if let Some(right_node) = right_node {
3379 if right_node.is_named() || !left_node.is_named() {
3380 layer_result = right_node;
3381 }
3382 }
3383 }
3384
3385 if let Some(previous_result) = &result {
3386 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3387 continue;
3388 }
3389 }
3390 result = Some(layer_result);
3391 }
3392
3393 result
3394 }
3395
    /// Returns the root syntax node within the given row.
3397 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node> {
3398 let start_offset = position.to_offset(self);
3399
3400 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3401
3402 let layer = self
3403 .syntax
3404 .layers_for_range(start_offset..start_offset, &self.text, true)
3405 .next()?;
3406
3407 let mut cursor = layer.node().walk();
3408
3409 // Descend to the first leaf that touches the start of the range.
3410 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3411 if cursor.node().end_byte() == start_offset {
3412 cursor.goto_next_sibling();
3413 }
3414 }
3415
3416 // Ascend to the root node within the same row.
3417 while cursor.goto_parent() {
3418 if cursor.node().start_position().row != row {
3419 break;
3420 }
3421 }
3422
3423 return Some(cursor.node());
3424 }
3425
3426 /// Returns the outline for the buffer.
3427 ///
3428 /// This method allows passing an optional [`SyntaxTheme`] to
3429 /// syntax-highlight the returned symbols.
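    ///
    /// A small illustrative sketch (not a doctest), assuming `Outline` exposes its `items`:
    ///
    /// ```ignore
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in &outline.items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```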
3430 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3431 self.outline_items_containing(0..self.len(), true, theme)
3432 .map(Outline::new)
3433 }
3434
3435 /// Returns all the symbols that contain the given position.
3436 ///
3437 /// This method allows passing an optional [`SyntaxTheme`] to
3438 /// syntax-highlight the returned symbols.
3439 pub fn symbols_containing<T: ToOffset>(
3440 &self,
3441 position: T,
3442 theme: Option<&SyntaxTheme>,
3443 ) -> Option<Vec<OutlineItem<Anchor>>> {
3444 let position = position.to_offset(self);
3445 let mut items = self.outline_items_containing(
3446 position.saturating_sub(1)..self.len().min(position + 1),
3447 false,
3448 theme,
3449 )?;
3450 let mut prev_depth = None;
3451 items.retain(|item| {
3452 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3453 prev_depth = Some(item.depth);
3454 result
3455 });
3456 Some(items)
3457 }
3458
3459 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3460 let range = range.to_offset(self);
3461 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3462 grammar.outline_config.as_ref().map(|c| &c.query)
3463 });
3464 let configs = matches
3465 .grammars()
3466 .iter()
3467 .map(|g| g.outline_config.as_ref().unwrap())
3468 .collect::<Vec<_>>();
3469
3470 while let Some(mat) = matches.peek() {
3471 let config = &configs[mat.grammar_index];
3472 let containing_item_node = maybe!({
3473 let item_node = mat.captures.iter().find_map(|cap| {
3474 if cap.index == config.item_capture_ix {
3475 Some(cap.node)
3476 } else {
3477 None
3478 }
3479 })?;
3480
3481 let item_byte_range = item_node.byte_range();
3482 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3483 None
3484 } else {
3485 Some(item_node)
3486 }
3487 });
3488
3489 if let Some(item_node) = containing_item_node {
3490 return Some(
3491 Point::from_ts_point(item_node.start_position())
3492 ..Point::from_ts_point(item_node.end_position()),
3493 );
3494 }
3495
3496 matches.advance();
3497 }
3498 None
3499 }
3500
3501 pub fn outline_items_containing<T: ToOffset>(
3502 &self,
3503 range: Range<T>,
3504 include_extra_context: bool,
3505 theme: Option<&SyntaxTheme>,
3506 ) -> Option<Vec<OutlineItem<Anchor>>> {
3507 let range = range.to_offset(self);
3508 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3509 grammar.outline_config.as_ref().map(|c| &c.query)
3510 });
3511 let configs = matches
3512 .grammars()
3513 .iter()
3514 .map(|g| g.outline_config.as_ref().unwrap())
3515 .collect::<Vec<_>>();
3516
3517 let mut items = Vec::new();
3518 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3519 while let Some(mat) = matches.peek() {
3520 let config = &configs[mat.grammar_index];
3521 if let Some(item) =
3522 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3523 {
3524 items.push(item);
3525 } else if let Some(capture) = mat
3526 .captures
3527 .iter()
3528 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3529 {
3530 let capture_range = capture.node.start_position()..capture.node.end_position();
3531 let mut capture_row_range =
3532 capture_range.start.row as u32..capture_range.end.row as u32;
3533 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3534 {
3535 capture_row_range.end -= 1;
3536 }
3537 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3538 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3539 last_row_range.end = capture_row_range.end;
3540 } else {
3541 annotation_row_ranges.push(capture_row_range);
3542 }
3543 } else {
3544 annotation_row_ranges.push(capture_row_range);
3545 }
3546 }
3547 matches.advance();
3548 }
3549
3550 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3551
3552 // Assign depths based on containment relationships and convert to anchors.
3553 let mut item_ends_stack = Vec::<Point>::new();
3554 let mut anchor_items = Vec::new();
3555 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3556 for item in items {
3557 while let Some(last_end) = item_ends_stack.last().copied() {
3558 if last_end < item.range.end {
3559 item_ends_stack.pop();
3560 } else {
3561 break;
3562 }
3563 }
3564
3565 let mut annotation_row_range = None;
3566 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3567 let row_preceding_item = item.range.start.row.saturating_sub(1);
3568 if next_annotation_row_range.end < row_preceding_item {
3569 annotation_row_ranges.next();
3570 } else {
3571 if next_annotation_row_range.end == row_preceding_item {
3572 annotation_row_range = Some(next_annotation_row_range.clone());
3573 annotation_row_ranges.next();
3574 }
3575 break;
3576 }
3577 }
3578
3579 anchor_items.push(OutlineItem {
3580 depth: item_ends_stack.len(),
3581 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3582 text: item.text,
3583 highlight_ranges: item.highlight_ranges,
3584 name_ranges: item.name_ranges,
3585 body_range: item.body_range.map(|body_range| {
3586 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3587 }),
3588 annotation_range: annotation_row_range.map(|annotation_range| {
3589 self.anchor_after(Point::new(annotation_range.start, 0))
3590 ..self.anchor_before(Point::new(
3591 annotation_range.end,
3592 self.line_len(annotation_range.end),
3593 ))
3594 }),
3595 });
3596 item_ends_stack.push(item.range.end);
3597 }
3598
3599 Some(anchor_items)
3600 }
3601
3602 fn next_outline_item(
3603 &self,
3604 config: &OutlineConfig,
3605 mat: &SyntaxMapMatch,
3606 range: &Range<usize>,
3607 include_extra_context: bool,
3608 theme: Option<&SyntaxTheme>,
3609 ) -> Option<OutlineItem<Point>> {
3610 let item_node = mat.captures.iter().find_map(|cap| {
3611 if cap.index == config.item_capture_ix {
3612 Some(cap.node)
3613 } else {
3614 None
3615 }
3616 })?;
3617
3618 let item_byte_range = item_node.byte_range();
3619 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3620 return None;
3621 }
3622 let item_point_range = Point::from_ts_point(item_node.start_position())
3623 ..Point::from_ts_point(item_node.end_position());
3624
3625 let mut open_point = None;
3626 let mut close_point = None;
3627 let mut buffer_ranges = Vec::new();
3628 for capture in mat.captures {
3629 let node_is_name;
3630 if capture.index == config.name_capture_ix {
3631 node_is_name = true;
3632 } else if Some(capture.index) == config.context_capture_ix
3633 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3634 {
3635 node_is_name = false;
3636 } else {
3637 if Some(capture.index) == config.open_capture_ix {
3638 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3639 } else if Some(capture.index) == config.close_capture_ix {
3640 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3641 }
3642
3643 continue;
3644 }
3645
3646 let mut range = capture.node.start_byte()..capture.node.end_byte();
3647 let start = capture.node.start_position();
3648 if capture.node.end_position().row > start.row {
3649 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3650 }
3651
3652 if !range.is_empty() {
3653 buffer_ranges.push((range, node_is_name));
3654 }
3655 }
3656 if buffer_ranges.is_empty() {
3657 return None;
3658 }
3659 let mut text = String::new();
3660 let mut highlight_ranges = Vec::new();
3661 let mut name_ranges = Vec::new();
3662 let mut chunks = self.chunks(
3663 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3664 true,
3665 );
3666 let mut last_buffer_range_end = 0;
3667
3668 for (buffer_range, is_name) in buffer_ranges {
3669 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3670 if space_added {
3671 text.push(' ');
3672 }
3673 let before_append_len = text.len();
3674 let mut offset = buffer_range.start;
3675 chunks.seek(buffer_range.clone());
3676 for mut chunk in chunks.by_ref() {
3677 if chunk.text.len() > buffer_range.end - offset {
3678 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3679 offset = buffer_range.end;
3680 } else {
3681 offset += chunk.text.len();
3682 }
3683 let style = chunk
3684 .syntax_highlight_id
3685 .zip(theme)
3686 .and_then(|(highlight, theme)| highlight.style(theme));
3687 if let Some(style) = style {
3688 let start = text.len();
3689 let end = start + chunk.text.len();
3690 highlight_ranges.push((start..end, style));
3691 }
3692 text.push_str(chunk.text);
3693 if offset >= buffer_range.end {
3694 break;
3695 }
3696 }
3697 if is_name {
3698 let after_append_len = text.len();
3699 let start = if space_added && !name_ranges.is_empty() {
3700 before_append_len - 1
3701 } else {
3702 before_append_len
3703 };
3704 name_ranges.push(start..after_append_len);
3705 }
3706 last_buffer_range_end = buffer_range.end;
3707 }
3708
3709 Some(OutlineItem {
3710 depth: 0, // We'll calculate the depth later
3711 range: item_point_range,
3712 text,
3713 highlight_ranges,
3714 name_ranges,
3715 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3716 annotation_range: None,
3717 })
3718 }
3719
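    /// Returns the byte ranges of function bodies (the `InsideFunction` text object)
    /// that lie within the given range, e.g. for folding them.
    ///
    /// A minimal sketch (assuming a `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// let fold_ranges: Vec<_> = snapshot.function_body_fold_ranges(0..snapshot.len()).collect();
    /// ```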
3720 pub fn function_body_fold_ranges<T: ToOffset>(
3721 &self,
3722 within: Range<T>,
3723 ) -> impl Iterator<Item = Range<usize>> + '_ {
3724 self.text_object_ranges(within, TreeSitterOptions::default())
3725 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3726 }
3727
3728 /// For each grammar in the language, runs the provided
3729 /// [`tree_sitter::Query`] against the given range.
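    ///
    /// A minimal sketch (assuming a `snapshot: BufferSnapshot`), using the outline
    /// query as the query selector:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len().min(1024), |grammar| {
    ///     grammar.outline_config.as_ref().map(|c| &c.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here.
    ///     matches.advance();
    /// }
    /// ```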
3730 pub fn matches(
3731 &self,
3732 range: Range<usize>,
3733 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3734 ) -> SyntaxMapMatches {
3735 self.syntax.matches(range, self, query)
3736 }
3737
3738 pub fn all_bracket_ranges(
3739 &self,
3740 range: Range<usize>,
3741 ) -> impl Iterator<Item = BracketMatch> + '_ {
3742 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3743 grammar.brackets_config.as_ref().map(|c| &c.query)
3744 });
3745 let configs = matches
3746 .grammars()
3747 .iter()
3748 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3749 .collect::<Vec<_>>();
3750
3751 iter::from_fn(move || {
3752 while let Some(mat) = matches.peek() {
3753 let mut open = None;
3754 let mut close = None;
3755 let config = &configs[mat.grammar_index];
3756 let pattern = &config.patterns[mat.pattern_index];
3757 for capture in mat.captures {
3758 if capture.index == config.open_capture_ix {
3759 open = Some(capture.node.byte_range());
3760 } else if capture.index == config.close_capture_ix {
3761 close = Some(capture.node.byte_range());
3762 }
3763 }
3764
3765 matches.advance();
3766
3767 let Some((open_range, close_range)) = open.zip(close) else {
3768 continue;
3769 };
3770
3771 let bracket_range = open_range.start..=close_range.end;
3772 if !bracket_range.overlaps(&range) {
3773 continue;
3774 }
3775
3776 return Some(BracketMatch {
3777 open_range,
3778 close_range,
3779 newline_only: pattern.newline_only,
3780 });
3781 }
3782 None
3783 })
3784 }
3785
3786 /// Returns bracket range pairs overlapping or adjacent to `range`
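    ///
    /// A minimal sketch (assuming a `snapshot: BufferSnapshot` and a cursor `offset`):
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(offset..offset) {
    ///     // `pair.open_range` and `pair.close_range` are byte ranges of the two delimiters.
    /// }
    /// ```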
3787 pub fn bracket_ranges<T: ToOffset>(
3788 &self,
3789 range: Range<T>,
3790 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Expand the range by one byte on each side so that bracket pairs adjacent to it are treated as overlapping.
3792 let range = range.start.to_offset(self).saturating_sub(1)
3793 ..self.len().min(range.end.to_offset(self) + 1);
3794 self.all_bracket_ranges(range)
3795 .filter(|pair| !pair.newline_only)
3796 }
3797
3798 pub fn text_object_ranges<T: ToOffset>(
3799 &self,
3800 range: Range<T>,
3801 options: TreeSitterOptions,
3802 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3803 let range = range.start.to_offset(self).saturating_sub(1)
3804 ..self.len().min(range.end.to_offset(self) + 1);
3805
3806 let mut matches =
3807 self.syntax
3808 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3809 grammar.text_object_config.as_ref().map(|c| &c.query)
3810 });
3811
3812 let configs = matches
3813 .grammars()
3814 .iter()
3815 .map(|grammar| grammar.text_object_config.as_ref())
3816 .collect::<Vec<_>>();
3817
3818 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3819
3820 iter::from_fn(move || {
3821 loop {
3822 while let Some(capture) = captures.pop() {
3823 if capture.0.overlaps(&range) {
3824 return Some(capture);
3825 }
3826 }
3827
3828 let mat = matches.peek()?;
3829
3830 let Some(config) = configs[mat.grammar_index].as_ref() else {
3831 matches.advance();
3832 continue;
3833 };
3834
3835 for capture in mat.captures {
3836 let Some(ix) = config
3837 .text_objects_by_capture_ix
3838 .binary_search_by_key(&capture.index, |e| e.0)
3839 .ok()
3840 else {
3841 continue;
3842 };
3843 let text_object = config.text_objects_by_capture_ix[ix].1;
3844 let byte_range = capture.node.byte_range();
3845
3846 let mut found = false;
3847 for (range, existing) in captures.iter_mut() {
3848 if existing == &text_object {
3849 range.start = range.start.min(byte_range.start);
3850 range.end = range.end.max(byte_range.end);
3851 found = true;
3852 break;
3853 }
3854 }
3855
3856 if !found {
3857 captures.push((byte_range, text_object));
3858 }
3859 }
3860
3861 matches.advance();
3862 }
3863 })
3864 }
3865
3866 /// Returns enclosing bracket ranges containing the given range
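    ///
    /// A minimal sketch (assuming a `snapshot: BufferSnapshot` and a `selection` offset range):
    ///
    /// ```ignore
    /// let enclosing: Vec<_> = snapshot.enclosing_bracket_ranges(selection.clone()).collect();
    /// ```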
3867 pub fn enclosing_bracket_ranges<T: ToOffset>(
3868 &self,
3869 range: Range<T>,
3870 ) -> impl Iterator<Item = BracketMatch> + '_ {
3871 let range = range.start.to_offset(self)..range.end.to_offset(self);
3872
3873 self.bracket_ranges(range.clone()).filter(move |pair| {
3874 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3875 })
3876 }
3877
    /// Returns the smallest enclosing bracket ranges containing the given range, or
    /// `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to filter which bracket ranges are considered.
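    ///
    /// A minimal sketch (assuming a `snapshot: BufferSnapshot` and an offset `range`;
    /// the filter shown is a hypothetical one that skips adjacent delimiters):
    ///
    /// ```ignore
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(
    ///     range.clone(),
    ///     Some(&|open, close| close.start > open.end),
    /// );
    /// ```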
3881 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3882 &self,
3883 range: Range<T>,
3884 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3885 ) -> Option<(Range<usize>, Range<usize>)> {
3886 let range = range.start.to_offset(self)..range.end.to_offset(self);
3887
3888 // Get the ranges of the innermost pair of brackets.
3889 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3890
3891 for pair in self.enclosing_bracket_ranges(range.clone()) {
3892 if let Some(range_filter) = range_filter {
3893 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3894 continue;
3895 }
3896 }
3897
3898 let len = pair.close_range.end - pair.open_range.start;
3899
3900 if let Some((existing_open, existing_close)) = &result {
3901 let existing_len = existing_close.end - existing_open.start;
3902 if len > existing_len {
3903 continue;
3904 }
3905 }
3906
3907 result = Some((pair.open_range, pair.close_range));
3908 }
3909
3910 result
3911 }
3912
3913 /// Returns anchor ranges for any matches of the redaction query.
3914 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3915 /// will be run on the relevant section of the buffer.
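    ///
    /// A minimal sketch (assuming a `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// // Byte ranges that should be displayed redacted, as captured by each
    /// // grammar's redaction query.
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```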
3916 pub fn redacted_ranges<T: ToOffset>(
3917 &self,
3918 range: Range<T>,
3919 ) -> impl Iterator<Item = Range<usize>> + '_ {
3920 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3921 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3922 grammar
3923 .redactions_config
3924 .as_ref()
3925 .map(|config| &config.query)
3926 });
3927
3928 let configs = syntax_matches
3929 .grammars()
3930 .iter()
3931 .map(|grammar| grammar.redactions_config.as_ref())
3932 .collect::<Vec<_>>();
3933
3934 iter::from_fn(move || {
3935 let redacted_range = syntax_matches
3936 .peek()
3937 .and_then(|mat| {
3938 configs[mat.grammar_index].and_then(|config| {
3939 mat.captures
3940 .iter()
3941 .find(|capture| capture.index == config.redaction_capture_ix)
3942 })
3943 })
3944 .map(|mat| mat.node.byte_range());
3945 syntax_matches.advance();
3946 redacted_range
3947 })
3948 }
3949
3950 pub fn injections_intersecting_range<T: ToOffset>(
3951 &self,
3952 range: Range<T>,
3953 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3954 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3955
3956 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3957 grammar
3958 .injection_config
3959 .as_ref()
3960 .map(|config| &config.query)
3961 });
3962
3963 let configs = syntax_matches
3964 .grammars()
3965 .iter()
3966 .map(|grammar| grammar.injection_config.as_ref())
3967 .collect::<Vec<_>>();
3968
3969 iter::from_fn(move || {
3970 let ranges = syntax_matches.peek().and_then(|mat| {
3971 let config = &configs[mat.grammar_index]?;
3972 let content_capture_range = mat.captures.iter().find_map(|capture| {
3973 if capture.index == config.content_capture_ix {
3974 Some(capture.node.byte_range())
3975 } else {
3976 None
3977 }
3978 })?;
3979 let language = self.language_at(content_capture_range.start)?;
3980 Some((content_capture_range, language))
3981 });
3982 syntax_matches.advance();
3983 ranges
3984 })
3985 }
3986
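    /// Returns the runnables whose captures intersect the given byte range, as
    /// produced by each grammar's runnable query.
    ///
    /// A minimal sketch (assuming a `snapshot: BufferSnapshot` and a `visible_range`):
    ///
    /// ```ignore
    /// for runnable_range in snapshot.runnable_ranges(visible_range.clone()) {
    ///     // `runnable_range.run_range` marks where the run indicator belongs, and
    ///     // `runnable_range.runnable.tags` lists the tags captured by the query.
    /// }
    /// ```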
3987 pub fn runnable_ranges(
3988 &self,
3989 offset_range: Range<usize>,
3990 ) -> impl Iterator<Item = RunnableRange> + '_ {
3991 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3992 grammar.runnable_config.as_ref().map(|config| &config.query)
3993 });
3994
3995 let test_configs = syntax_matches
3996 .grammars()
3997 .iter()
3998 .map(|grammar| grammar.runnable_config.as_ref())
3999 .collect::<Vec<_>>();
4000
4001 iter::from_fn(move || {
4002 loop {
4003 let mat = syntax_matches.peek()?;
4004
4005 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4006 let mut run_range = None;
4007 let full_range = mat.captures.iter().fold(
4008 Range {
4009 start: usize::MAX,
4010 end: 0,
4011 },
4012 |mut acc, next| {
4013 let byte_range = next.node.byte_range();
4014 if acc.start > byte_range.start {
4015 acc.start = byte_range.start;
4016 }
4017 if acc.end < byte_range.end {
4018 acc.end = byte_range.end;
4019 }
4020 acc
4021 },
4022 );
4023 if full_range.start > full_range.end {
4024 // We did not find a full spanning range of this match.
4025 return None;
4026 }
4027 let extra_captures: SmallVec<[_; 1]> =
4028 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4029 test_configs
4030 .extra_captures
4031 .get(capture.index as usize)
4032 .cloned()
4033 .and_then(|tag_name| match tag_name {
4034 RunnableCapture::Named(name) => {
4035 Some((capture.node.byte_range(), name))
4036 }
4037 RunnableCapture::Run => {
4038 let _ = run_range.insert(capture.node.byte_range());
4039 None
4040 }
4041 })
4042 }));
4043 let run_range = run_range?;
4044 let tags = test_configs
4045 .query
4046 .property_settings(mat.pattern_index)
4047 .iter()
4048 .filter_map(|property| {
4049 if *property.key == *"tag" {
4050 property
4051 .value
4052 .as_ref()
4053 .map(|value| RunnableTag(value.to_string().into()))
4054 } else {
4055 None
4056 }
4057 })
4058 .collect();
4059 let extra_captures = extra_captures
4060 .into_iter()
4061 .map(|(range, name)| {
4062 (
4063 name.to_string(),
4064 self.text_for_range(range.clone()).collect::<String>(),
4065 )
4066 })
4067 .collect();
4068 // All tags should have the same range.
4069 Some(RunnableRange {
4070 run_range,
4071 full_range,
4072 runnable: Runnable {
4073 tags,
4074 language: mat.language,
4075 buffer: self.remote_id(),
4076 },
4077 extra_captures,
4078 buffer_id: self.remote_id(),
4079 })
4080 });
4081
4082 syntax_matches.advance();
4083 if test_range.is_some() {
                    // Only `.peek()` returning `None` above ends this iterator. A match whose
                    // captures lack a run marker yields no range here; in that case we loop
                    // around and try the next match instead of terminating.
4086 return test_range;
4087 }
4088 }
4089 })
4090 }
4091
    /// Returns each peer's selections that intersect the given range, optionally including the local replica's.
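    ///
    /// A minimal sketch (assuming a `snapshot: BufferSnapshot` and a `visible: Range<Anchor>`):
    ///
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(visible.clone(), false)
    /// {
    ///     for selection in selections {
    ///         // Each `selection` is a `&Selection<Anchor>` belonging to `replica_id`.
    ///     }
    /// }
    /// ```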
4093 #[allow(clippy::type_complexity)]
4094 pub fn selections_in_range(
4095 &self,
4096 range: Range<Anchor>,
4097 include_local: bool,
4098 ) -> impl Iterator<
4099 Item = (
4100 ReplicaId,
4101 bool,
4102 CursorShape,
4103 impl Iterator<Item = &Selection<Anchor>> + '_,
4104 ),
4105 > + '_ {
4106 self.remote_selections
4107 .iter()
4108 .filter(move |(replica_id, set)| {
4109 (include_local || **replica_id != self.text.replica_id())
4110 && !set.selections.is_empty()
4111 })
4112 .map(move |(replica_id, set)| {
4113 let start_ix = match set.selections.binary_search_by(|probe| {
4114 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4115 }) {
4116 Ok(ix) | Err(ix) => ix,
4117 };
4118 let end_ix = match set.selections.binary_search_by(|probe| {
4119 probe.start.cmp(&range.end, self).then(Ordering::Less)
4120 }) {
4121 Ok(ix) | Err(ix) => ix,
4122 };
4123
4124 (
4125 *replica_id,
4126 set.line_mode,
4127 set.cursor_shape,
4128 set.selections[start_ix..end_ix].iter(),
4129 )
4130 })
4131 }
4132
    /// Returns whether the buffer contains any diagnostics.
4134 pub fn has_diagnostics(&self) -> bool {
4135 !self.diagnostics.is_empty()
4136 }
4137
4138 /// Returns all the diagnostics intersecting the given range.
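    ///
    /// A minimal sketch (assuming a `snapshot: BufferSnapshot`), resolving the
    /// entries to plain byte offsets:
    ///
    /// ```ignore
    /// let entries: Vec<DiagnosticEntry<usize>> =
    ///     snapshot.diagnostics_in_range(0..snapshot.len(), false).collect();
    /// ```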
4139 pub fn diagnostics_in_range<'a, T, O>(
4140 &'a self,
4141 search_range: Range<T>,
4142 reversed: bool,
4143 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4144 where
4145 T: 'a + Clone + ToOffset,
4146 O: 'a + FromAnchor,
4147 {
4148 let mut iterators: Vec<_> = self
4149 .diagnostics
4150 .iter()
4151 .map(|(_, collection)| {
4152 collection
4153 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4154 .peekable()
4155 })
4156 .collect();
4157
4158 std::iter::from_fn(move || {
4159 let (next_ix, _) = iterators
4160 .iter_mut()
4161 .enumerate()
4162 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4163 .min_by(|(_, a), (_, b)| {
4164 let cmp = a
4165 .range
4166 .start
4167 .cmp(&b.range.start, self)
4168 // when range is equal, sort by diagnostic severity
4169 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4170 // and stabilize order with group_id
4171 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4172 if reversed { cmp.reverse() } else { cmp }
4173 })?;
4174 iterators[next_ix]
4175 .next()
4176 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4177 diagnostic,
4178 range: FromAnchor::from_anchor(&range.start, self)
4179 ..FromAnchor::from_anchor(&range.end, self),
4180 })
4181 })
4182 }
4183
4184 /// Returns all the diagnostic groups associated with the given
4185 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
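    ///
    /// A minimal sketch (assuming a `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{:?}: {}", server_id, primary.diagnostic.message);
    /// }
    /// ```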
4187 pub fn diagnostic_groups(
4188 &self,
4189 language_server_id: Option<LanguageServerId>,
4190 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4191 let mut groups = Vec::new();
4192
4193 if let Some(language_server_id) = language_server_id {
4194 if let Ok(ix) = self
4195 .diagnostics
4196 .binary_search_by_key(&language_server_id, |e| e.0)
4197 {
4198 self.diagnostics[ix]
4199 .1
4200 .groups(language_server_id, &mut groups, self);
4201 }
4202 } else {
4203 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4204 diagnostics.groups(*language_server_id, &mut groups, self);
4205 }
4206 }
4207
4208 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4209 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4210 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4211 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4212 });
4213
4214 groups
4215 }
4216
4217 /// Returns an iterator over the diagnostics for the given group.
4218 pub fn diagnostic_group<O>(
4219 &self,
4220 group_id: usize,
4221 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4222 where
4223 O: FromAnchor + 'static,
4224 {
4225 self.diagnostics
4226 .iter()
4227 .flat_map(move |(_, set)| set.group(group_id, self))
4228 }
4229
4230 /// An integer version number that accounts for all updates besides
4231 /// the buffer's text itself (which is versioned via a version vector).
4232 pub fn non_text_state_update_count(&self) -> usize {
4233 self.non_text_state_update_count
4234 }
4235
    /// Returns a snapshot of the underlying file.
4237 pub fn file(&self) -> Option<&Arc<dyn File>> {
4238 self.file.as_ref()
4239 }
4240
    /// Resolves the path of the underlying file: relative to the worktree root by default,
    /// or including the worktree root name when `include_root` is true (or when the path has no file name).
4242 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4243 if let Some(file) = self.file() {
4244 if file.path().file_name().is_none() || include_root {
4245 Some(file.full_path(cx))
4246 } else {
4247 Some(file.path().to_path_buf())
4248 }
4249 } else {
4250 None
4251 }
4252 }
4253
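    /// Collects the words in the given range, keyed by their text, optionally
    /// filtering them to fuzzy matches of `fuzzy_contents` and optionally skipping
    /// words that start with a digit.
    ///
    /// A minimal sketch (assuming a `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("req"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```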
4254 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4255 let query_str = query.fuzzy_contents;
4256 if query_str.map_or(false, |query| query.is_empty()) {
4257 return BTreeMap::default();
4258 }
4259
4260 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4261 language,
4262 override_id: None,
4263 }));
4264
4265 let mut query_ix = 0;
4266 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4267 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4268
4269 let mut words = BTreeMap::default();
4270 let mut current_word_start_ix = None;
4271 let mut chunk_ix = query.range.start;
4272 for chunk in self.chunks(query.range, false) {
4273 for (i, c) in chunk.text.char_indices() {
4274 let ix = chunk_ix + i;
4275 if classifier.is_word(c) {
4276 if current_word_start_ix.is_none() {
4277 current_word_start_ix = Some(ix);
4278 }
4279
4280 if let Some(query_chars) = &query_chars {
4281 if query_ix < query_len {
4282 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4283 query_ix += 1;
4284 }
4285 }
4286 }
4287 continue;
4288 } else if let Some(word_start) = current_word_start_ix.take() {
4289 if query_ix == query_len {
4290 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4291 let mut word_text = self.text_for_range(word_start..ix).peekable();
4292 let first_char = word_text
4293 .peek()
4294 .and_then(|first_chunk| first_chunk.chars().next());
                        // As a heuristic to reduce useless completions, skip empty "words" and those starting with a digit.
4296 if !query.skip_digits
4297 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4298 {
4299 words.insert(word_text.collect(), word_range);
4300 }
4301 }
4302 }
4303 query_ix = 0;
4304 }
4305 chunk_ix += chunk.text.len();
4306 }
4307
4308 words
4309 }
4310}
4311
4312pub struct WordsQuery<'a> {
    /// Only returns words that contain every character of this string, in order (matched case-insensitively).
4314 pub fuzzy_contents: Option<&'a str>,
4315 /// Skips words that start with a digit.
4316 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4318 pub range: Range<usize>,
4319}
4320
4321fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4322 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4323}
4324
4325fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4326 let mut result = IndentSize::spaces(0);
4327 for c in text {
4328 let kind = match c {
4329 ' ' => IndentKind::Space,
4330 '\t' => IndentKind::Tab,
4331 _ => break,
4332 };
4333 if result.len == 0 {
4334 result.kind = kind;
4335 }
4336 result.len += 1;
4337 }
4338 result
4339}
4340
4341impl Clone for BufferSnapshot {
4342 fn clone(&self) -> Self {
4343 Self {
4344 text: self.text.clone(),
4345 syntax: self.syntax.clone(),
4346 file: self.file.clone(),
4347 remote_selections: self.remote_selections.clone(),
4348 diagnostics: self.diagnostics.clone(),
4349 language: self.language.clone(),
4350 non_text_state_update_count: self.non_text_state_update_count,
4351 }
4352 }
4353}
4354
4355impl Deref for BufferSnapshot {
4356 type Target = text::BufferSnapshot;
4357
4358 fn deref(&self) -> &Self::Target {
4359 &self.text
4360 }
4361}
4362
4363unsafe impl Send for BufferChunks<'_> {}
4364
4365impl<'a> BufferChunks<'a> {
4366 pub(crate) fn new(
4367 text: &'a Rope,
4368 range: Range<usize>,
4369 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4370 diagnostics: bool,
4371 buffer_snapshot: Option<&'a BufferSnapshot>,
4372 ) -> Self {
4373 let mut highlights = None;
4374 if let Some((captures, highlight_maps)) = syntax {
4375 highlights = Some(BufferChunkHighlights {
4376 captures,
4377 next_capture: None,
4378 stack: Default::default(),
4379 highlight_maps,
4380 })
4381 }
4382
4383 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4384 let chunks = text.chunks_in_range(range.clone());
4385
4386 let mut this = BufferChunks {
4387 range,
4388 buffer_snapshot,
4389 chunks,
4390 diagnostic_endpoints,
4391 error_depth: 0,
4392 warning_depth: 0,
4393 information_depth: 0,
4394 hint_depth: 0,
4395 unnecessary_depth: 0,
4396 underline: true,
4397 highlights,
4398 };
4399 this.initialize_diagnostic_endpoints();
4400 this
4401 }
4402
    /// Seeks to the given byte range in the buffer.
4404 pub fn seek(&mut self, range: Range<usize>) {
4405 let old_range = std::mem::replace(&mut self.range, range.clone());
4406 self.chunks.set_range(self.range.clone());
4407 if let Some(highlights) = self.highlights.as_mut() {
4408 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4409 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4410 highlights
4411 .stack
4412 .retain(|(end_offset, _)| *end_offset > range.start);
4413 if let Some(capture) = &highlights.next_capture {
4414 if range.start >= capture.node.start_byte() {
4415 let next_capture_end = capture.node.end_byte();
4416 if range.start < next_capture_end {
4417 highlights.stack.push((
4418 next_capture_end,
4419 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4420 ));
4421 }
4422 highlights.next_capture.take();
4423 }
4424 }
4425 } else if let Some(snapshot) = self.buffer_snapshot {
4426 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4427 *highlights = BufferChunkHighlights {
4428 captures,
4429 next_capture: None,
4430 stack: Default::default(),
4431 highlight_maps,
4432 };
4433 } else {
4434 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4435 // Seeking such BufferChunks is not supported.
4436 debug_assert!(
4437 false,
4438 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4439 );
4440 }
4441
4442 highlights.captures.set_byte_range(self.range.clone());
4443 self.initialize_diagnostic_endpoints();
4444 }
4445 }
4446
4447 fn initialize_diagnostic_endpoints(&mut self) {
4448 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4449 if let Some(buffer) = self.buffer_snapshot {
4450 let mut diagnostic_endpoints = Vec::new();
4451 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4452 diagnostic_endpoints.push(DiagnosticEndpoint {
4453 offset: entry.range.start,
4454 is_start: true,
4455 severity: entry.diagnostic.severity,
4456 is_unnecessary: entry.diagnostic.is_unnecessary,
4457 underline: entry.diagnostic.underline,
4458 });
4459 diagnostic_endpoints.push(DiagnosticEndpoint {
4460 offset: entry.range.end,
4461 is_start: false,
4462 severity: entry.diagnostic.severity,
4463 is_unnecessary: entry.diagnostic.is_unnecessary,
4464 underline: entry.diagnostic.underline,
4465 });
4466 }
4467 diagnostic_endpoints
4468 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4469 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4470 self.hint_depth = 0;
4471 self.error_depth = 0;
4472 self.warning_depth = 0;
4473 self.information_depth = 0;
4474 }
4475 }
4476 }
4477
4478 /// The current byte offset in the buffer.
4479 pub fn offset(&self) -> usize {
4480 self.range.start
4481 }
4482
4483 pub fn range(&self) -> Range<usize> {
4484 self.range.clone()
4485 }
4486
4487 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4488 let depth = match endpoint.severity {
4489 DiagnosticSeverity::ERROR => &mut self.error_depth,
4490 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4491 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4492 DiagnosticSeverity::HINT => &mut self.hint_depth,
4493 _ => return,
4494 };
4495 if endpoint.is_start {
4496 *depth += 1;
4497 } else {
4498 *depth -= 1;
4499 }
4500
4501 if endpoint.is_unnecessary {
4502 if endpoint.is_start {
4503 self.unnecessary_depth += 1;
4504 } else {
4505 self.unnecessary_depth -= 1;
4506 }
4507 }
4508 }
4509
4510 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4511 if self.error_depth > 0 {
4512 Some(DiagnosticSeverity::ERROR)
4513 } else if self.warning_depth > 0 {
4514 Some(DiagnosticSeverity::WARNING)
4515 } else if self.information_depth > 0 {
4516 Some(DiagnosticSeverity::INFORMATION)
4517 } else if self.hint_depth > 0 {
4518 Some(DiagnosticSeverity::HINT)
4519 } else {
4520 None
4521 }
4522 }
4523
4524 fn current_code_is_unnecessary(&self) -> bool {
4525 self.unnecessary_depth > 0
4526 }
4527}
4528
4529impl<'a> Iterator for BufferChunks<'a> {
4530 type Item = Chunk<'a>;
4531
4532 fn next(&mut self) -> Option<Self::Item> {
4533 let mut next_capture_start = usize::MAX;
4534 let mut next_diagnostic_endpoint = usize::MAX;
4535
4536 if let Some(highlights) = self.highlights.as_mut() {
4537 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4538 if *parent_capture_end <= self.range.start {
4539 highlights.stack.pop();
4540 } else {
4541 break;
4542 }
4543 }
4544
4545 if highlights.next_capture.is_none() {
4546 highlights.next_capture = highlights.captures.next();
4547 }
4548
4549 while let Some(capture) = highlights.next_capture.as_ref() {
4550 if self.range.start < capture.node.start_byte() {
4551 next_capture_start = capture.node.start_byte();
4552 break;
4553 } else {
4554 let highlight_id =
4555 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4556 highlights
4557 .stack
4558 .push((capture.node.end_byte(), highlight_id));
4559 highlights.next_capture = highlights.captures.next();
4560 }
4561 }
4562 }
4563
4564 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4565 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4566 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4567 if endpoint.offset <= self.range.start {
4568 self.update_diagnostic_depths(endpoint);
4569 diagnostic_endpoints.next();
4570 self.underline = endpoint.underline;
4571 } else {
4572 next_diagnostic_endpoint = endpoint.offset;
4573 break;
4574 }
4575 }
4576 }
4577 self.diagnostic_endpoints = diagnostic_endpoints;
4578
4579 if let Some(chunk) = self.chunks.peek() {
4580 let chunk_start = self.range.start;
4581 let mut chunk_end = (self.chunks.offset() + chunk.len())
4582 .min(next_capture_start)
4583 .min(next_diagnostic_endpoint);
4584 let mut highlight_id = None;
4585 if let Some(highlights) = self.highlights.as_ref() {
4586 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4587 chunk_end = chunk_end.min(*parent_capture_end);
4588 highlight_id = Some(*parent_highlight_id);
4589 }
4590 }
4591
4592 let slice =
4593 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4594 self.range.start = chunk_end;
4595 if self.range.start == self.chunks.offset() + chunk.len() {
4596 self.chunks.next().unwrap();
4597 }
4598
4599 Some(Chunk {
4600 text: slice,
4601 syntax_highlight_id: highlight_id,
4602 underline: self.underline,
4603 diagnostic_severity: self.current_diagnostic_severity(),
4604 is_unnecessary: self.current_code_is_unnecessary(),
4605 ..Chunk::default()
4606 })
4607 } else {
4608 None
4609 }
4610 }
4611}
4612
4613impl operation_queue::Operation for Operation {
4614 fn lamport_timestamp(&self) -> clock::Lamport {
4615 match self {
4616 Operation::Buffer(_) => {
4617 unreachable!("buffer operations should never be deferred at this layer")
4618 }
4619 Operation::UpdateDiagnostics {
4620 lamport_timestamp, ..
4621 }
4622 | Operation::UpdateSelections {
4623 lamport_timestamp, ..
4624 }
4625 | Operation::UpdateCompletionTriggers {
4626 lamport_timestamp, ..
4627 } => *lamport_timestamp,
4628 }
4629 }
4630}
4631
4632impl Default for Diagnostic {
4633 fn default() -> Self {
4634 Self {
4635 source: Default::default(),
4636 code: None,
4637 code_description: None,
4638 severity: DiagnosticSeverity::ERROR,
4639 message: Default::default(),
4640 markdown: None,
4641 group_id: 0,
4642 is_primary: false,
4643 is_disk_based: false,
4644 is_unnecessary: false,
4645 underline: true,
4646 data: None,
4647 }
4648 }
4649}
4650
4651impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4653 pub fn spaces(len: u32) -> Self {
4654 Self {
4655 len,
4656 kind: IndentKind::Space,
4657 }
4658 }
4659
4660 /// Returns an [`IndentSize`] representing a tab.
4661 pub fn tab() -> Self {
4662 Self {
4663 len: 1,
4664 kind: IndentKind::Tab,
4665 }
4666 }
4667
4668 /// An iterator over the characters represented by this [`IndentSize`].
4669 pub fn chars(&self) -> impl Iterator<Item = char> {
4670 iter::repeat(self.char()).take(self.len as usize)
4671 }
4672
4673 /// The character representation of this [`IndentSize`].
4674 pub fn char(&self) -> char {
4675 match self.kind {
4676 IndentKind::Space => ' ',
4677 IndentKind::Tab => '\t',
4678 }
4679 }
4680
4681 /// Consumes the current [`IndentSize`] and returns a new one that has
4682 /// been shrunk or enlarged by the given size along the given direction.
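    ///
    /// A minimal sketch:
    ///
    /// ```ignore
    /// let indent = IndentSize::spaces(4);
    /// let deeper = indent.with_delta(Ordering::Greater, IndentSize::spaces(4)); // 8 spaces
    /// let shallower = deeper.with_delta(Ordering::Less, IndentSize::spaces(4)); // 4 spaces
    /// ```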
4683 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4684 match direction {
4685 Ordering::Less => {
4686 if self.kind == size.kind && self.len >= size.len {
4687 self.len -= size.len;
4688 }
4689 }
4690 Ordering::Equal => {}
4691 Ordering::Greater => {
4692 if self.len == 0 {
4693 self = size;
4694 } else if self.kind == size.kind {
4695 self.len += size.len;
4696 }
4697 }
4698 }
4699 self
4700 }
4701
4702 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4703 match self.kind {
4704 IndentKind::Space => self.len as usize,
4705 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4706 }
4707 }
4708}
4709
4710#[cfg(any(test, feature = "test-support"))]
4711pub struct TestFile {
4712 pub path: Arc<Path>,
4713 pub root_name: String,
4714 pub local_root: Option<PathBuf>,
4715}
4716
4717#[cfg(any(test, feature = "test-support"))]
4718impl File for TestFile {
4719 fn path(&self) -> &Arc<Path> {
4720 &self.path
4721 }
4722
4723 fn full_path(&self, _: &gpui::App) -> PathBuf {
4724 PathBuf::from(&self.root_name).join(self.path.as_ref())
4725 }
4726
4727 fn as_local(&self) -> Option<&dyn LocalFile> {
4728 if self.local_root.is_some() {
4729 Some(self)
4730 } else {
4731 None
4732 }
4733 }
4734
4735 fn disk_state(&self) -> DiskState {
4736 unimplemented!()
4737 }
4738
4739 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4740 self.path().file_name().unwrap_or(self.root_name.as_ref())
4741 }
4742
4743 fn worktree_id(&self, _: &App) -> WorktreeId {
4744 WorktreeId::from_usize(0)
4745 }
4746
4747 fn to_proto(&self, _: &App) -> rpc::proto::File {
4748 unimplemented!()
4749 }
4750
4751 fn is_private(&self) -> bool {
4752 false
4753 }
4754}
4755
4756#[cfg(any(test, feature = "test-support"))]
4757impl LocalFile for TestFile {
4758 fn abs_path(&self, _cx: &App) -> PathBuf {
4759 PathBuf::from(self.local_root.as_ref().unwrap())
4760 .join(&self.root_name)
4761 .join(self.path.as_ref())
4762 }
4763
4764 fn load(&self, _cx: &App) -> Task<Result<String>> {
4765 unimplemented!()
4766 }
4767
4768 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4769 unimplemented!()
4770 }
4771}
4772
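/// Groups consecutive values from the iterator into contiguous ranges, capping
/// each range at `max_len` elements.
///
/// A minimal sketch:
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 7, 8].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 7..9]);
/// ```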
4773pub(crate) fn contiguous_ranges(
4774 values: impl Iterator<Item = u32>,
4775 max_len: usize,
4776) -> impl Iterator<Item = Range<u32>> {
4777 let mut values = values;
4778 let mut current_range: Option<Range<u32>> = None;
4779 std::iter::from_fn(move || {
4780 loop {
4781 if let Some(value) = values.next() {
4782 if let Some(range) = &mut current_range {
4783 if value == range.end && range.len() < max_len {
4784 range.end += 1;
4785 continue;
4786 }
4787 }
4788
4789 let prev_range = current_range.clone();
4790 current_range = Some(value..(value + 1));
4791 if prev_range.is_some() {
4792 return prev_range;
4793 }
4794 } else {
4795 return current_range.take();
4796 }
4797 }
4798 })
4799}
4800
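/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally taking a language's extra word characters into account.
///
/// A minimal sketch (with no language scope):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert!(classifier.is_word('_'));
/// assert!(classifier.is_whitespace(' '));
/// assert!(classifier.is_punctuation('-'));
/// ```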
4801#[derive(Default, Debug)]
4802pub struct CharClassifier {
4803 scope: Option<LanguageScope>,
4804 for_completion: bool,
4805 ignore_punctuation: bool,
4806}
4807
4808impl CharClassifier {
4809 pub fn new(scope: Option<LanguageScope>) -> Self {
4810 Self {
4811 scope,
4812 for_completion: false,
4813 ignore_punctuation: false,
4814 }
4815 }
4816
4817 pub fn for_completion(self, for_completion: bool) -> Self {
4818 Self {
4819 for_completion,
4820 ..self
4821 }
4822 }
4823
4824 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4825 Self {
4826 ignore_punctuation,
4827 ..self
4828 }
4829 }
4830
4831 pub fn is_whitespace(&self, c: char) -> bool {
4832 self.kind(c) == CharKind::Whitespace
4833 }
4834
4835 pub fn is_word(&self, c: char) -> bool {
4836 self.kind(c) == CharKind::Word
4837 }
4838
4839 pub fn is_punctuation(&self, c: char) -> bool {
4840 self.kind(c) == CharKind::Punctuation
4841 }
4842
4843 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4844 if c.is_alphanumeric() || c == '_' {
4845 return CharKind::Word;
4846 }
4847
4848 if let Some(scope) = &self.scope {
4849 let characters = if self.for_completion {
4850 scope.completion_query_characters()
4851 } else {
4852 scope.word_characters()
4853 };
4854 if let Some(characters) = characters {
4855 if characters.contains(&c) {
4856 return CharKind::Word;
4857 }
4858 }
4859 }
4860
4861 if c.is_whitespace() {
4862 return CharKind::Whitespace;
4863 }
4864
4865 if ignore_punctuation {
4866 CharKind::Word
4867 } else {
4868 CharKind::Punctuation
4869 }
4870 }
4871
4872 pub fn kind(&self, c: char) -> CharKind {
4873 self.kind_with(c, self.ignore_punctuation)
4874 }
4875}
4876
4877/// Find all of the ranges of whitespace that occur at the ends of lines
4878/// in the given rope.
4879///
4880/// This could also be done with a regex search, but this implementation
4881/// avoids copying text.
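///
/// A minimal sketch (assuming `Rope` can be built from a `&str`):
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n}\t\n");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..13, 15..16]);
/// ```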
4882pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4883 let mut ranges = Vec::new();
4884
4885 let mut offset = 0;
4886 let mut prev_chunk_trailing_whitespace_range = 0..0;
4887 for chunk in rope.chunks() {
4888 let mut prev_line_trailing_whitespace_range = 0..0;
4889 for (i, line) in chunk.split('\n').enumerate() {
4890 let line_end_offset = offset + line.len();
4891 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4892 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4893
4894 if i == 0 && trimmed_line_len == 0 {
4895 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4896 }
4897 if !prev_line_trailing_whitespace_range.is_empty() {
4898 ranges.push(prev_line_trailing_whitespace_range);
4899 }
4900
4901 offset = line_end_offset + 1;
4902 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4903 }
4904
4905 offset -= 1;
4906 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4907 }
4908
4909 if !prev_chunk_trailing_whitespace_range.is_empty() {
4910 ranges.push(prev_chunk_trailing_whitespace_range);
4911 }
4912
4913 ranges
4914}