1pub use crate::{
2 Grammar, Language, LanguageRegistry,
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto,
6};
7use crate::{
8 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
9 TreeSitterOptions,
10 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
11 language_settings::{LanguageSettings, language_settings},
12 outline::OutlineItem,
13 syntax_map::{
14 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
15 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
16 },
17 task_context::RunnableRange,
18 text_diff::text_diff,
19};
20use anyhow::{Context as _, Result};
21use async_watch as watch;
22pub use clock::ReplicaId;
23use clock::{AGENT_REPLICA_ID, Lamport};
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
29 Task, TaskLabel, TextStyle,
30};
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 ffi::OsStr,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, Range},
51 path::{Path, PathBuf},
52 rc,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
63 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
64 ToPointUtf16, Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{RangeExt, debug_panic, maybe};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76/// A label for the background task spawned by the buffer to compute
77/// a diff against the contents of its file.
78pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
79
/// Indicates whether a [`Buffer`] can be edited.
81#[derive(PartialEq, Clone, Copy, Debug)]
82pub enum Capability {
83 /// The buffer is a mutable replica.
84 ReadWrite,
85 /// The buffer is a read-only replica.
86 ReadOnly,
87}
88
89pub type BufferRow = u32;
90
91/// An in-memory representation of a source code file, including its text,
92/// syntax trees, git status, and diagnostics.
93pub struct Buffer {
94 text: TextBuffer,
95 branch_state: Option<BufferBranchState>,
96 /// Filesystem state, `None` when there is no path.
97 file: Option<Arc<dyn File>>,
98 /// The mtime of the file when this buffer was last loaded from
99 /// or saved to disk.
100 saved_mtime: Option<MTime>,
101 /// The version vector when this buffer was last loaded from
102 /// or saved to disk.
103 saved_version: clock::Global,
104 preview_version: clock::Global,
105 transaction_depth: usize,
106 was_dirty_before_starting_transaction: Option<bool>,
107 reload_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 autoindent_requests: Vec<Arc<AutoindentRequest>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
177#[serde(rename_all = "snake_case")]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190#[derive(Clone, Debug)]
191struct SelectionSet {
192 line_mode: bool,
193 cursor_shape: CursorShape,
194 selections: Arc<[Selection<Anchor>]>,
195 lamport_timestamp: clock::Lamport,
196}
197
198/// A diagnostic associated with a certain range of a buffer.
199#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
200pub struct Diagnostic {
201 /// The name of the service that produced this diagnostic.
202 pub source: Option<String>,
203 /// A machine-readable code that identifies this diagnostic.
204 pub code: Option<NumberOrString>,
205 pub code_description: Option<lsp::Url>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
    /// The human-readable message, in Markdown format, if available.
211 pub markdown: Option<String>,
212 /// An id that identifies the group to which this diagnostic belongs.
213 ///
214 /// When a language server produces a diagnostic with
215 /// one or more associated diagnostics, those diagnostics are all
216 /// assigned a single group ID.
217 pub group_id: usize,
218 /// Whether this diagnostic is the primary diagnostic for its group.
219 ///
220 /// In a given group, the primary diagnostic is the top-level diagnostic
221 /// returned by the language server. The non-primary diagnostics are the
222 /// associated diagnostics.
223 pub is_primary: bool,
224 /// Whether this diagnostic is considered to originate from an analysis of
225 /// files on disk, as opposed to any unsaved buffer contents. This is a
226 /// property of a given diagnostic source, and is configured for a given
227 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
228 /// for the language server.
229 pub is_disk_based: bool,
230 /// Whether this diagnostic marks unnecessary code.
231 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for it.
233 pub data: Option<Value>,
234 /// Whether to underline the corresponding text range in the editor.
235 pub underline: bool,
236}
237
238/// An operation used to synchronize this buffer with its other replicas.
239#[derive(Clone, Debug, PartialEq)]
240pub enum Operation {
241 /// A text operation.
242 Buffer(text::Operation),
243
244 /// An update to the buffer's diagnostics.
245 UpdateDiagnostics {
246 /// The id of the language server that produced the new diagnostics.
247 server_id: LanguageServerId,
248 /// The diagnostics.
249 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
250 /// The buffer's lamport timestamp.
251 lamport_timestamp: clock::Lamport,
252 },
253
254 /// An update to the most recent selections in this buffer.
255 UpdateSelections {
256 /// The selections.
257 selections: Arc<[Selection<Anchor>]>,
258 /// The buffer's lamport timestamp.
259 lamport_timestamp: clock::Lamport,
260 /// Whether the selections are in 'line mode'.
261 line_mode: bool,
262 /// The [`CursorShape`] associated with these selections.
263 cursor_shape: CursorShape,
264 },
265
266 /// An update to the characters that should trigger autocompletion
267 /// for this buffer.
268 UpdateCompletionTriggers {
269 /// The characters that trigger autocompletion.
270 triggers: Vec<String>,
271 /// The buffer's lamport timestamp.
272 lamport_timestamp: clock::Lamport,
273 /// The language server ID.
274 server_id: LanguageServerId,
275 },
276}
277
278/// An event that occurs in a buffer.
279#[derive(Clone, Debug, PartialEq)]
280pub enum BufferEvent {
281 /// The buffer was changed in a way that must be
282 /// propagated to its other replicas.
283 Operation {
284 operation: Operation,
285 is_local: bool,
286 },
287 /// The buffer was edited.
288 Edited,
289 /// The buffer's `dirty` bit changed.
290 DirtyChanged,
291 /// The buffer was saved.
292 Saved,
293 /// The buffer's file was changed on disk.
294 FileHandleChanged,
295 /// The buffer was reloaded.
296 Reloaded,
297 /// The buffer is in need of a reload
298 ReloadNeeded,
299 /// The buffer's language was changed.
300 LanguageChanged,
301 /// The buffer's syntax trees were updated.
302 Reparsed,
303 /// The buffer's diagnostics were updated.
304 DiagnosticsUpdated,
305 /// The buffer gained or lost editing capabilities.
306 CapabilityChanged,
307 /// The buffer was explicitly requested to close.
308 Closed,
309 /// The buffer was discarded when closing.
310 Discarded,
311}
312
313/// The file associated with a buffer.
314pub trait File: Send + Sync + Any {
315 /// Returns the [`LocalFile`] associated with this file, if the
316 /// file is local.
317 fn as_local(&self) -> Option<&dyn LocalFile>;
318
319 /// Returns whether this file is local.
320 fn is_local(&self) -> bool {
321 self.as_local().is_some()
322 }
323
324 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
325 /// only available in some states, such as modification time.
326 fn disk_state(&self) -> DiskState;
327
328 /// Returns the path of this file relative to the worktree's root directory.
329 fn path(&self) -> &Arc<Path>;
330
331 /// Returns the path of this file relative to the worktree's parent directory (this means it
332 /// includes the name of the worktree's root folder).
333 fn full_path(&self, cx: &App) -> PathBuf;
334
335 /// Returns the last component of this handle's absolute path. If this handle refers to the root
336 /// of its worktree, then this method will return the name of the worktree itself.
337 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
338
339 /// Returns the id of the worktree to which this file belongs.
340 ///
341 /// This is needed for looking up project-specific settings.
342 fn worktree_id(&self, cx: &App) -> WorktreeId;
343
344 /// Converts this file into a protobuf message.
345 fn to_proto(&self, cx: &App) -> rpc::proto::File;
346
    /// Returns whether Zed considers this to be a private file.
348 fn is_private(&self) -> bool;
349}
350
351/// The file's storage status - whether it's stored (`Present`), and if so when it was last
352/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
353/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
354/// indicator for new files.
355#[derive(Copy, Clone, Debug, PartialEq)]
356pub enum DiskState {
357 /// File created in Zed that has not been saved.
358 New,
359 /// File present on the filesystem.
360 Present { mtime: MTime },
361 /// Deleted file that was previously present.
362 Deleted,
363}
364
365impl DiskState {
366 /// Returns the file's last known modification time on disk.
367 pub fn mtime(self) -> Option<MTime> {
368 match self {
369 DiskState::New => None,
370 DiskState::Present { mtime } => Some(mtime),
371 DiskState::Deleted => None,
372 }
373 }
374
375 pub fn exists(&self) -> bool {
376 match self {
377 DiskState::New => false,
378 DiskState::Present { .. } => true,
379 DiskState::Deleted => false,
380 }
381 }
382}
383
384/// The file associated with a buffer, in the case where the file is on the local disk.
385pub trait LocalFile: File {
386 /// Returns the absolute path of this file
387 fn abs_path(&self, cx: &App) -> PathBuf;
388
389 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
390 fn load(&self, cx: &App) -> Task<Result<String>>;
391
392 /// Loads the file's contents from disk.
393 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
394}
395
396/// The auto-indent behavior associated with an editing operation.
397/// For some editing operations, each affected line of text has its
398/// indentation recomputed. For other operations, the entire block
399/// of edited text is adjusted uniformly.
400#[derive(Clone, Debug)]
401pub enum AutoindentMode {
402 /// Indent each line of inserted text.
403 EachLine,
404 /// Apply the same indentation adjustment to all of the lines
405 /// in a given insertion.
406 Block {
407 /// The original indentation column of the first line of each
408 /// insertion, if it has been copied.
409 ///
410 /// Knowing this makes it possible to preserve the relative indentation
411 /// of every line in the insertion from when it was copied.
412 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is auto-indented to column `b`, then every subsequent line of the
        /// insertion has its indentation adjusted by `b - a` columns.
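        ///
        /// An illustrative sketch (hypothetical numbers, not tied to any particular
        /// buffer): if a copied block's first line originally started at column 4
        /// (`a = 4`) and auto-indent places it at column 8 (`b = 8`), every following
        /// line of the insertion is shifted right by `b - a = 4` columns, preserving
        /// the block's internal shape.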
416 original_indent_columns: Vec<Option<u32>>,
417 },
418}
419
420#[derive(Clone)]
421struct AutoindentRequest {
422 before_edit: BufferSnapshot,
423 entries: Vec<AutoindentRequestEntry>,
424 is_block_mode: bool,
425 ignore_empty_lines: bool,
426}
427
428#[derive(Debug, Clone)]
429struct AutoindentRequestEntry {
430 /// A range of the buffer whose indentation should be adjusted.
431 range: Range<Anchor>,
432 /// Whether or not these lines should be considered brand new, for the
433 /// purpose of auto-indent. When text is not new, its indentation will
434 /// only be adjusted if the suggested indentation level has *changed*
435 /// since the edit was made.
436 first_line_is_new: bool,
437 indent_size: IndentSize,
438 original_indent_column: Option<u32>,
439}
440
441#[derive(Debug)]
442struct IndentSuggestion {
443 basis_row: u32,
444 delta: Ordering,
445 within_error: bool,
446}
447
448struct BufferChunkHighlights<'a> {
449 captures: SyntaxMapCaptures<'a>,
450 next_capture: Option<SyntaxMapCapture<'a>>,
451 stack: Vec<(usize, HighlightId)>,
452 highlight_maps: Vec<HighlightMap>,
453}
454
455/// An iterator that yields chunks of a buffer's text, along with their
456/// syntax highlights and diagnostic status.
457pub struct BufferChunks<'a> {
458 buffer_snapshot: Option<&'a BufferSnapshot>,
459 range: Range<usize>,
460 chunks: text::Chunks<'a>,
461 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
462 error_depth: usize,
463 warning_depth: usize,
464 information_depth: usize,
465 hint_depth: usize,
466 unnecessary_depth: usize,
467 underline: bool,
468 highlights: Option<BufferChunkHighlights<'a>>,
469}
470
471/// A chunk of a buffer's text, along with its syntax highlight and
472/// diagnostic status.
473#[derive(Clone, Debug, Default)]
474pub struct Chunk<'a> {
475 /// The text of the chunk.
476 pub text: &'a str,
477 /// The syntax highlighting style of the chunk.
478 pub syntax_highlight_id: Option<HighlightId>,
479 /// The highlight style that has been applied to this chunk in
480 /// the editor.
481 pub highlight_style: Option<HighlightStyle>,
482 /// The severity of diagnostic associated with this chunk, if any.
483 pub diagnostic_severity: Option<DiagnosticSeverity>,
484 /// Whether this chunk of text is marked as unnecessary.
485 pub is_unnecessary: bool,
486 /// Whether this chunk of text was originally a tab character.
487 pub is_tab: bool,
    /// Whether this chunk of text comes from an inlay.
489 pub is_inlay: bool,
490 /// Whether to underline the corresponding text range in the editor.
491 pub underline: bool,
492}
493
494/// A set of edits to a given version of a buffer, computed asynchronously.
495#[derive(Debug)]
496pub struct Diff {
497 pub base_version: clock::Global,
498 pub line_ending: LineEnding,
499 pub edits: Vec<(Range<usize>, Arc<str>)>,
500}
501
502#[derive(Debug, Clone, Copy)]
503pub(crate) struct DiagnosticEndpoint {
504 offset: usize,
505 is_start: bool,
506 underline: bool,
507 severity: DiagnosticSeverity,
508 is_unnecessary: bool,
509}
510
511/// A class of characters, used for characterizing a run of text.
512#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
513pub enum CharKind {
514 /// Whitespace.
515 Whitespace,
516 /// Punctuation.
517 Punctuation,
518 /// Word.
519 Word,
520}
521
/// A set of data about a buffer region that can be resolved into a runnable task.
523pub struct Runnable {
524 pub tags: SmallVec<[RunnableTag; 1]>,
525 pub language: Arc<Language>,
526 pub buffer: BufferId,
527}
528
529#[derive(Default, Clone, Debug)]
530pub struct HighlightedText {
531 pub text: SharedString,
532 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
533}
534
535#[derive(Default, Debug)]
536struct HighlightedTextBuilder {
537 pub text: String,
538 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
539}
540
541impl HighlightedText {
542 pub fn from_buffer_range<T: ToOffset>(
543 range: Range<T>,
544 snapshot: &text::BufferSnapshot,
545 syntax_snapshot: &SyntaxSnapshot,
546 override_style: Option<HighlightStyle>,
547 syntax_theme: &SyntaxTheme,
548 ) -> Self {
549 let mut highlighted_text = HighlightedTextBuilder::default();
550 highlighted_text.add_text_from_buffer_range(
551 range,
552 snapshot,
553 syntax_snapshot,
554 override_style,
555 syntax_theme,
556 );
557 highlighted_text.build()
558 }
559
560 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
561 gpui::StyledText::new(self.text.clone())
562 .with_default_highlights(default_style, self.highlights.iter().cloned())
563 }
564
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// begins within it, along with a boolean indicating whether more lines follow.
567 pub fn first_line_preview(self) -> (Self, bool) {
568 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
569 let first_line = &self.text[..newline_ix];
570
571 // Trim leading whitespace, unless an edit starts prior to it.
572 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
573 if let Some((first_highlight_range, _)) = self.highlights.first() {
574 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
575 }
576
577 let preview_text = &first_line[preview_start_ix..];
578 let preview_highlights = self
579 .highlights
580 .into_iter()
581 .take_while(|(range, _)| range.start < newline_ix)
582 .filter_map(|(mut range, highlight)| {
583 range.start = range.start.saturating_sub(preview_start_ix);
584 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
585 if range.is_empty() {
586 None
587 } else {
588 Some((range, highlight))
589 }
590 });
591
592 let preview = Self {
593 text: SharedString::new(preview_text),
594 highlights: preview_highlights.collect(),
595 };
596
597 (preview, self.text.len() > newline_ix)
598 }
599}
600
601impl HighlightedTextBuilder {
602 pub fn build(self) -> HighlightedText {
603 HighlightedText {
604 text: self.text.into(),
605 highlights: self.highlights,
606 }
607 }
608
609 pub fn add_text_from_buffer_range<T: ToOffset>(
610 &mut self,
611 range: Range<T>,
612 snapshot: &text::BufferSnapshot,
613 syntax_snapshot: &SyntaxSnapshot,
614 override_style: Option<HighlightStyle>,
615 syntax_theme: &SyntaxTheme,
616 ) {
617 let range = range.to_offset(snapshot);
618 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
619 let start = self.text.len();
620 self.text.push_str(chunk.text);
621 let end = self.text.len();
622
623 if let Some(mut highlight_style) = chunk
624 .syntax_highlight_id
625 .and_then(|id| id.style(syntax_theme))
626 {
627 if let Some(override_style) = override_style {
628 highlight_style.highlight(override_style);
629 }
630 self.highlights.push((start..end, highlight_style));
631 } else if let Some(override_style) = override_style {
632 self.highlights.push((start..end, override_style));
633 }
634 }
635 }
636
637 fn highlighted_chunks<'a>(
638 range: Range<usize>,
639 snapshot: &'a text::BufferSnapshot,
640 syntax_snapshot: &'a SyntaxSnapshot,
641 ) -> BufferChunks<'a> {
642 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
643 grammar.highlights_query.as_ref()
644 });
645
646 let highlight_maps = captures
647 .grammars()
648 .iter()
649 .map(|grammar| grammar.highlight_map())
650 .collect();
651
652 BufferChunks::new(
653 snapshot.as_rope(),
654 range,
655 Some((captures, highlight_maps)),
656 false,
657 None,
658 )
659 }
660}
661
662#[derive(Clone)]
663pub struct EditPreview {
664 old_snapshot: text::BufferSnapshot,
665 applied_edits_snapshot: text::BufferSnapshot,
666 syntax_snapshot: SyntaxSnapshot,
667}
668
669impl EditPreview {
670 pub fn highlight_edits(
671 &self,
672 current_snapshot: &BufferSnapshot,
673 edits: &[(Range<Anchor>, String)],
674 include_deletions: bool,
675 cx: &App,
676 ) -> HighlightedText {
677 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
678 return HighlightedText::default();
679 };
680
681 let mut highlighted_text = HighlightedTextBuilder::default();
682
683 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
684
685 let insertion_highlight_style = HighlightStyle {
686 background_color: Some(cx.theme().status().created_background),
687 ..Default::default()
688 };
689 let deletion_highlight_style = HighlightStyle {
690 background_color: Some(cx.theme().status().deleted_background),
691 ..Default::default()
692 };
693 let syntax_theme = cx.theme().syntax();
694
695 for (range, edit_text) in edits {
696 let edit_new_end_in_preview_snapshot = range
697 .end
698 .bias_right(&self.old_snapshot)
699 .to_offset(&self.applied_edits_snapshot);
700 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
701
702 let unchanged_range_in_preview_snapshot =
703 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
704 if !unchanged_range_in_preview_snapshot.is_empty() {
705 highlighted_text.add_text_from_buffer_range(
706 unchanged_range_in_preview_snapshot,
707 &self.applied_edits_snapshot,
708 &self.syntax_snapshot,
709 None,
710 &syntax_theme,
711 );
712 }
713
714 let range_in_current_snapshot = range.to_offset(current_snapshot);
715 if include_deletions && !range_in_current_snapshot.is_empty() {
716 highlighted_text.add_text_from_buffer_range(
717 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
720 Some(deletion_highlight_style),
721 &syntax_theme,
722 );
723 }
724
725 if !edit_text.is_empty() {
726 highlighted_text.add_text_from_buffer_range(
727 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
728 &self.applied_edits_snapshot,
729 &self.syntax_snapshot,
730 Some(insertion_highlight_style),
731 &syntax_theme,
732 );
733 }
734
735 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
736 }
737
738 highlighted_text.add_text_from_buffer_range(
739 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
740 &self.applied_edits_snapshot,
741 &self.syntax_snapshot,
742 None,
743 &syntax_theme,
744 );
745
746 highlighted_text.build()
747 }
748
749 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
750 let (first, _) = edits.first()?;
751 let (last, _) = edits.last()?;
752
753 let start = first
754 .start
755 .bias_left(&self.old_snapshot)
756 .to_point(&self.applied_edits_snapshot);
757 let end = last
758 .end
759 .bias_right(&self.old_snapshot)
760 .to_point(&self.applied_edits_snapshot);
761
762 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
763 let range = Point::new(start.row, 0)
764 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
765
766 Some(range.to_offset(&self.applied_edits_snapshot))
767 }
768}
769
770#[derive(Clone, Debug, PartialEq, Eq)]
771pub struct BracketMatch {
772 pub open_range: Range<usize>,
773 pub close_range: Range<usize>,
774 pub newline_only: bool,
775}
776
777impl Buffer {
778 /// Create a new buffer with the given base text.
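    ///
    /// A minimal usage sketch (assumes a GPUI app context; the text is arbitrary):
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```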
779 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
780 Self::build(
781 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
782 None,
783 Capability::ReadWrite,
784 )
785 }
786
787 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
788 pub fn local_normalized(
789 base_text_normalized: Rope,
790 line_ending: LineEnding,
791 cx: &Context<Self>,
792 ) -> Self {
793 Self::build(
794 TextBuffer::new_normalized(
795 0,
796 cx.entity_id().as_non_zero_u64().into(),
797 line_ending,
798 base_text_normalized,
799 ),
800 None,
801 Capability::ReadWrite,
802 )
803 }
804
805 /// Create a new buffer that is a replica of a remote buffer.
806 pub fn remote(
807 remote_id: BufferId,
808 replica_id: ReplicaId,
809 capability: Capability,
810 base_text: impl Into<String>,
811 ) -> Self {
812 Self::build(
813 TextBuffer::new(replica_id, remote_id, base_text.into()),
814 None,
815 capability,
816 )
817 }
818
819 /// Create a new buffer that is a replica of a remote buffer, populating its
820 /// state from the given protobuf message.
821 pub fn from_proto(
822 replica_id: ReplicaId,
823 capability: Capability,
824 message: proto::BufferState,
825 file: Option<Arc<dyn File>>,
826 ) -> Result<Self> {
827 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
828 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
829 let mut this = Self::build(buffer, file, capability);
830 this.text.set_line_ending(proto::deserialize_line_ending(
831 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
832 ));
833 this.saved_version = proto::deserialize_version(&message.saved_version);
834 this.saved_mtime = message.saved_mtime.map(|time| time.into());
835 Ok(this)
836 }
837
838 /// Serialize the buffer's state to a protobuf message.
839 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
840 proto::BufferState {
841 id: self.remote_id().into(),
842 file: self.file.as_ref().map(|f| f.to_proto(cx)),
843 base_text: self.base_text().to_string(),
844 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
845 saved_version: proto::serialize_version(&self.saved_version),
846 saved_mtime: self.saved_mtime.map(|time| time.into()),
847 }
848 }
849
850 /// Serialize as protobufs all of the changes to the buffer since the given version.
851 pub fn serialize_ops(
852 &self,
853 since: Option<clock::Global>,
854 cx: &App,
855 ) -> Task<Vec<proto::Operation>> {
856 let mut operations = Vec::new();
857 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
858
859 operations.extend(self.remote_selections.iter().map(|(_, set)| {
860 proto::serialize_operation(&Operation::UpdateSelections {
861 selections: set.selections.clone(),
862 lamport_timestamp: set.lamport_timestamp,
863 line_mode: set.line_mode,
864 cursor_shape: set.cursor_shape,
865 })
866 }));
867
868 for (server_id, diagnostics) in &self.diagnostics {
869 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
870 lamport_timestamp: self.diagnostics_timestamp,
871 server_id: *server_id,
872 diagnostics: diagnostics.iter().cloned().collect(),
873 }));
874 }
875
876 for (server_id, completions) in &self.completion_triggers_per_language_server {
877 operations.push(proto::serialize_operation(
878 &Operation::UpdateCompletionTriggers {
879 triggers: completions.iter().cloned().collect(),
880 lamport_timestamp: self.completion_triggers_timestamp,
881 server_id: *server_id,
882 },
883 ));
884 }
885
886 let text_operations = self.text.operations().clone();
887 cx.background_spawn(async move {
888 let since = since.unwrap_or_default();
889 operations.extend(
890 text_operations
891 .iter()
892 .filter(|(_, op)| !since.observed(op.timestamp()))
893 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
894 );
895 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
896 operations
897 })
898 }
899
900 /// Assign a language to the buffer, returning the buffer.
901 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
902 self.set_language(Some(language), cx);
903 self
904 }
905
906 /// Returns the [`Capability`] of this buffer.
907 pub fn capability(&self) -> Capability {
908 self.capability
909 }
910
911 /// Whether this buffer can only be read.
912 pub fn read_only(&self) -> bool {
913 self.capability == Capability::ReadOnly
914 }
915
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
917 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
918 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
919 let snapshot = buffer.snapshot();
920 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
921 Self {
922 saved_mtime,
923 saved_version: buffer.version(),
924 preview_version: buffer.version(),
925 reload_task: None,
926 transaction_depth: 0,
927 was_dirty_before_starting_transaction: None,
928 has_unsaved_edits: Cell::new((buffer.version(), false)),
929 text: buffer,
930 branch_state: None,
931 file,
932 capability,
933 syntax_map,
934 reparse: None,
935 non_text_state_update_count: 0,
936 sync_parse_timeout: Duration::from_millis(1),
937 parse_status: async_watch::channel(ParseStatus::Idle),
938 autoindent_requests: Default::default(),
939 pending_autoindent: Default::default(),
940 language: None,
941 remote_selections: Default::default(),
942 diagnostics: Default::default(),
943 diagnostics_timestamp: Default::default(),
944 completion_triggers: Default::default(),
945 completion_triggers_per_language_server: Default::default(),
946 completion_triggers_timestamp: Default::default(),
947 deferred_ops: OperationQueue::new(),
948 has_conflict: false,
949 change_bits: Default::default(),
950 _subscriptions: Vec::new(),
951 }
952 }
953
954 pub fn build_snapshot(
955 text: Rope,
956 language: Option<Arc<Language>>,
957 language_registry: Option<Arc<LanguageRegistry>>,
958 cx: &mut App,
959 ) -> impl Future<Output = BufferSnapshot> + use<> {
960 let entity_id = cx.reserve_entity::<Self>().entity_id();
961 let buffer_id = entity_id.as_non_zero_u64().into();
962 async move {
963 let text =
964 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
965 let mut syntax = SyntaxMap::new(&text).snapshot();
966 if let Some(language) = language.clone() {
967 let text = text.clone();
968 let language = language.clone();
969 let language_registry = language_registry.clone();
970 syntax.reparse(&text, language_registry, language);
971 }
972 BufferSnapshot {
973 text,
974 syntax,
975 file: None,
976 diagnostics: Default::default(),
977 remote_selections: Default::default(),
978 language,
979 non_text_state_update_count: 0,
980 }
981 }
982 }
983
984 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
985 let entity_id = cx.reserve_entity::<Self>().entity_id();
986 let buffer_id = entity_id.as_non_zero_u64().into();
987 let text =
988 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
989 let syntax = SyntaxMap::new(&text).snapshot();
990 BufferSnapshot {
991 text,
992 syntax,
993 file: None,
994 diagnostics: Default::default(),
995 remote_selections: Default::default(),
996 language: None,
997 non_text_state_update_count: 0,
998 }
999 }
1000
1001 #[cfg(any(test, feature = "test-support"))]
1002 pub fn build_snapshot_sync(
1003 text: Rope,
1004 language: Option<Arc<Language>>,
1005 language_registry: Option<Arc<LanguageRegistry>>,
1006 cx: &mut App,
1007 ) -> BufferSnapshot {
1008 let entity_id = cx.reserve_entity::<Self>().entity_id();
1009 let buffer_id = entity_id.as_non_zero_u64().into();
1010 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1011 let mut syntax = SyntaxMap::new(&text).snapshot();
1012 if let Some(language) = language.clone() {
1013 let text = text.clone();
1014 let language = language.clone();
1015 let language_registry = language_registry.clone();
1016 syntax.reparse(&text, language_registry, language);
1017 }
1018 BufferSnapshot {
1019 text,
1020 syntax,
1021 file: None,
1022 diagnostics: Default::default(),
1023 remote_selections: Default::default(),
1024 language,
1025 non_text_state_update_count: 0,
1026 }
1027 }
1028
1029 /// Retrieve a snapshot of the buffer's current state. This is computationally
1030 /// cheap, and allows reading from the buffer on a background thread.
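    ///
    /// A sketch of the intended pattern (assumes `buffer: &Buffer` and an `App` context):
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// cx.background_spawn(async move {
    ///     // Read-only queries are safe here; the snapshot is immutable.
    ///     let _contents = snapshot.text();
    /// })
    /// .detach();
    /// ```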
1031 pub fn snapshot(&self) -> BufferSnapshot {
1032 let text = self.text.snapshot();
1033 let mut syntax_map = self.syntax_map.lock();
1034 syntax_map.interpolate(&text);
1035 let syntax = syntax_map.snapshot();
1036
1037 BufferSnapshot {
1038 text,
1039 syntax,
1040 file: self.file.clone(),
1041 remote_selections: self.remote_selections.clone(),
1042 diagnostics: self.diagnostics.clone(),
1043 language: self.language.clone(),
1044 non_text_state_update_count: self.non_text_state_update_count,
1045 }
1046 }
1047
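    /// Creates a branch of this buffer: an independent copy that shares this
    /// buffer's history and can later be merged back via [`Self::merge_into_base`].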
1048 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1049 let this = cx.entity();
1050 cx.new(|cx| {
1051 let mut branch = Self {
1052 branch_state: Some(BufferBranchState {
1053 base_buffer: this.clone(),
1054 merged_operations: Default::default(),
1055 }),
1056 language: self.language.clone(),
1057 has_conflict: self.has_conflict,
1058 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1059 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1060 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1061 };
1062 if let Some(language_registry) = self.language_registry() {
1063 branch.set_language_registry(language_registry);
1064 }
1065
1066 // Reparse the branch buffer so that we get syntax highlighting immediately.
1067 branch.reparse(cx);
1068
1069 branch
1070 })
1071 }
1072
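    /// Computes an [`EditPreview`] for the given edits on a background thread,
    /// without modifying this buffer. The result can be rendered with
    /// [`EditPreview::highlight_edits`].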
1073 pub fn preview_edits(
1074 &self,
1075 edits: Arc<[(Range<Anchor>, String)]>,
1076 cx: &App,
1077 ) -> Task<EditPreview> {
1078 let registry = self.language_registry();
1079 let language = self.language().cloned();
1080 let old_snapshot = self.text.snapshot();
1081 let mut branch_buffer = self.text.branch();
1082 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1083 cx.background_spawn(async move {
1084 if !edits.is_empty() {
1085 if let Some(language) = language.clone() {
1086 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1087 }
1088
1089 branch_buffer.edit(edits.iter().cloned());
1090 let snapshot = branch_buffer.snapshot();
1091 syntax_snapshot.interpolate(&snapshot);
1092
1093 if let Some(language) = language {
1094 syntax_snapshot.reparse(&snapshot, registry, language);
1095 }
1096 }
1097 EditPreview {
1098 old_snapshot,
1099 applied_edits_snapshot: branch_buffer.snapshot(),
1100 syntax_snapshot,
1101 }
1102 })
1103 }
1104
1105 /// Applies all of the changes in this buffer that intersect any of the
1106 /// given `ranges` to its base buffer.
1107 ///
1108 /// If `ranges` is empty, then all changes will be applied. This buffer must
1109 /// be a branch buffer to call this method.
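    ///
    /// A sketch (assumes `branch` was created with [`Self::branch`]; an empty range
    /// list merges every change):
    /// ```ignore
    /// branch.update(cx, |branch, cx| branch.merge_into_base(Vec::new(), cx));
    /// ```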
1110 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1111 let Some(base_buffer) = self.base_buffer() else {
1112 debug_panic!("not a branch buffer");
1113 return;
1114 };
1115
1116 let mut ranges = if ranges.is_empty() {
1117 &[0..usize::MAX]
1118 } else {
1119 ranges.as_slice()
1120 }
1121 .into_iter()
1122 .peekable();
1123
1124 let mut edits = Vec::new();
1125 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1126 let mut is_included = false;
1127 while let Some(range) = ranges.peek() {
1128 if range.end < edit.new.start {
1129 ranges.next().unwrap();
1130 } else {
1131 if range.start <= edit.new.end {
1132 is_included = true;
1133 }
1134 break;
1135 }
1136 }
1137
1138 if is_included {
1139 edits.push((
1140 edit.old.clone(),
1141 self.text_for_range(edit.new.clone()).collect::<String>(),
1142 ));
1143 }
1144 }
1145
1146 let operation = base_buffer.update(cx, |base_buffer, cx| {
1147 // cx.emit(BufferEvent::DiffBaseChanged);
1148 base_buffer.edit(edits, None, cx)
1149 });
1150
1151 if let Some(operation) = operation {
1152 if let Some(BufferBranchState {
1153 merged_operations, ..
1154 }) = &mut self.branch_state
1155 {
1156 merged_operations.push(operation);
1157 }
1158 }
1159 }
1160
1161 fn on_base_buffer_event(
1162 &mut self,
1163 _: Entity<Buffer>,
1164 event: &BufferEvent,
1165 cx: &mut Context<Self>,
1166 ) {
1167 let BufferEvent::Operation { operation, .. } = event else {
1168 return;
1169 };
1170 let Some(BufferBranchState {
1171 merged_operations, ..
1172 }) = &mut self.branch_state
1173 else {
1174 return;
1175 };
1176
1177 let mut operation_to_undo = None;
1178 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1179 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1180 merged_operations.remove(ix);
1181 operation_to_undo = Some(operation.timestamp);
1182 }
1183 }
1184
1185 self.apply_ops([operation.clone()], cx);
1186
1187 if let Some(timestamp) = operation_to_undo {
1188 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1189 self.undo_operations(counts, cx);
1190 }
1191 }
1192
1193 #[cfg(test)]
1194 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1195 &self.text
1196 }
1197
1198 /// Retrieve a snapshot of the buffer's raw text, without any
1199 /// language-related state like the syntax tree or diagnostics.
1200 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1201 self.text.snapshot()
1202 }
1203
1204 /// The file associated with the buffer, if any.
1205 pub fn file(&self) -> Option<&Arc<dyn File>> {
1206 self.file.as_ref()
1207 }
1208
1209 /// The version of the buffer that was last saved or reloaded from disk.
1210 pub fn saved_version(&self) -> &clock::Global {
1211 &self.saved_version
1212 }
1213
1214 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1215 pub fn saved_mtime(&self) -> Option<MTime> {
1216 self.saved_mtime
1217 }
1218
1219 /// Assign a language to the buffer.
1220 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1221 self.non_text_state_update_count += 1;
1222 self.syntax_map.lock().clear(&self.text);
1223 self.language = language;
1224 self.was_changed();
1225 self.reparse(cx);
1226 cx.emit(BufferEvent::LanguageChanged);
1227 }
1228
1229 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1230 /// other languages if parts of the buffer are written in different languages.
1231 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1232 self.syntax_map
1233 .lock()
1234 .set_language_registry(language_registry);
1235 }
1236
1237 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1238 self.syntax_map.lock().language_registry()
1239 }
1240
1241 /// Assign the buffer a new [`Capability`].
1242 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1243 self.capability = capability;
1244 cx.emit(BufferEvent::CapabilityChanged)
1245 }
1246
1247 /// This method is called to signal that the buffer has been saved.
1248 pub fn did_save(
1249 &mut self,
1250 version: clock::Global,
1251 mtime: Option<MTime>,
1252 cx: &mut Context<Self>,
1253 ) {
1254 self.saved_version = version;
1255 self.has_unsaved_edits
1256 .set((self.saved_version().clone(), false));
1257 self.has_conflict = false;
1258 self.saved_mtime = mtime;
1259 self.was_changed();
1260 cx.emit(BufferEvent::Saved);
1261 cx.notify();
1262 }
1263
1264 /// This method is called to signal that the buffer has been discarded.
1265 pub fn discarded(&self, cx: &mut Context<Self>) {
1266 cx.emit(BufferEvent::Discarded);
1267 cx.notify();
1268 }
1269
1270 /// Reloads the contents of the buffer from disk.
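    ///
    /// A sketch (assumes an async context and that the buffer's file is local):
    /// ```ignore
    /// let reload = buffer.update(cx, |buffer, cx| buffer.reload(cx));
    /// let _transaction = reload.await.ok().flatten();
    /// ```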
1271 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1272 let (tx, rx) = futures::channel::oneshot::channel();
1273 let prev_version = self.text.version();
1274 self.reload_task = Some(cx.spawn(async move |this, cx| {
1275 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1276 let file = this.file.as_ref()?.as_local()?;
1277
1278 Some((file.disk_state().mtime(), file.load(cx)))
1279 })?
1280 else {
1281 return Ok(());
1282 };
1283
1284 let new_text = new_text.await?;
1285 let diff = this
1286 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1287 .await;
1288 this.update(cx, |this, cx| {
1289 if this.version() == diff.base_version {
1290 this.finalize_last_transaction();
1291 this.apply_diff(diff, cx);
1292 tx.send(this.finalize_last_transaction().cloned()).ok();
1293 this.has_conflict = false;
1294 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1295 } else {
1296 if !diff.edits.is_empty()
1297 || this
1298 .edits_since::<usize>(&diff.base_version)
1299 .next()
1300 .is_some()
1301 {
1302 this.has_conflict = true;
1303 }
1304
1305 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1306 }
1307
1308 this.reload_task.take();
1309 })
1310 }));
1311 rx
1312 }
1313
1314 /// This method is called to signal that the buffer has been reloaded.
1315 pub fn did_reload(
1316 &mut self,
1317 version: clock::Global,
1318 line_ending: LineEnding,
1319 mtime: Option<MTime>,
1320 cx: &mut Context<Self>,
1321 ) {
1322 self.saved_version = version;
1323 self.has_unsaved_edits
1324 .set((self.saved_version.clone(), false));
1325 self.text.set_line_ending(line_ending);
1326 self.saved_mtime = mtime;
1327 cx.emit(BufferEvent::Reloaded);
1328 cx.notify();
1329 }
1330
1331 /// Updates the [`File`] backing this buffer. This should be called when
1332 /// the file has changed or has been deleted.
1333 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1334 let was_dirty = self.is_dirty();
1335 let mut file_changed = false;
1336
1337 if let Some(old_file) = self.file.as_ref() {
1338 if new_file.path() != old_file.path() {
1339 file_changed = true;
1340 }
1341
1342 let old_state = old_file.disk_state();
1343 let new_state = new_file.disk_state();
1344 if old_state != new_state {
1345 file_changed = true;
1346 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1347 cx.emit(BufferEvent::ReloadNeeded)
1348 }
1349 }
1350 } else {
1351 file_changed = true;
1352 };
1353
1354 self.file = Some(new_file);
1355 if file_changed {
1356 self.was_changed();
1357 self.non_text_state_update_count += 1;
1358 if was_dirty != self.is_dirty() {
1359 cx.emit(BufferEvent::DirtyChanged);
1360 }
1361 cx.emit(BufferEvent::FileHandleChanged);
1362 cx.notify();
1363 }
1364 }
1365
1366 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1367 Some(self.branch_state.as_ref()?.base_buffer.clone())
1368 }
1369
1370 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1371 pub fn language(&self) -> Option<&Arc<Language>> {
1372 self.language.as_ref()
1373 }
1374
1375 /// Returns the [`Language`] at the given location.
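    ///
    /// With injected languages (say Rust code blocks inside a Markdown buffer),
    /// this returns the language of the most deeply nested syntax layer at
    /// `position`, falling back to the buffer's primary language.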
1376 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1377 let offset = position.to_offset(self);
1378 self.syntax_map
1379 .lock()
1380 .layers_for_range(offset..offset, &self.text, false)
1381 .last()
1382 .map(|info| info.language.clone())
1383 .or_else(|| self.language.clone())
1384 }
1385
1386 /// Returns each [`Language`] for the active syntax layers at the given location.
1387 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1388 let offset = position.to_offset(self);
1389 let mut languages: Vec<Arc<Language>> = self
1390 .syntax_map
1391 .lock()
1392 .layers_for_range(offset..offset, &self.text, false)
1393 .map(|info| info.language.clone())
1394 .collect();
1395
1396 if languages.is_empty() {
1397 if let Some(buffer_language) = self.language() {
1398 languages.push(buffer_language.clone());
1399 }
1400 }
1401
1402 languages
1403 }
1404
1405 /// An integer version number that accounts for all updates besides
1406 /// the buffer's text itself (which is versioned via a version vector).
1407 pub fn non_text_state_update_count(&self) -> usize {
1408 self.non_text_state_update_count
1409 }
1410
1411 /// Whether the buffer is being parsed in the background.
1412 #[cfg(any(test, feature = "test-support"))]
1413 pub fn is_parsing(&self) -> bool {
1414 self.reparse.is_some()
1415 }
1416
1417 /// Indicates whether the buffer contains any regions that may be
1418 /// written in a language that hasn't been loaded yet.
1419 pub fn contains_unknown_injections(&self) -> bool {
1420 self.syntax_map.lock().contains_unknown_injections()
1421 }
1422
1423 #[cfg(test)]
1424 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1425 self.sync_parse_timeout = timeout;
1426 }
1427
1428 /// Called after an edit to synchronize the buffer's main parse tree with
1429 /// the buffer's new underlying state.
1430 ///
1431 /// Locks the syntax map and interpolates the edits since the last reparse
1432 /// into the foreground syntax tree.
1433 ///
1434 /// Then takes a stable snapshot of the syntax map before unlocking it.
1435 /// The snapshot with the interpolated edits is sent to a background thread,
1436 /// where we ask Tree-sitter to perform an incremental parse.
1437 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete. If it finishes within that window,
    /// we apply the newly parsed syntax synchronously.
    ///
    /// If the timeout elapses first, we return with the interpolated tree still
    /// in the foreground and spawn a second task that waits for the background
    /// parse to finish. When it completes, we call back into the main thread
    /// and assign the parsed state there.
1446 ///
1447 /// If the buffer or grammar changed since the start of the background parse,
1448 /// initiate an additional reparse recursively. To avoid concurrent parses
1449 /// for the same buffer, we only initiate a new parse if we are not already
1450 /// parsing in the background.
1451 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1452 if self.reparse.is_some() {
1453 return;
1454 }
1455 let language = if let Some(language) = self.language.clone() {
1456 language
1457 } else {
1458 return;
1459 };
1460
1461 let text = self.text_snapshot();
1462 let parsed_version = self.version();
1463
1464 let mut syntax_map = self.syntax_map.lock();
1465 syntax_map.interpolate(&text);
1466 let language_registry = syntax_map.language_registry();
1467 let mut syntax_snapshot = syntax_map.snapshot();
1468 drop(syntax_map);
1469
1470 let parse_task = cx.background_spawn({
1471 let language = language.clone();
1472 let language_registry = language_registry.clone();
1473 async move {
1474 syntax_snapshot.reparse(&text, language_registry, language);
1475 syntax_snapshot
1476 }
1477 });
1478
1479 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1480 match cx
1481 .background_executor()
1482 .block_with_timeout(self.sync_parse_timeout, parse_task)
1483 {
1484 Ok(new_syntax_snapshot) => {
1485 self.did_finish_parsing(new_syntax_snapshot, cx);
1486 self.reparse = None;
1487 }
1488 Err(parse_task) => {
1489 self.reparse = Some(cx.spawn(async move |this, cx| {
1490 let new_syntax_map = parse_task.await;
1491 this.update(cx, move |this, cx| {
1492 let grammar_changed =
1493 this.language.as_ref().map_or(true, |current_language| {
1494 !Arc::ptr_eq(&language, current_language)
1495 });
1496 let language_registry_changed = new_syntax_map
1497 .contains_unknown_injections()
1498 && language_registry.map_or(false, |registry| {
1499 registry.version() != new_syntax_map.language_registry_version()
1500 });
1501 let parse_again = language_registry_changed
1502 || grammar_changed
1503 || this.version.changed_since(&parsed_version);
1504 this.did_finish_parsing(new_syntax_map, cx);
1505 this.reparse = None;
1506 if parse_again {
1507 this.reparse(cx);
1508 }
1509 })
1510 .ok();
1511 }));
1512 }
1513 }
1514 }
1515
1516 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1517 self.was_changed();
1518 self.non_text_state_update_count += 1;
1519 self.syntax_map.lock().did_parse(syntax_snapshot);
1520 self.request_autoindent(cx);
1521 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1522 cx.emit(BufferEvent::Reparsed);
1523 cx.notify();
1524 }
1525
1526 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1527 self.parse_status.1.clone()
1528 }
1529
1530 /// Assign to the buffer a set of diagnostics created by a given language server.
1531 pub fn update_diagnostics(
1532 &mut self,
1533 server_id: LanguageServerId,
1534 diagnostics: DiagnosticSet,
1535 cx: &mut Context<Self>,
1536 ) {
1537 let lamport_timestamp = self.text.lamport_clock.tick();
1538 let op = Operation::UpdateDiagnostics {
1539 server_id,
1540 diagnostics: diagnostics.iter().cloned().collect(),
1541 lamport_timestamp,
1542 };
1543 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1544 self.send_operation(op, true, cx);
1545 }
1546
1547 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1548 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1549 return None;
1550 };
1551 Some(&self.diagnostics[idx].1)
1552 }
1553
1554 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1555 if let Some(indent_sizes) = self.compute_autoindents() {
1556 let indent_sizes = cx.background_spawn(indent_sizes);
1557 match cx
1558 .background_executor()
1559 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1560 {
1561 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1562 Err(indent_sizes) => {
1563 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1564 let indent_sizes = indent_sizes.await;
1565 this.update(cx, |this, cx| {
1566 this.apply_autoindents(indent_sizes, cx);
1567 })
1568 .ok();
1569 }));
1570 }
1571 }
1572 } else {
1573 self.autoindent_requests.clear();
1574 }
1575 }
1576
1577 fn compute_autoindents(
1578 &self,
1579 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1580 let max_rows_between_yields = 100;
1581 let snapshot = self.snapshot();
1582 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1583 return None;
1584 }
1585
1586 let autoindent_requests = self.autoindent_requests.clone();
1587 Some(async move {
1588 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1589 for request in autoindent_requests {
1590 // Resolve each edited range to its row in the current buffer and in the
1591 // buffer before this batch of edits.
1592 let mut row_ranges = Vec::new();
1593 let mut old_to_new_rows = BTreeMap::new();
1594 let mut language_indent_sizes_by_new_row = Vec::new();
1595 for entry in &request.entries {
1596 let position = entry.range.start;
1597 let new_row = position.to_point(&snapshot).row;
1598 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1599 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1600
1601 if !entry.first_line_is_new {
1602 let old_row = position.to_point(&request.before_edit).row;
1603 old_to_new_rows.insert(old_row, new_row);
1604 }
1605 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1606 }
1607
1608 // Build a map containing the suggested indentation for each of the edited lines
1609 // with respect to the state of the buffer before these edits. This map is keyed
1610 // by the rows for these lines in the current state of the buffer.
1611 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1612 let old_edited_ranges =
1613 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1614 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1615 let mut language_indent_size = IndentSize::default();
1616 for old_edited_range in old_edited_ranges {
1617 let suggestions = request
1618 .before_edit
1619 .suggest_autoindents(old_edited_range.clone())
1620 .into_iter()
1621 .flatten();
1622 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1623 if let Some(suggestion) = suggestion {
1624 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1625
1626 // Find the indent size based on the language for this row.
1627 while let Some((row, size)) = language_indent_sizes.peek() {
1628 if *row > new_row {
1629 break;
1630 }
1631 language_indent_size = *size;
1632 language_indent_sizes.next();
1633 }
1634
1635 let suggested_indent = old_to_new_rows
1636 .get(&suggestion.basis_row)
1637 .and_then(|from_row| {
1638 Some(old_suggestions.get(from_row).copied()?.0)
1639 })
1640 .unwrap_or_else(|| {
1641 request
1642 .before_edit
1643 .indent_size_for_line(suggestion.basis_row)
1644 })
1645 .with_delta(suggestion.delta, language_indent_size);
1646 old_suggestions
1647 .insert(new_row, (suggested_indent, suggestion.within_error));
1648 }
1649 }
1650 yield_now().await;
1651 }
1652
1653 // Compute new suggestions for each line, but only include them in the result
1654 // if they differ from the old suggestion for that line.
1655 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1656 let mut language_indent_size = IndentSize::default();
1657 for (row_range, original_indent_column) in row_ranges {
1658 let new_edited_row_range = if request.is_block_mode {
1659 row_range.start..row_range.start + 1
1660 } else {
1661 row_range.clone()
1662 };
1663
1664 let suggestions = snapshot
1665 .suggest_autoindents(new_edited_row_range.clone())
1666 .into_iter()
1667 .flatten();
1668 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1669 if let Some(suggestion) = suggestion {
1670 // Find the indent size based on the language for this row.
1671 while let Some((row, size)) = language_indent_sizes.peek() {
1672 if *row > new_row {
1673 break;
1674 }
1675 language_indent_size = *size;
1676 language_indent_sizes.next();
1677 }
1678
1679 let suggested_indent = indent_sizes
1680 .get(&suggestion.basis_row)
1681 .copied()
1682 .map(|e| e.0)
1683 .unwrap_or_else(|| {
1684 snapshot.indent_size_for_line(suggestion.basis_row)
1685 })
1686 .with_delta(suggestion.delta, language_indent_size);
1687
1688 if old_suggestions.get(&new_row).map_or(
1689 true,
1690 |(old_indentation, was_within_error)| {
1691 suggested_indent != *old_indentation
1692 && (!suggestion.within_error || *was_within_error)
1693 },
1694 ) {
1695 indent_sizes.insert(
1696 new_row,
1697 (suggested_indent, request.ignore_empty_lines),
1698 );
1699 }
1700 }
1701 }
1702
1703 if let (true, Some(original_indent_column)) =
1704 (request.is_block_mode, original_indent_column)
1705 {
1706 let new_indent =
1707 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1708 *indent
1709 } else {
1710 snapshot.indent_size_for_line(row_range.start)
1711 };
1712 let delta = new_indent.len as i64 - original_indent_column as i64;
1713 if delta != 0 {
1714 for row in row_range.skip(1) {
1715 indent_sizes.entry(row).or_insert_with(|| {
1716 let mut size = snapshot.indent_size_for_line(row);
1717 if size.kind == new_indent.kind {
1718 match delta.cmp(&0) {
1719 Ordering::Greater => size.len += delta as u32,
1720 Ordering::Less => {
1721 size.len = size.len.saturating_sub(-delta as u32)
1722 }
1723 Ordering::Equal => {}
1724 }
1725 }
1726 (size, request.ignore_empty_lines)
1727 });
1728 }
1729 }
1730 }
1731
1732 yield_now().await;
1733 }
1734 }
1735
1736 indent_sizes
1737 .into_iter()
1738 .filter_map(|(row, (indent, ignore_empty_lines))| {
1739 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1740 None
1741 } else {
1742 Some((row, indent))
1743 }
1744 })
1745 .collect()
1746 })
1747 }
1748
1749 fn apply_autoindents(
1750 &mut self,
1751 indent_sizes: BTreeMap<u32, IndentSize>,
1752 cx: &mut Context<Self>,
1753 ) {
1754 self.autoindent_requests.clear();
1755
1756 let edits: Vec<_> = indent_sizes
1757 .into_iter()
1758 .filter_map(|(row, indent_size)| {
1759 let current_size = indent_size_for_line(self, row);
1760 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1761 })
1762 .collect();
1763
1764 let preserve_preview = self.preserve_preview();
1765 self.edit(edits, None, cx);
1766 if preserve_preview {
1767 self.refresh_preview();
1768 }
1769 }
1770
1771 /// Create a minimal edit that will cause the given row to be indented
1772 /// with the given size. After applying this edit, the length of the line
1773 /// will always be at least `new_size.len`.
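    ///
    /// A minimal sketch of the intended behavior (illustrative only, not a compiling
    /// doctest; `IndentSize` and `Point` are this crate's types):
    ///
    /// ```ignore
    /// // Row 3 currently starts with 4 spaces; we want 8.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(4),
    ///     IndentSize::spaces(8),
    /// );
    /// // The minimal edit inserts the 4 missing spaces at the start of the row.
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "    ".to_string()))
    /// );
    /// ```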
1774 pub fn edit_for_indent_size_adjustment(
1775 row: u32,
1776 current_size: IndentSize,
1777 new_size: IndentSize,
1778 ) -> Option<(Range<Point>, String)> {
1779 if new_size.kind == current_size.kind {
1780 match new_size.len.cmp(&current_size.len) {
1781 Ordering::Greater => {
1782 let point = Point::new(row, 0);
1783 Some((
1784 point..point,
1785 iter::repeat(new_size.char())
1786 .take((new_size.len - current_size.len) as usize)
1787 .collect::<String>(),
1788 ))
1789 }
1790
1791 Ordering::Less => Some((
1792 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1793 String::new(),
1794 )),
1795
1796 Ordering::Equal => None,
1797 }
1798 } else {
1799 Some((
1800 Point::new(row, 0)..Point::new(row, current_size.len),
1801 iter::repeat(new_size.char())
1802 .take(new_size.len as usize)
1803 .collect::<String>(),
1804 ))
1805 }
1806 }
1807
1808 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1809 /// and the given new text.
1810 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1811 let old_text = self.as_rope().clone();
1812 let base_version = self.version();
1813 cx.background_executor()
1814 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1815 let old_text = old_text.to_string();
1816 let line_ending = LineEnding::detect(&new_text);
1817 LineEnding::normalize(&mut new_text);
1818 let edits = text_diff(&old_text, &new_text);
1819 Diff {
1820 base_version,
1821 line_ending,
1822 edits,
1823 }
1824 })
1825 }
1826
1827 /// Spawns a background task that searches the buffer for any whitespace
1828 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1829 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1830 let old_text = self.as_rope().clone();
1831 let line_ending = self.line_ending();
1832 let base_version = self.version();
1833 cx.background_spawn(async move {
1834 let ranges = trailing_whitespace_ranges(&old_text);
1835 let empty = Arc::<str>::from("");
1836 Diff {
1837 base_version,
1838 line_ending,
1839 edits: ranges
1840 .into_iter()
1841 .map(|range| (range, empty.clone()))
1842 .collect(),
1843 }
1844 })
1845 }
1846
1847 /// Ensures that the buffer ends with a single newline character, and
1848 /// no other whitespace.
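    ///
    /// A minimal sketch of the intended behavior (illustrative only; requires a
    /// gpui `Context<Buffer>` to run):
    ///
    /// ```ignore
    /// // Buffer text: "fn main() {}\n\n  \n"
    /// buffer.ensure_final_newline(cx);
    /// // All trailing whitespace is replaced with a single newline:
    /// assert_eq!(buffer.text(), "fn main() {}\n");
    /// ```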
1849 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1850 let len = self.len();
1851 let mut offset = len;
1852 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1853 let non_whitespace_len = chunk
1854 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1855 .len();
1856 offset -= chunk.len();
1857 offset += non_whitespace_len;
1858 if non_whitespace_len != 0 {
1859 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1860 return;
1861 }
1862 break;
1863 }
1864 }
1865 self.edit([(offset..len, "\n")], None, cx);
1866 }
1867
1868 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1869 /// calculated, then adjust the diff to account for those changes, and discard any
1870 /// parts of the diff that conflict with those changes.
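    ///
    /// A sketch of the compute-then-apply flow (illustrative only; `new_text` stands
    /// for the replacement contents, and in practice the task is awaited on an
    /// executor while the buffer is held as an entity):
    ///
    /// ```ignore
    /// let diff_task = buffer.diff(new_text, cx); // runs on the background executor
    /// let diff = diff_task.await;                // resolves to a `Diff`
    /// buffer.apply_diff(diff, cx);               // adjusts for edits made meanwhile
    /// ```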
1871 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1872 let snapshot = self.snapshot();
1873 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1874 let mut delta = 0;
1875 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1876 while let Some(edit_since) = edits_since.peek() {
1877 // If the edit occurs after a diff hunk, then it does not
1878 // affect that hunk.
1879 if edit_since.old.start > range.end {
1880 break;
1881 }
1882 // If the edit precedes the diff hunk, then adjust the hunk
1883 // to reflect the edit.
1884 else if edit_since.old.end < range.start {
1885 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1886 edits_since.next();
1887 }
1888 // If the edit intersects a diff hunk, then discard that hunk.
1889 else {
1890 return None;
1891 }
1892 }
1893
1894 let start = (range.start as i64 + delta) as usize;
1895 let end = (range.end as i64 + delta) as usize;
1896 Some((start..end, new_text))
1897 });
1898
1899 self.start_transaction();
1900 self.text.set_line_ending(diff.line_ending);
1901 self.edit(adjusted_edits, None, cx);
1902 self.end_transaction(cx)
1903 }
1904
1905 fn has_unsaved_edits(&self) -> bool {
1906 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1907
1908 if last_version == self.version {
1909 self.has_unsaved_edits
1910 .set((last_version, has_unsaved_edits));
1911 return has_unsaved_edits;
1912 }
1913
1914 let has_edits = self.has_edits_since(&self.saved_version);
1915 self.has_unsaved_edits
1916 .set((self.version.clone(), has_edits));
1917 has_edits
1918 }
1919
1920 /// Checks if the buffer has unsaved changes.
1921 pub fn is_dirty(&self) -> bool {
1922 if self.capability == Capability::ReadOnly {
1923 return false;
1924 }
1925 if self.has_conflict {
1926 return true;
1927 }
1928 match self.file.as_ref().map(|f| f.disk_state()) {
1929 Some(DiskState::New) | Some(DiskState::Deleted) => {
1930 !self.is_empty() && self.has_unsaved_edits()
1931 }
1932 _ => self.has_unsaved_edits(),
1933 }
1934 }
1935
1936 /// Checks if the buffer and its file have both changed since the buffer
1937 /// was last saved or reloaded.
1938 pub fn has_conflict(&self) -> bool {
1939 if self.has_conflict {
1940 return true;
1941 }
1942 let Some(file) = self.file.as_ref() else {
1943 return false;
1944 };
1945 match file.disk_state() {
1946 DiskState::New => false,
1947 DiskState::Present { mtime } => match self.saved_mtime {
1948 Some(saved_mtime) => {
1949 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1950 }
1951 None => true,
1952 },
1953 DiskState::Deleted => false,
1954 }
1955 }
1956
1957 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1958 pub fn subscribe(&mut self) -> Subscription {
1959 self.text.subscribe()
1960 }
1961
1962 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1963 ///
1964 /// This allows downstream code to check if the buffer's text has changed without
1965 /// waiting for an effect cycle, which would be required if using events.
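    ///
    /// Illustrative sketch (assumes a `&mut Buffer` and a gpui `Context` for the
    /// subsequent edit):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// // Keep the strong reference; the buffer only stores a `Weak`.
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    ///
    /// buffer.edit([(0..0, "hello")], None, cx);
    /// assert!(changed.get()); // set synchronously, without waiting for an event
    /// ```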
1966 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
1967 if let Err(ix) = self
1968 .change_bits
1969 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
1970 {
1971 self.change_bits.insert(ix, bit);
1972 }
1973 }
1974
1975 fn was_changed(&mut self) {
1976 self.change_bits.retain(|change_bit| {
1977 change_bit.upgrade().map_or(false, |bit| {
1978 bit.replace(true);
1979 true
1980 })
1981 });
1982 }
1983
1984 /// Starts a transaction, if one is not already in-progress. When undoing or
1985 /// redoing edits, all of the edits performed within a transaction are undone
1986 /// or redone together.
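    ///
    /// Illustrative sketch (assumes a gpui `Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(12..12, "}\n")], None, cx);
    /// let transaction_id = buffer.end_transaction(cx);
    ///
    /// // Both edits are undone together, since they share a transaction.
    /// assert_eq!(buffer.undo(cx), transaction_id);
    /// ```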
1987 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1988 self.start_transaction_at(Instant::now())
1989 }
1990
1991 /// Starts a transaction, providing the current time. Subsequent transactions
1992 /// that occur within a short period of time will be grouped together. This
1993 /// is controlled by the buffer's undo grouping duration.
1994 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1995 self.transaction_depth += 1;
1996 if self.was_dirty_before_starting_transaction.is_none() {
1997 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1998 }
1999 self.text.start_transaction_at(now)
2000 }
2001
2002 /// Terminates the current transaction, if this is the outermost transaction.
2003 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2004 self.end_transaction_at(Instant::now(), cx)
2005 }
2006
2007 /// Terminates the current transaction, providing the current time. Subsequent transactions
2008 /// that occur within a short period of time will be grouped together. This
2009 /// is controlled by the buffer's undo grouping duration.
2010 pub fn end_transaction_at(
2011 &mut self,
2012 now: Instant,
2013 cx: &mut Context<Self>,
2014 ) -> Option<TransactionId> {
2015 assert!(self.transaction_depth > 0);
2016 self.transaction_depth -= 1;
2017 let was_dirty = if self.transaction_depth == 0 {
2018 self.was_dirty_before_starting_transaction.take().unwrap()
2019 } else {
2020 false
2021 };
2022 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2023 self.did_edit(&start_version, was_dirty, cx);
2024 Some(transaction_id)
2025 } else {
2026 None
2027 }
2028 }
2029
2030 /// Manually add a transaction to the buffer's undo history.
2031 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2032 self.text.push_transaction(transaction, now);
2033 }
2034
2035 /// Prevent the last transaction from being grouped with any subsequent transactions,
2036 /// even if they occur within the buffer's undo grouping duration.
2037 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2038 self.text.finalize_last_transaction()
2039 }
2040
2041 /// Manually group all changes since a given transaction.
2042 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2043 self.text.group_until_transaction(transaction_id);
2044 }
2045
2046 /// Manually remove a transaction from the buffer's undo history.
2047 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2048 self.text.forget_transaction(transaction_id)
2049 }
2050
2051 /// Retrieve a transaction from the buffer's undo history.
2052 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2053 self.text.get_transaction(transaction_id)
2054 }
2055
2056 /// Manually merge two transactions in the buffer's undo history.
2057 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2058 self.text.merge_transactions(transaction, destination);
2059 }
2060
2061 /// Waits for the buffer to receive operations with the given timestamps.
2062 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2063 &mut self,
2064 edit_ids: It,
2065 ) -> impl Future<Output = Result<()>> + use<It> {
2066 self.text.wait_for_edits(edit_ids)
2067 }
2068
2069 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2070 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2071 &mut self,
2072 anchors: It,
2073 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2074 self.text.wait_for_anchors(anchors)
2075 }
2076
2077 /// Waits for the buffer to receive operations up to the given version.
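    ///
    /// Illustrative sketch (e.g. on a collaborating replica that received an anchor
    /// or selection referencing a version it has not seen yet; `version` here stands
    /// for a `clock::Global` received from the remote peer):
    ///
    /// ```ignore
    /// let wait = buffer.update(cx, |buffer, _| buffer.wait_for_version(version));
    /// wait.await?;
    /// // The buffer has now caught up to `version`.
    /// ```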
2078 pub fn wait_for_version(
2079 &mut self,
2080 version: clock::Global,
2081 ) -> impl Future<Output = Result<()>> + use<> {
2082 self.text.wait_for_version(version)
2083 }
2084
2085 /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`], or
2086 /// [`Buffer::wait_for_version`] to resolve with an error.
2087 pub fn give_up_waiting(&mut self) {
2088 self.text.give_up_waiting();
2089 }
2090
2091 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2092 pub fn set_active_selections(
2093 &mut self,
2094 selections: Arc<[Selection<Anchor>]>,
2095 line_mode: bool,
2096 cursor_shape: CursorShape,
2097 cx: &mut Context<Self>,
2098 ) {
2099 let lamport_timestamp = self.text.lamport_clock.tick();
2100 self.remote_selections.insert(
2101 self.text.replica_id(),
2102 SelectionSet {
2103 selections: selections.clone(),
2104 lamport_timestamp,
2105 line_mode,
2106 cursor_shape,
2107 },
2108 );
2109 self.send_operation(
2110 Operation::UpdateSelections {
2111 selections,
2112 line_mode,
2113 lamport_timestamp,
2114 cursor_shape,
2115 },
2116 true,
2117 cx,
2118 );
2119 self.non_text_state_update_count += 1;
2120 cx.notify();
2121 }
2122
2123 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2124 /// this replica.
2125 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2126 if self
2127 .remote_selections
2128 .get(&self.text.replica_id())
2129 .map_or(true, |set| !set.selections.is_empty())
2130 {
2131 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2132 }
2133 }
2134
2135 pub fn set_agent_selections(
2136 &mut self,
2137 selections: Arc<[Selection<Anchor>]>,
2138 line_mode: bool,
2139 cursor_shape: CursorShape,
2140 cx: &mut Context<Self>,
2141 ) {
2142 let lamport_timestamp = self.text.lamport_clock.tick();
2143 self.remote_selections.insert(
2144 AGENT_REPLICA_ID,
2145 SelectionSet {
2146 selections: selections.clone(),
2147 lamport_timestamp,
2148 line_mode,
2149 cursor_shape,
2150 },
2151 );
2152 self.non_text_state_update_count += 1;
2153 cx.notify();
2154 }
2155
2156 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2157 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2158 }
2159
2160 /// Replaces the buffer's entire text.
2161 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2162 where
2163 T: Into<Arc<str>>,
2164 {
2165 self.autoindent_requests.clear();
2166 self.edit([(0..self.len(), text)], None, cx)
2167 }
2168
2169 /// Appends the given text to the end of the buffer.
2170 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2171 where
2172 T: Into<Arc<str>>,
2173 {
2174 self.edit([(self.len()..self.len(), text)], None, cx)
2175 }
2176
2177 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2178 /// delete, and a string of text to insert at that location.
2179 ///
2180 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2181 /// request for the edited ranges, which will be processed when the buffer finishes
2182 /// parsing.
2183 ///
2184 /// Parsing takes place at the end of a transaction, and may run synchronously
2185 /// or asynchronously, depending on the changes.
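    ///
    /// Illustrative sketch (assumes a gpui `Context<Buffer>`, a buffer with a
    /// language configured, and a hypothetical `offset` inside an indented block):
    ///
    /// ```ignore
    /// // Insert a statement and let auto-indent place it relative to its block.
    /// buffer.edit(
    ///     [(offset..offset, "\nlet x = 1;")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    ///
    /// // Plain edits skip auto-indentation entirely.
    /// buffer.edit([(0..0, "// header\n")], None, cx);
    /// ```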
2186 pub fn edit<I, S, T>(
2187 &mut self,
2188 edits_iter: I,
2189 autoindent_mode: Option<AutoindentMode>,
2190 cx: &mut Context<Self>,
2191 ) -> Option<clock::Lamport>
2192 where
2193 I: IntoIterator<Item = (Range<S>, T)>,
2194 S: ToOffset,
2195 T: Into<Arc<str>>,
2196 {
2197 // Skip invalid edits and coalesce contiguous ones.
2198 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2199
2200 for (range, new_text) in edits_iter {
2201 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2202
2203 if range.start > range.end {
2204 mem::swap(&mut range.start, &mut range.end);
2205 }
2206 let new_text = new_text.into();
2207 if !new_text.is_empty() || !range.is_empty() {
2208 if let Some((prev_range, prev_text)) = edits.last_mut() {
2209 if prev_range.end >= range.start {
2210 prev_range.end = cmp::max(prev_range.end, range.end);
2211 *prev_text = format!("{prev_text}{new_text}").into();
2212 } else {
2213 edits.push((range, new_text));
2214 }
2215 } else {
2216 edits.push((range, new_text));
2217 }
2218 }
2219 }
2220 if edits.is_empty() {
2221 return None;
2222 }
2223
2224 self.start_transaction();
2225 self.pending_autoindent.take();
2226 let autoindent_request = autoindent_mode
2227 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2228
2229 let edit_operation = self.text.edit(edits.iter().cloned());
2230 let edit_id = edit_operation.timestamp();
2231
2232 if let Some((before_edit, mode)) = autoindent_request {
2233 let mut delta = 0isize;
2234 let entries = edits
2235 .into_iter()
2236 .enumerate()
2237 .zip(&edit_operation.as_edit().unwrap().new_text)
2238 .map(|((ix, (range, _)), new_text)| {
2239 let new_text_length = new_text.len();
2240 let old_start = range.start.to_point(&before_edit);
2241 let new_start = (delta + range.start as isize) as usize;
2242 let range_len = range.end - range.start;
2243 delta += new_text_length as isize - range_len as isize;
2244
2245 // Decide what range of the insertion to auto-indent, and whether
2246 // the first line of the insertion should be considered a newly-inserted line
2247 // or an edit to an existing line.
2248 let mut range_of_insertion_to_indent = 0..new_text_length;
2249 let mut first_line_is_new = true;
2250
2251 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2252 let old_line_end = before_edit.line_len(old_start.row);
2253
2254 if old_start.column > old_line_start {
2255 first_line_is_new = false;
2256 }
2257
2258 if !new_text.contains('\n')
2259 && (old_start.column + (range_len as u32) < old_line_end
2260 || old_line_end == old_line_start)
2261 {
2262 first_line_is_new = false;
2263 }
2264
2265 // When inserting text starting with a newline, avoid auto-indenting the
2266 // previous line.
2267 if new_text.starts_with('\n') {
2268 range_of_insertion_to_indent.start += 1;
2269 first_line_is_new = true;
2270 }
2271
2272 let mut original_indent_column = None;
2273 if let AutoindentMode::Block {
2274 original_indent_columns,
2275 } = &mode
2276 {
2277 original_indent_column = Some(if new_text.starts_with('\n') {
2278 indent_size_for_text(
2279 new_text[range_of_insertion_to_indent.clone()].chars(),
2280 )
2281 .len
2282 } else {
2283 original_indent_columns
2284 .get(ix)
2285 .copied()
2286 .flatten()
2287 .unwrap_or_else(|| {
2288 indent_size_for_text(
2289 new_text[range_of_insertion_to_indent.clone()].chars(),
2290 )
2291 .len
2292 })
2293 });
2294
2295 // Avoid auto-indenting the line after the edit.
2296 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2297 range_of_insertion_to_indent.end -= 1;
2298 }
2299 }
2300
2301 AutoindentRequestEntry {
2302 first_line_is_new,
2303 original_indent_column,
2304 indent_size: before_edit.language_indent_size_at(range.start, cx),
2305 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2306 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2307 }
2308 })
2309 .collect();
2310
2311 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2312 before_edit,
2313 entries,
2314 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2315 ignore_empty_lines: false,
2316 }));
2317 }
2318
2319 self.end_transaction(cx);
2320 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2321 Some(edit_id)
2322 }
2323
2324 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2325 self.was_changed();
2326
2327 if self.edits_since::<usize>(old_version).next().is_none() {
2328 return;
2329 }
2330
2331 self.reparse(cx);
2332 cx.emit(BufferEvent::Edited);
2333 if was_dirty != self.is_dirty() {
2334 cx.emit(BufferEvent::DirtyChanged);
2335 }
2336 cx.notify();
2337 }
2338
2339 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2340 where
2341 I: IntoIterator<Item = Range<T>>,
2342 T: ToOffset + Copy,
2343 {
2344 let before_edit = self.snapshot();
2345 let entries = ranges
2346 .into_iter()
2347 .map(|range| AutoindentRequestEntry {
2348 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2349 first_line_is_new: true,
2350 indent_size: before_edit.language_indent_size_at(range.start, cx),
2351 original_indent_column: None,
2352 })
2353 .collect();
2354 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2355 before_edit,
2356 entries,
2357 is_block_mode: false,
2358 ignore_empty_lines: true,
2359 }));
2360 self.request_autoindent(cx);
2361 }
2362
2363 /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2364 /// You can also request the insertion of empty lines above and below the line starting at the returned point.
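    ///
    /// Illustrative sketch (assumes a gpui `Context<Buffer>` and a buffer containing
    /// `"one\ntwo"` with no language-driven indentation):
    ///
    /// ```ignore
    /// let start = buffer.insert_empty_line(Point::new(0, 3), false, false, cx);
    /// // `start` is the beginning of the newly created empty line between the
    /// // two original lines.
    /// assert_eq!(buffer.text(), "one\n\ntwo");
    /// ```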
2365 pub fn insert_empty_line(
2366 &mut self,
2367 position: impl ToPoint,
2368 space_above: bool,
2369 space_below: bool,
2370 cx: &mut Context<Self>,
2371 ) -> Point {
2372 let mut position = position.to_point(self);
2373
2374 self.start_transaction();
2375
2376 self.edit(
2377 [(position..position, "\n")],
2378 Some(AutoindentMode::EachLine),
2379 cx,
2380 );
2381
2382 if position.column > 0 {
2383 position += Point::new(1, 0);
2384 }
2385
2386 if !self.is_line_blank(position.row) {
2387 self.edit(
2388 [(position..position, "\n")],
2389 Some(AutoindentMode::EachLine),
2390 cx,
2391 );
2392 }
2393
2394 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2395 self.edit(
2396 [(position..position, "\n")],
2397 Some(AutoindentMode::EachLine),
2398 cx,
2399 );
2400 position.row += 1;
2401 }
2402
2403 if space_below
2404 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2405 {
2406 self.edit(
2407 [(position..position, "\n")],
2408 Some(AutoindentMode::EachLine),
2409 cx,
2410 );
2411 }
2412
2413 self.end_transaction(cx);
2414
2415 position
2416 }
2417
2418 /// Applies the given remote operations to the buffer.
2419 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2420 self.pending_autoindent.take();
2421 let was_dirty = self.is_dirty();
2422 let old_version = self.version.clone();
2423 let mut deferred_ops = Vec::new();
2424 let buffer_ops = ops
2425 .into_iter()
2426 .filter_map(|op| match op {
2427 Operation::Buffer(op) => Some(op),
2428 _ => {
2429 if self.can_apply_op(&op) {
2430 self.apply_op(op, cx);
2431 } else {
2432 deferred_ops.push(op);
2433 }
2434 None
2435 }
2436 })
2437 .collect::<Vec<_>>();
2438 for operation in buffer_ops.iter() {
2439 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2440 }
2441 self.text.apply_ops(buffer_ops);
2442 self.deferred_ops.insert(deferred_ops);
2443 self.flush_deferred_ops(cx);
2444 self.did_edit(&old_version, was_dirty, cx);
2445 // Notify independently of whether the buffer was edited as the operations could include a
2446 // selection update.
2447 cx.notify();
2448 }
2449
2450 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2451 let mut deferred_ops = Vec::new();
2452 for op in self.deferred_ops.drain().iter().cloned() {
2453 if self.can_apply_op(&op) {
2454 self.apply_op(op, cx);
2455 } else {
2456 deferred_ops.push(op);
2457 }
2458 }
2459 self.deferred_ops.insert(deferred_ops);
2460 }
2461
2462 pub fn has_deferred_ops(&self) -> bool {
2463 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2464 }
2465
2466 fn can_apply_op(&self, operation: &Operation) -> bool {
2467 match operation {
2468 Operation::Buffer(_) => {
2469 unreachable!("buffer operations should never be applied at this layer")
2470 }
2471 Operation::UpdateDiagnostics {
2472 diagnostics: diagnostic_set,
2473 ..
2474 } => diagnostic_set.iter().all(|diagnostic| {
2475 self.text.can_resolve(&diagnostic.range.start)
2476 && self.text.can_resolve(&diagnostic.range.end)
2477 }),
2478 Operation::UpdateSelections { selections, .. } => selections
2479 .iter()
2480 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2481 Operation::UpdateCompletionTriggers { .. } => true,
2482 }
2483 }
2484
2485 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2486 match operation {
2487 Operation::Buffer(_) => {
2488 unreachable!("buffer operations should never be applied at this layer")
2489 }
2490 Operation::UpdateDiagnostics {
2491 server_id,
2492 diagnostics: diagnostic_set,
2493 lamport_timestamp,
2494 } => {
2495 let snapshot = self.snapshot();
2496 self.apply_diagnostic_update(
2497 server_id,
2498 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2499 lamport_timestamp,
2500 cx,
2501 );
2502 }
2503 Operation::UpdateSelections {
2504 selections,
2505 lamport_timestamp,
2506 line_mode,
2507 cursor_shape,
2508 } => {
2509 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2510 if set.lamport_timestamp > lamport_timestamp {
2511 return;
2512 }
2513 }
2514
2515 self.remote_selections.insert(
2516 lamport_timestamp.replica_id,
2517 SelectionSet {
2518 selections,
2519 lamport_timestamp,
2520 line_mode,
2521 cursor_shape,
2522 },
2523 );
2524 self.text.lamport_clock.observe(lamport_timestamp);
2525 self.non_text_state_update_count += 1;
2526 }
2527 Operation::UpdateCompletionTriggers {
2528 triggers,
2529 lamport_timestamp,
2530 server_id,
2531 } => {
2532 if triggers.is_empty() {
2533 self.completion_triggers_per_language_server
2534 .remove(&server_id);
2535 self.completion_triggers = self
2536 .completion_triggers_per_language_server
2537 .values()
2538 .flat_map(|triggers| triggers.into_iter().cloned())
2539 .collect();
2540 } else {
2541 self.completion_triggers_per_language_server
2542 .insert(server_id, triggers.iter().cloned().collect());
2543 self.completion_triggers.extend(triggers);
2544 }
2545 self.text.lamport_clock.observe(lamport_timestamp);
2546 }
2547 }
2548 }
2549
2550 fn apply_diagnostic_update(
2551 &mut self,
2552 server_id: LanguageServerId,
2553 diagnostics: DiagnosticSet,
2554 lamport_timestamp: clock::Lamport,
2555 cx: &mut Context<Self>,
2556 ) {
2557 if lamport_timestamp > self.diagnostics_timestamp {
2558 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2559 if diagnostics.is_empty() {
2560 if let Ok(ix) = ix {
2561 self.diagnostics.remove(ix);
2562 }
2563 } else {
2564 match ix {
2565 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2566 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2567 };
2568 }
2569 self.diagnostics_timestamp = lamport_timestamp;
2570 self.non_text_state_update_count += 1;
2571 self.text.lamport_clock.observe(lamport_timestamp);
2572 cx.notify();
2573 cx.emit(BufferEvent::DiagnosticsUpdated);
2574 }
2575 }
2576
2577 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2578 self.was_changed();
2579 cx.emit(BufferEvent::Operation {
2580 operation,
2581 is_local,
2582 });
2583 }
2584
2585 /// Removes the selections for a given peer.
2586 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2587 self.remote_selections.remove(&replica_id);
2588 cx.notify();
2589 }
2590
2591 /// Undoes the most recent transaction.
2592 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2593 let was_dirty = self.is_dirty();
2594 let old_version = self.version.clone();
2595
2596 if let Some((transaction_id, operation)) = self.text.undo() {
2597 self.send_operation(Operation::Buffer(operation), true, cx);
2598 self.did_edit(&old_version, was_dirty, cx);
2599 Some(transaction_id)
2600 } else {
2601 None
2602 }
2603 }
2604
2605 /// Manually undoes a specific transaction in the buffer's undo history.
2606 pub fn undo_transaction(
2607 &mut self,
2608 transaction_id: TransactionId,
2609 cx: &mut Context<Self>,
2610 ) -> bool {
2611 let was_dirty = self.is_dirty();
2612 let old_version = self.version.clone();
2613 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2614 self.send_operation(Operation::Buffer(operation), true, cx);
2615 self.did_edit(&old_version, was_dirty, cx);
2616 true
2617 } else {
2618 false
2619 }
2620 }
2621
2622 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2623 pub fn undo_to_transaction(
2624 &mut self,
2625 transaction_id: TransactionId,
2626 cx: &mut Context<Self>,
2627 ) -> bool {
2628 let was_dirty = self.is_dirty();
2629 let old_version = self.version.clone();
2630
2631 let operations = self.text.undo_to_transaction(transaction_id);
2632 let undone = !operations.is_empty();
2633 for operation in operations {
2634 self.send_operation(Operation::Buffer(operation), true, cx);
2635 }
2636 if undone {
2637 self.did_edit(&old_version, was_dirty, cx)
2638 }
2639 undone
2640 }
2641
2642 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2643 let was_dirty = self.is_dirty();
2644 let operation = self.text.undo_operations(counts);
2645 let old_version = self.version.clone();
2646 self.send_operation(Operation::Buffer(operation), true, cx);
2647 self.did_edit(&old_version, was_dirty, cx);
2648 }
2649
2650 /// Redoes the most recent transaction.
2651 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2652 let was_dirty = self.is_dirty();
2653 let old_version = self.version.clone();
2654
2655 if let Some((transaction_id, operation)) = self.text.redo() {
2656 self.send_operation(Operation::Buffer(operation), true, cx);
2657 self.did_edit(&old_version, was_dirty, cx);
2658 Some(transaction_id)
2659 } else {
2660 None
2661 }
2662 }
2663
2664 /// Manually redoes all changes until a given transaction in the buffer's redo history.
2665 pub fn redo_to_transaction(
2666 &mut self,
2667 transaction_id: TransactionId,
2668 cx: &mut Context<Self>,
2669 ) -> bool {
2670 let was_dirty = self.is_dirty();
2671 let old_version = self.version.clone();
2672
2673 let operations = self.text.redo_to_transaction(transaction_id);
2674 let redone = !operations.is_empty();
2675 for operation in operations {
2676 self.send_operation(Operation::Buffer(operation), true, cx);
2677 }
2678 if redone {
2679 self.did_edit(&old_version, was_dirty, cx)
2680 }
2681 redone
2682 }
2683
2684 /// Override current completion triggers with the user-provided completion triggers.
2685 pub fn set_completion_triggers(
2686 &mut self,
2687 server_id: LanguageServerId,
2688 triggers: BTreeSet<String>,
2689 cx: &mut Context<Self>,
2690 ) {
2691 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2692 if triggers.is_empty() {
2693 self.completion_triggers_per_language_server
2694 .remove(&server_id);
2695 self.completion_triggers = self
2696 .completion_triggers_per_language_server
2697 .values()
2698 .flat_map(|triggers| triggers.into_iter().cloned())
2699 .collect();
2700 } else {
2701 self.completion_triggers_per_language_server
2702 .insert(server_id, triggers.clone());
2703 self.completion_triggers.extend(triggers.iter().cloned());
2704 }
2705 self.send_operation(
2706 Operation::UpdateCompletionTriggers {
2707 triggers: triggers.into_iter().collect(),
2708 lamport_timestamp: self.completion_triggers_timestamp,
2709 server_id,
2710 },
2711 true,
2712 cx,
2713 );
2714 cx.notify();
2715 }
2716
2717 /// Returns a list of strings which trigger a completion menu for this language.
2718 /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2719 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2720 &self.completion_triggers
2721 }
2722
2723 /// Call this directly after performing edits to prevent the preview tab
2724 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2725 /// to return false until there are additional edits.
2726 pub fn refresh_preview(&mut self) {
2727 self.preview_version = self.version.clone();
2728 }
2729
2730 /// Whether we should preserve the preview status of a tab containing this buffer.
2731 pub fn preserve_preview(&self) -> bool {
2732 !self.has_edits_since(&self.preview_version)
2733 }
2734}
2735
2736#[doc(hidden)]
2737#[cfg(any(test, feature = "test-support"))]
2738impl Buffer {
2739 pub fn edit_via_marked_text(
2740 &mut self,
2741 marked_string: &str,
2742 autoindent_mode: Option<AutoindentMode>,
2743 cx: &mut Context<Self>,
2744 ) {
2745 let edits = self.edits_for_marked_text(marked_string);
2746 self.edit(edits, autoindent_mode, cx);
2747 }
2748
2749 pub fn set_group_interval(&mut self, group_interval: Duration) {
2750 self.text.set_group_interval(group_interval);
2751 }
2752
2753 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2754 where
2755 T: rand::Rng,
2756 {
2757 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2758 let mut last_end = None;
2759 for _ in 0..old_range_count {
2760 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2761 break;
2762 }
2763
2764 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2765 let mut range = self.random_byte_range(new_start, rng);
2766 if rng.gen_bool(0.2) {
2767 mem::swap(&mut range.start, &mut range.end);
2768 }
2769 last_end = Some(range.end);
2770
2771 let new_text_len = rng.gen_range(0..10);
2772 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2773 new_text = new_text.to_uppercase();
2774
2775 edits.push((range, new_text));
2776 }
2777 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2778 self.edit(edits, None, cx);
2779 }
2780
2781 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2782 let was_dirty = self.is_dirty();
2783 let old_version = self.version.clone();
2784
2785 let ops = self.text.randomly_undo_redo(rng);
2786 if !ops.is_empty() {
2787 for op in ops {
2788 self.send_operation(Operation::Buffer(op), true, cx);
2789 self.did_edit(&old_version, was_dirty, cx);
2790 }
2791 }
2792 }
2793}
2794
2795impl EventEmitter<BufferEvent> for Buffer {}
2796
2797impl Deref for Buffer {
2798 type Target = TextBuffer;
2799
2800 fn deref(&self) -> &Self::Target {
2801 &self.text
2802 }
2803}
2804
2805impl BufferSnapshot {
2806 /// Returns [`IndentSize`] for a given line that respects user settings and
2807 /// language preferences.
2808 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2809 indent_size_for_line(self, row)
2810 }
2811
2812 /// Returns [`IndentSize`] for a given position that respects user settings
2813 /// and language preferences.
2814 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2815 let settings = language_settings(
2816 self.language_at(position).map(|l| l.name()),
2817 self.file(),
2818 cx,
2819 );
2820 if settings.hard_tabs {
2821 IndentSize::tab()
2822 } else {
2823 IndentSize::spaces(settings.tab_size.get())
2824 }
2825 }
2826
2827 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2828 /// is passed in as `single_indent_size`.
2829 pub fn suggested_indents(
2830 &self,
2831 rows: impl Iterator<Item = u32>,
2832 single_indent_size: IndentSize,
2833 ) -> BTreeMap<u32, IndentSize> {
2834 let mut result = BTreeMap::new();
2835
2836 for row_range in contiguous_ranges(rows, 10) {
2837 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2838 Some(suggestions) => suggestions,
2839 _ => break,
2840 };
2841
2842 for (row, suggestion) in row_range.zip(suggestions) {
2843 let indent_size = if let Some(suggestion) = suggestion {
2844 result
2845 .get(&suggestion.basis_row)
2846 .copied()
2847 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2848 .with_delta(suggestion.delta, single_indent_size)
2849 } else {
2850 self.indent_size_for_line(row)
2851 };
2852
2853 result.insert(row, indent_size);
2854 }
2855 }
2856
2857 result
2858 }
2859
2860 fn suggest_autoindents(
2861 &self,
2862 row_range: Range<u32>,
2863 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2864 let config = &self.language.as_ref()?.config;
2865 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2866 let significant_indentation = config.significant_indentation;
2867
2868 // Find the suggested indentation ranges based on the syntax tree.
2869 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2870 let end = Point::new(row_range.end, 0);
2871 let range = (start..end).to_offset(&self.text);
2872 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2873 Some(&grammar.indents_config.as_ref()?.query)
2874 });
2875 let indent_configs = matches
2876 .grammars()
2877 .iter()
2878 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2879 .collect::<Vec<_>>();
2880
2881 let mut indent_ranges = Vec::<Range<Point>>::new();
2882 let mut outdent_positions = Vec::<Point>::new();
2883 while let Some(mat) = matches.peek() {
2884 let mut start: Option<Point> = None;
2885 let mut end: Option<Point> = None;
2886 let mut outdent: Option<Point> = None;
2887
2888 let config = &indent_configs[mat.grammar_index];
2889 for capture in mat.captures {
2890 if capture.index == config.indent_capture_ix {
2891 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2892 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2893 } else if Some(capture.index) == config.start_capture_ix {
2894 start = Some(Point::from_ts_point(capture.node.end_position()));
2895 } else if Some(capture.index) == config.end_capture_ix {
2896 end = Some(Point::from_ts_point(capture.node.start_position()));
2897 } else if Some(capture.index) == config.outdent_capture_ix {
2898 let point = Point::from_ts_point(capture.node.start_position());
2899 outdent.get_or_insert(point);
2900 outdent_positions.push(point);
2901 }
2902 }
2903
2904 matches.advance();
2905 // in case of significant indentation expand end to outdent position
2906 let end = if significant_indentation {
2907 outdent.or(end)
2908 } else {
2909 end
2910 };
2911 if let Some((start, end)) = start.zip(end) {
2912 if start.row == end.row && (!significant_indentation || start.column < end.column) {
2913 continue;
2914 }
2915 let range = start..end;
2916 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2917 Err(ix) => indent_ranges.insert(ix, range),
2918 Ok(ix) => {
2919 let prev_range = &mut indent_ranges[ix];
2920 prev_range.end = prev_range.end.max(range.end);
2921 }
2922 }
2923 }
2924 }
2925
2926 let mut error_ranges = Vec::<Range<Point>>::new();
2927 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2928 grammar.error_query.as_ref()
2929 });
2930 while let Some(mat) = matches.peek() {
2931 let node = mat.captures[0].node;
2932 let start = Point::from_ts_point(node.start_position());
2933 let end = Point::from_ts_point(node.end_position());
2934 let range = start..end;
2935 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2936 Ok(ix) | Err(ix) => ix,
2937 };
2938 let mut end_ix = ix;
2939 while let Some(existing_range) = error_ranges.get(end_ix) {
2940 if existing_range.end < end {
2941 end_ix += 1;
2942 } else {
2943 break;
2944 }
2945 }
2946 error_ranges.splice(ix..end_ix, [range]);
2947 matches.advance();
2948 }
2949
2950 // we don't use outdent positions to truncate in case of significant indentation
2951 // rather we use them to expand (handled above)
2952 if !significant_indentation {
2953 outdent_positions.sort();
2954 for outdent_position in outdent_positions {
2955 // find the innermost indent range containing this outdent_position
2956 // set its end to the outdent position
2957 if let Some(range_to_truncate) = indent_ranges
2958 .iter_mut()
2959 .filter(|indent_range| indent_range.contains(&outdent_position))
2960 .next_back()
2961 {
2962 range_to_truncate.end = outdent_position;
2963 }
2964 }
2965 }
2966
2967 // Find the suggested indentation increases and decreases based on regexes.
2968 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2969 self.for_each_line(
2970 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2971 ..Point::new(row_range.end, 0),
2972 |row, line| {
2973 if config
2974 .decrease_indent_pattern
2975 .as_ref()
2976 .map_or(false, |regex| regex.is_match(line))
2977 {
2978 indent_change_rows.push((row, Ordering::Less));
2979 }
2980 if config
2981 .increase_indent_pattern
2982 .as_ref()
2983 .map_or(false, |regex| regex.is_match(line))
2984 {
2985 indent_change_rows.push((row + 1, Ordering::Greater));
2986 }
2987 },
2988 );
2989
2990 let mut indent_changes = indent_change_rows.into_iter().peekable();
2991 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2992 prev_non_blank_row.unwrap_or(0)
2993 } else {
2994 row_range.start.saturating_sub(1)
2995 };
2996 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2997 Some(row_range.map(move |row| {
2998 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2999
3000 let mut indent_from_prev_row = false;
3001 let mut outdent_from_prev_row = false;
3002 let mut outdent_to_row = u32::MAX;
3003 let mut from_regex = false;
3004
3005 while let Some((indent_row, delta)) = indent_changes.peek() {
3006 match indent_row.cmp(&row) {
3007 Ordering::Equal => match delta {
3008 Ordering::Less => {
3009 from_regex = true;
3010 outdent_from_prev_row = true
3011 }
3012 Ordering::Greater => {
3013 indent_from_prev_row = true;
3014 from_regex = true
3015 }
3016 _ => {}
3017 },
3018
3019 Ordering::Greater => break,
3020 Ordering::Less => {}
3021 }
3022
3023 indent_changes.next();
3024 }
3025
3026 for range in &indent_ranges {
3027 if range.start.row >= row {
3028 break;
3029 }
3030 if range.start.row == prev_row && range.end > row_start {
3031 indent_from_prev_row = true;
3032 }
3033 if significant_indentation && self.is_line_blank(row) && range.start.row == prev_row
3034 {
3035 indent_from_prev_row = true;
3036 }
3037 if !significant_indentation || !self.is_line_blank(row) {
3038 if range.end > prev_row_start && range.end <= row_start {
3039 outdent_to_row = outdent_to_row.min(range.start.row);
3040 }
3041 }
3042 }
3043
3044 let within_error = error_ranges
3045 .iter()
3046 .any(|e| e.start.row < row && e.end > row_start);
3047
3048 let suggestion = if outdent_to_row == prev_row
3049 || (outdent_from_prev_row && indent_from_prev_row)
3050 {
3051 Some(IndentSuggestion {
3052 basis_row: prev_row,
3053 delta: Ordering::Equal,
3054 within_error: within_error && !from_regex,
3055 })
3056 } else if indent_from_prev_row {
3057 Some(IndentSuggestion {
3058 basis_row: prev_row,
3059 delta: Ordering::Greater,
3060 within_error: within_error && !from_regex,
3061 })
3062 } else if outdent_to_row < prev_row {
3063 Some(IndentSuggestion {
3064 basis_row: outdent_to_row,
3065 delta: Ordering::Equal,
3066 within_error: within_error && !from_regex,
3067 })
3068 } else if outdent_from_prev_row {
3069 Some(IndentSuggestion {
3070 basis_row: prev_row,
3071 delta: Ordering::Less,
3072 within_error: within_error && !from_regex,
3073 })
3074 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3075 {
3076 Some(IndentSuggestion {
3077 basis_row: prev_row,
3078 delta: Ordering::Equal,
3079 within_error: within_error && !from_regex,
3080 })
3081 } else {
3082 None
3083 };
3084
3085 prev_row = row;
3086 prev_row_start = row_start;
3087 suggestion
3088 }))
3089 }
3090
3091 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3092 while row > 0 {
3093 row -= 1;
3094 if !self.is_line_blank(row) {
3095 return Some(row);
3096 }
3097 }
3098 None
3099 }
3100
3101 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3102 let captures = self.syntax.captures(range, &self.text, |grammar| {
3103 grammar.highlights_query.as_ref()
3104 });
3105 let highlight_maps = captures
3106 .grammars()
3107 .iter()
3108 .map(|grammar| grammar.highlight_map())
3109 .collect();
3110 (captures, highlight_maps)
3111 }
3112
3113 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3114 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3115 /// returned in chunks where each chunk has a single syntax highlighting style and
3116 /// diagnostic status.
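    ///
    /// Illustrative sketch (assumes a `BufferSnapshot` with a language attached):
    ///
    /// ```ignore
    /// // Reassemble a range of text while inspecting per-chunk highlighting.
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     if chunk.syntax_highlight_id.is_some() {
    ///         // This chunk is covered by a single syntax highlight.
    ///     }
    ///     text.push_str(chunk.text);
    /// }
    /// assert_eq!(text, snapshot.text());
    /// ```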
3117 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3118 let range = range.start.to_offset(self)..range.end.to_offset(self);
3119
3120 let mut syntax = None;
3121 if language_aware {
3122 syntax = Some(self.get_highlights(range.clone()));
3123 }
3124 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3125 let diagnostics = language_aware;
3126 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3127 }
3128
3129 pub fn highlighted_text_for_range<T: ToOffset>(
3130 &self,
3131 range: Range<T>,
3132 override_style: Option<HighlightStyle>,
3133 syntax_theme: &SyntaxTheme,
3134 ) -> HighlightedText {
3135 HighlightedText::from_buffer_range(
3136 range,
3137 &self.text,
3138 &self.syntax,
3139 override_style,
3140 syntax_theme,
3141 )
3142 }
3143
3144 /// Invokes the given callback for each line of text in the given range of the buffer.
3145 /// Uses a callback to avoid allocating a string for each line.
3146 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3147 let mut line = String::new();
3148 let mut row = range.start.row;
3149 for chunk in self
3150 .as_rope()
3151 .chunks_in_range(range.to_offset(self))
3152 .chain(["\n"])
3153 {
3154 for (newline_ix, text) in chunk.split('\n').enumerate() {
3155 if newline_ix > 0 {
3156 callback(row, &line);
3157 row += 1;
3158 line.clear();
3159 }
3160 line.push_str(text);
3161 }
3162 }
3163 }
3164
3165 /// Iterates over every [`SyntaxLayer`] in the buffer.
3166 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3167 self.syntax
3168 .layers_for_range(0..self.len(), &self.text, true)
3169 }
3170
3171 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3172 let offset = position.to_offset(self);
3173 self.syntax
3174 .layers_for_range(offset..offset, &self.text, false)
3175 .filter(|l| l.node().end_byte() > offset)
3176 .last()
3177 }
3178
3179 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3180 &self,
3181 range: Range<D>,
3182 ) -> Option<SyntaxLayer> {
3183 let range = range.to_offset(self);
3184 return self
3185 .syntax
3186 .layers_for_range(range, &self.text, false)
3187 .max_by(|a, b| {
3188 if a.depth != b.depth {
3189 a.depth.cmp(&b.depth)
3190 } else if a.offset.0 != b.offset.0 {
3191 a.offset.0.cmp(&b.offset.0)
3192 } else {
3193 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3194 }
3195 });
3196 }
3197
3198 /// Returns the main [`Language`].
3199 pub fn language(&self) -> Option<&Arc<Language>> {
3200 self.language.as_ref()
3201 }
3202
3203 /// Returns the [`Language`] at the given location.
3204 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3205 self.syntax_layer_at(position)
3206 .map(|info| info.language)
3207 .or(self.language.as_ref())
3208 }
3209
3210 /// Returns the settings for the language at the given location.
3211 pub fn settings_at<'a, D: ToOffset>(
3212 &'a self,
3213 position: D,
3214 cx: &'a App,
3215 ) -> Cow<'a, LanguageSettings> {
3216 language_settings(
3217 self.language_at(position).map(|l| l.name()),
3218 self.file.as_ref(),
3219 cx,
3220 )
3221 }
3222
3223 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3224 CharClassifier::new(self.language_scope_at(point))
3225 }
3226
3227 /// Returns the [`LanguageScope`] at the given location.
3228 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3229 let offset = position.to_offset(self);
3230 let mut scope = None;
3231 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3232
3233 // Use the layer that has the smallest node intersecting the given point.
3234 for layer in self
3235 .syntax
3236 .layers_for_range(offset..offset, &self.text, false)
3237 {
3238 let mut cursor = layer.node().walk();
3239
3240 let mut range = None;
3241 loop {
3242 let child_range = cursor.node().byte_range();
3243 if !child_range.contains(&offset) {
3244 break;
3245 }
3246
3247 range = Some(child_range);
3248 if cursor.goto_first_child_for_byte(offset).is_none() {
3249 break;
3250 }
3251 }
3252
3253 if let Some(range) = range {
3254 if smallest_range_and_depth.as_ref().map_or(
3255 true,
3256 |(smallest_range, smallest_range_depth)| {
3257 if layer.depth > *smallest_range_depth {
3258 true
3259 } else if layer.depth == *smallest_range_depth {
3260 range.len() < smallest_range.len()
3261 } else {
3262 false
3263 }
3264 },
3265 ) {
3266 smallest_range_and_depth = Some((range, layer.depth));
3267 scope = Some(LanguageScope {
3268 language: layer.language.clone(),
3269 override_id: layer.override_id(offset, &self.text),
3270 });
3271 }
3272 }
3273 }
3274
3275 scope.or_else(|| {
3276 self.language.clone().map(|language| LanguageScope {
3277 language,
3278 override_id: None,
3279 })
3280 })
3281 }
3282
3283 /// Returns a tuple of the range and character kind of the word
3284 /// surrounding the given position.
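    ///
    /// Illustrative sketch (assumes a snapshot whose text is `"let foo_bar = 1;"`):
    ///
    /// ```ignore
    /// // Offset 6 falls inside "foo_bar".
    /// let (range, kind) = snapshot.surrounding_word(6);
    /// assert_eq!(&snapshot.text()[range], "foo_bar");
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```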
3285 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3286 let mut start = start.to_offset(self);
3287 let mut end = start;
3288 let mut next_chars = self.chars_at(start).take(128).peekable();
3289 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3290
3291 let classifier = self.char_classifier_at(start);
3292 let word_kind = cmp::max(
3293 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3294 next_chars.peek().copied().map(|c| classifier.kind(c)),
3295 );
3296
3297 for ch in prev_chars {
3298 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3299 start -= ch.len_utf8();
3300 } else {
3301 break;
3302 }
3303 }
3304
3305 for ch in next_chars {
3306 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3307 end += ch.len_utf8();
3308 } else {
3309 break;
3310 }
3311 }
3312
3313 (start..end, word_kind)
3314 }
3315
3316 /// Returns the closest syntax node enclosing the given range.
3317 pub fn syntax_ancestor<'a, T: ToOffset>(
3318 &'a self,
3319 range: Range<T>,
3320 ) -> Option<tree_sitter::Node<'a>> {
3321 let range = range.start.to_offset(self)..range.end.to_offset(self);
3322 let mut result: Option<tree_sitter::Node<'a>> = None;
3323 'outer: for layer in self
3324 .syntax
3325 .layers_for_range(range.clone(), &self.text, true)
3326 {
3327 let mut cursor = layer.node().walk();
3328
3329 // Descend to the first leaf that touches the start of the range.
3330 //
3331 // If the range is non-empty and the current node ends exactly at the start,
3332 // move to the next sibling to find a node that extends beyond the start.
3333 //
3334 // If the range is empty and the current node starts after the range position,
3335 // move to the previous sibling to find the node that contains the position.
3336 while cursor.goto_first_child_for_byte(range.start).is_some() {
3337 if !range.is_empty() && cursor.node().end_byte() == range.start {
3338 cursor.goto_next_sibling();
3339 }
3340 if range.is_empty() && cursor.node().start_byte() > range.start {
3341 cursor.goto_previous_sibling();
3342 }
3343 }
3344
3345 // Ascend to the smallest ancestor that strictly contains the range.
3346 loop {
3347 let node_range = cursor.node().byte_range();
3348 if node_range.start <= range.start
3349 && node_range.end >= range.end
3350 && node_range.len() > range.len()
3351 {
3352 break;
3353 }
3354 if !cursor.goto_parent() {
3355 continue 'outer;
3356 }
3357 }
3358
3359 let left_node = cursor.node();
3360 let mut layer_result = left_node;
3361
3362 // For an empty range, try to find another node immediately to the right of the range.
3363 if left_node.end_byte() == range.start {
3364 let mut right_node = None;
3365 while !cursor.goto_next_sibling() {
3366 if !cursor.goto_parent() {
3367 break;
3368 }
3369 }
3370
3371 while cursor.node().start_byte() == range.start {
3372 right_node = Some(cursor.node());
3373 if !cursor.goto_first_child() {
3374 break;
3375 }
3376 }
3377
3378 // If there is a candidate node on both sides of the (empty) range, then
3379 // decide between the two by favoring a named node over an anonymous token.
3380 // If both nodes are the same in that regard, favor the right one.
3381 if let Some(right_node) = right_node {
3382 if right_node.is_named() || !left_node.is_named() {
3383 layer_result = right_node;
3384 }
3385 }
3386 }
3387
3388 if let Some(previous_result) = &result {
3389 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3390 continue;
3391 }
3392 }
3393 result = Some(layer_result);
3394 }
3395
3396 result
3397 }
3398
3399 /// Returns the root syntax node within the given row.
3400 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node> {
3401 let start_offset = position.to_offset(self);
3402
3403 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3404
3405 let layer = self
3406 .syntax
3407 .layers_for_range(start_offset..start_offset, &self.text, true)
3408 .next()?;
3409
3410 let mut cursor = layer.node().walk();
3411
3412 // Descend to the first leaf that touches the start of the range.
3413 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3414 if cursor.node().end_byte() == start_offset {
3415 cursor.goto_next_sibling();
3416 }
3417 }
3418
3419 // Ascend to the root node within the same row.
3420 while cursor.goto_parent() {
3421 if cursor.node().start_position().row != row {
3422 break;
3423 }
3424 }
3425
3426 return Some(cursor.node());
3427 }
3428
3429 /// Returns the outline for the buffer.
3430 ///
3431 /// This method allows passing an optional [`SyntaxTheme`] to
3432 /// syntax-highlight the returned symbols.
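    ///
    /// Illustrative sketch (assumes a snapshot of a buffer whose language defines an
    /// outline query):
    ///
    /// ```ignore
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in &outline.items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```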
3433 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3434 self.outline_items_containing(0..self.len(), true, theme)
3435 .map(Outline::new)
3436 }
3437
3438 /// Returns all the symbols that contain the given position.
3439 ///
3440 /// This method allows passing an optional [`SyntaxTheme`] to
3441 /// syntax-highlight the returned symbols.
3442 pub fn symbols_containing<T: ToOffset>(
3443 &self,
3444 position: T,
3445 theme: Option<&SyntaxTheme>,
3446 ) -> Option<Vec<OutlineItem<Anchor>>> {
3447 let position = position.to_offset(self);
3448 let mut items = self.outline_items_containing(
3449 position.saturating_sub(1)..self.len().min(position + 1),
3450 false,
3451 theme,
3452 )?;
3453 let mut prev_depth = None;
3454 items.retain(|item| {
3455 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3456 prev_depth = Some(item.depth);
3457 result
3458 });
3459 Some(items)
3460 }
3461
3462 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3463 let range = range.to_offset(self);
3464 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3465 grammar.outline_config.as_ref().map(|c| &c.query)
3466 });
3467 let configs = matches
3468 .grammars()
3469 .iter()
3470 .map(|g| g.outline_config.as_ref().unwrap())
3471 .collect::<Vec<_>>();
3472
3473 while let Some(mat) = matches.peek() {
3474 let config = &configs[mat.grammar_index];
3475 let containing_item_node = maybe!({
3476 let item_node = mat.captures.iter().find_map(|cap| {
3477 if cap.index == config.item_capture_ix {
3478 Some(cap.node)
3479 } else {
3480 None
3481 }
3482 })?;
3483
3484 let item_byte_range = item_node.byte_range();
3485 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3486 None
3487 } else {
3488 Some(item_node)
3489 }
3490 });
3491
3492 if let Some(item_node) = containing_item_node {
3493 return Some(
3494 Point::from_ts_point(item_node.start_position())
3495 ..Point::from_ts_point(item_node.end_position()),
3496 );
3497 }
3498
3499 matches.advance();
3500 }
3501 None
3502 }
3503
    /// Returns the outline items that intersect the given range, with each item's depth
    /// reflecting how deeply it is nested within other items.
    ///
    /// When `include_extra_context` is false, captures marked as extra context in the outline
    /// query are omitted from each item's text.
    pub fn outline_items_containing<T: ToOffset>(
3505 &self,
3506 range: Range<T>,
3507 include_extra_context: bool,
3508 theme: Option<&SyntaxTheme>,
3509 ) -> Option<Vec<OutlineItem<Anchor>>> {
3510 let range = range.to_offset(self);
3511 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3512 grammar.outline_config.as_ref().map(|c| &c.query)
3513 });
3514 let configs = matches
3515 .grammars()
3516 .iter()
3517 .map(|g| g.outline_config.as_ref().unwrap())
3518 .collect::<Vec<_>>();
3519
3520 let mut items = Vec::new();
3521 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3522 while let Some(mat) = matches.peek() {
3523 let config = &configs[mat.grammar_index];
3524 if let Some(item) =
3525 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3526 {
3527 items.push(item);
3528 } else if let Some(capture) = mat
3529 .captures
3530 .iter()
3531 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3532 {
3533 let capture_range = capture.node.start_position()..capture.node.end_position();
3534 let mut capture_row_range =
3535 capture_range.start.row as u32..capture_range.end.row as u32;
3536 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3537 {
3538 capture_row_range.end -= 1;
3539 }
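                // Merge adjacent or overlapping annotation rows into a single row range.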
3540 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3541 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3542 last_row_range.end = capture_row_range.end;
3543 } else {
3544 annotation_row_ranges.push(capture_row_range);
3545 }
3546 } else {
3547 annotation_row_ranges.push(capture_row_range);
3548 }
3549 }
3550 matches.advance();
3551 }
3552
3553 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3554
3555 // Assign depths based on containment relationships and convert to anchors.
3556 let mut item_ends_stack = Vec::<Point>::new();
3557 let mut anchor_items = Vec::new();
3558 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3559 for item in items {
3560 while let Some(last_end) = item_ends_stack.last().copied() {
3561 if last_end < item.range.end {
3562 item_ends_stack.pop();
3563 } else {
3564 break;
3565 }
3566 }
3567
3568 let mut annotation_row_range = None;
3569 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3570 let row_preceding_item = item.range.start.row.saturating_sub(1);
3571 if next_annotation_row_range.end < row_preceding_item {
3572 annotation_row_ranges.next();
3573 } else {
3574 if next_annotation_row_range.end == row_preceding_item {
3575 annotation_row_range = Some(next_annotation_row_range.clone());
3576 annotation_row_ranges.next();
3577 }
3578 break;
3579 }
3580 }
3581
3582 anchor_items.push(OutlineItem {
3583 depth: item_ends_stack.len(),
3584 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3585 text: item.text,
3586 highlight_ranges: item.highlight_ranges,
3587 name_ranges: item.name_ranges,
3588 body_range: item.body_range.map(|body_range| {
3589 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3590 }),
3591 annotation_range: annotation_row_range.map(|annotation_range| {
3592 self.anchor_after(Point::new(annotation_range.start, 0))
3593 ..self.anchor_before(Point::new(
3594 annotation_range.end,
3595 self.line_len(annotation_range.end),
3596 ))
3597 }),
3598 });
3599 item_ends_stack.push(item.range.end);
3600 }
3601
3602 Some(anchor_items)
3603 }
3604
3605 fn next_outline_item(
3606 &self,
3607 config: &OutlineConfig,
3608 mat: &SyntaxMapMatch,
3609 range: &Range<usize>,
3610 include_extra_context: bool,
3611 theme: Option<&SyntaxTheme>,
3612 ) -> Option<OutlineItem<Point>> {
3613 let item_node = mat.captures.iter().find_map(|cap| {
3614 if cap.index == config.item_capture_ix {
3615 Some(cap.node)
3616 } else {
3617 None
3618 }
3619 })?;
3620
3621 let item_byte_range = item_node.byte_range();
3622 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3623 return None;
3624 }
3625 let item_point_range = Point::from_ts_point(item_node.start_position())
3626 ..Point::from_ts_point(item_node.end_position());
3627
3628 let mut open_point = None;
3629 let mut close_point = None;
3630 let mut buffer_ranges = Vec::new();
3631 for capture in mat.captures {
3632 let node_is_name;
3633 if capture.index == config.name_capture_ix {
3634 node_is_name = true;
3635 } else if Some(capture.index) == config.context_capture_ix
3636 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3637 {
3638 node_is_name = false;
3639 } else {
3640 if Some(capture.index) == config.open_capture_ix {
3641 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3642 } else if Some(capture.index) == config.close_capture_ix {
3643 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3644 }
3645
3646 continue;
3647 }
3648
3649 let mut range = capture.node.start_byte()..capture.node.end_byte();
3650 let start = capture.node.start_position();
3651 if capture.node.end_position().row > start.row {
3652 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3653 }
3654
3655 if !range.is_empty() {
3656 buffer_ranges.push((range, node_is_name));
3657 }
3658 }
3659 if buffer_ranges.is_empty() {
3660 return None;
3661 }
3662 let mut text = String::new();
3663 let mut highlight_ranges = Vec::new();
3664 let mut name_ranges = Vec::new();
3665 let mut chunks = self.chunks(
3666 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3667 true,
3668 );
3669 let mut last_buffer_range_end = 0;
3670
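        // Build the item's display text by concatenating the name/context captures, recording
        // syntax-highlight ranges and which byte ranges of the text correspond to the name.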
3671 for (buffer_range, is_name) in buffer_ranges {
3672 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3673 if space_added {
3674 text.push(' ');
3675 }
3676 let before_append_len = text.len();
3677 let mut offset = buffer_range.start;
3678 chunks.seek(buffer_range.clone());
3679 for mut chunk in chunks.by_ref() {
3680 if chunk.text.len() > buffer_range.end - offset {
3681 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3682 offset = buffer_range.end;
3683 } else {
3684 offset += chunk.text.len();
3685 }
3686 let style = chunk
3687 .syntax_highlight_id
3688 .zip(theme)
3689 .and_then(|(highlight, theme)| highlight.style(theme));
3690 if let Some(style) = style {
3691 let start = text.len();
3692 let end = start + chunk.text.len();
3693 highlight_ranges.push((start..end, style));
3694 }
3695 text.push_str(chunk.text);
3696 if offset >= buffer_range.end {
3697 break;
3698 }
3699 }
3700 if is_name {
3701 let after_append_len = text.len();
3702 let start = if space_added && !name_ranges.is_empty() {
3703 before_append_len - 1
3704 } else {
3705 before_append_len
3706 };
3707 name_ranges.push(start..after_append_len);
3708 }
3709 last_buffer_range_end = buffer_range.end;
3710 }
3711
3712 Some(OutlineItem {
3713 depth: 0, // We'll calculate the depth later
3714 range: item_point_range,
3715 text,
3716 highlight_ranges,
3717 name_ranges,
3718 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3719 annotation_range: None,
3720 })
3721 }
3722
    /// Returns the ranges of function bodies (the `InsideFunction` text object) within the given
    /// range, e.g. for folding.
    pub fn function_body_fold_ranges<T: ToOffset>(
3724 &self,
3725 within: Range<T>,
3726 ) -> impl Iterator<Item = Range<usize>> + '_ {
3727 self.text_object_ranges(within, TreeSitterOptions::default())
3728 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3729 }
3730
    /// For each grammar active in the given range, runs the [`tree_sitter::Query`] returned by
    /// the provided callback against that grammar's layer.
3733 pub fn matches(
3734 &self,
3735 range: Range<usize>,
3736 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3737 ) -> SyntaxMapMatches {
3738 self.syntax.matches(range, self, query)
3739 }
3740
    /// Returns all bracket pair matches overlapping the given range, including newline-only
    /// pairs.
    pub fn all_bracket_ranges(
3742 &self,
3743 range: Range<usize>,
3744 ) -> impl Iterator<Item = BracketMatch> + '_ {
3745 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3746 grammar.brackets_config.as_ref().map(|c| &c.query)
3747 });
3748 let configs = matches
3749 .grammars()
3750 .iter()
3751 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3752 .collect::<Vec<_>>();
3753
3754 iter::from_fn(move || {
3755 while let Some(mat) = matches.peek() {
3756 let mut open = None;
3757 let mut close = None;
3758 let config = &configs[mat.grammar_index];
3759 let pattern = &config.patterns[mat.pattern_index];
3760 for capture in mat.captures {
3761 if capture.index == config.open_capture_ix {
3762 open = Some(capture.node.byte_range());
3763 } else if capture.index == config.close_capture_ix {
3764 close = Some(capture.node.byte_range());
3765 }
3766 }
3767
3768 matches.advance();
3769
3770 let Some((open_range, close_range)) = open.zip(close) else {
3771 continue;
3772 };
3773
3774 let bracket_range = open_range.start..=close_range.end;
3775 if !bracket_range.overlaps(&range) {
3776 continue;
3777 }
3778
3779 return Some(BracketMatch {
3780 open_range,
3781 close_range,
3782 newline_only: pattern.newline_only,
3783 });
3784 }
3785 None
3786 })
3787 }
3788
    /// Returns bracket range pairs overlapping or adjacent to `range`.
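    ///
    /// # Example
    ///
    /// An illustrative sketch (marked `ignore`); `snapshot` and a cursor `offset` are assumed to
    /// already exist:
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(offset..offset) {
    ///     // `open_range` and `close_range` are byte ranges of the bracket tokens themselves.
    ///     println!("{:?} .. {:?}", pair.open_range, pair.close_range);
    /// }
    /// ```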
3790 pub fn bracket_ranges<T: ToOffset>(
3791 &self,
3792 range: Range<T>,
3793 ) -> impl Iterator<Item = BracketMatch> + '_ {
3794 // Find bracket pairs that *inclusively* contain the given range.
3795 let range = range.start.to_offset(self).saturating_sub(1)
3796 ..self.len().min(range.end.to_offset(self) + 1);
3797 self.all_bracket_ranges(range)
3798 .filter(|pair| !pair.newline_only)
3799 }
3800
    /// Returns the ranges of text objects (as defined by each grammar's text-object query) that
    /// overlap the given range, together with the kind of [`TextObject`] each one represents.
    pub fn text_object_ranges<T: ToOffset>(
3802 &self,
3803 range: Range<T>,
3804 options: TreeSitterOptions,
3805 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3806 let range = range.start.to_offset(self).saturating_sub(1)
3807 ..self.len().min(range.end.to_offset(self) + 1);
3808
3809 let mut matches =
3810 self.syntax
3811 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3812 grammar.text_object_config.as_ref().map(|c| &c.query)
3813 });
3814
3815 let configs = matches
3816 .grammars()
3817 .iter()
3818 .map(|grammar| grammar.text_object_config.as_ref())
3819 .collect::<Vec<_>>();
3820
3821 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3822
3823 iter::from_fn(move || {
3824 loop {
3825 while let Some(capture) = captures.pop() {
3826 if capture.0.overlaps(&range) {
3827 return Some(capture);
3828 }
3829 }
3830
3831 let mat = matches.peek()?;
3832
3833 let Some(config) = configs[mat.grammar_index].as_ref() else {
3834 matches.advance();
3835 continue;
3836 };
3837
3838 for capture in mat.captures {
3839 let Some(ix) = config
3840 .text_objects_by_capture_ix
3841 .binary_search_by_key(&capture.index, |e| e.0)
3842 .ok()
3843 else {
3844 continue;
3845 };
3846 let text_object = config.text_objects_by_capture_ix[ix].1;
3847 let byte_range = capture.node.byte_range();
3848
3849 let mut found = false;
3850 for (range, existing) in captures.iter_mut() {
3851 if existing == &text_object {
3852 range.start = range.start.min(byte_range.start);
3853 range.end = range.end.max(byte_range.end);
3854 found = true;
3855 break;
3856 }
3857 }
3858
3859 if !found {
3860 captures.push((byte_range, text_object));
3861 }
3862 }
3863
3864 matches.advance();
3865 }
3866 })
3867 }
3868
    /// Returns enclosing bracket ranges containing the given range.
3870 pub fn enclosing_bracket_ranges<T: ToOffset>(
3871 &self,
3872 range: Range<T>,
3873 ) -> impl Iterator<Item = BracketMatch> + '_ {
3874 let range = range.start.to_offset(self)..range.end.to_offset(self);
3875
3876 self.bracket_ranges(range.clone()).filter(move |pair| {
3877 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3878 })
3879 }
3880
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no
    /// brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket pairs are considered.
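    ///
    /// # Example
    ///
    /// An illustrative sketch (marked `ignore`); `snapshot` and a cursor `offset` are assumed:
    ///
    /// ```ignore
    /// // Find the closest pair of brackets around the cursor, considering every pair.
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(offset..offset, None) {
    ///     println!("open: {:?}, close: {:?}", open, close);
    /// }
    /// ```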
3884 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3885 &self,
3886 range: Range<T>,
3887 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3888 ) -> Option<(Range<usize>, Range<usize>)> {
3889 let range = range.start.to_offset(self)..range.end.to_offset(self);
3890
3891 // Get the ranges of the innermost pair of brackets.
3892 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3893
3894 for pair in self.enclosing_bracket_ranges(range.clone()) {
3895 if let Some(range_filter) = range_filter {
3896 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3897 continue;
3898 }
3899 }
3900
3901 let len = pair.close_range.end - pair.open_range.start;
3902
3903 if let Some((existing_open, existing_close)) = &result {
3904 let existing_len = existing_close.end - existing_open.start;
3905 if len > existing_len {
3906 continue;
3907 }
3908 }
3909
3910 result = Some((pair.open_range, pair.close_range));
3911 }
3912
3913 result
3914 }
3915
3916 /// Returns anchor ranges for any matches of the redaction query.
3917 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3918 /// will be run on the relevant section of the buffer.
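    ///
    /// # Example
    ///
    /// An illustrative sketch (marked `ignore`); a `snapshot: BufferSnapshot` is assumed to exist:
    ///
    /// ```ignore
    /// // Byte ranges whose contents should be visually obscured (e.g. secrets in an env file).
    /// let ranges: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```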
3919 pub fn redacted_ranges<T: ToOffset>(
3920 &self,
3921 range: Range<T>,
3922 ) -> impl Iterator<Item = Range<usize>> + '_ {
3923 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3924 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3925 grammar
3926 .redactions_config
3927 .as_ref()
3928 .map(|config| &config.query)
3929 });
3930
3931 let configs = syntax_matches
3932 .grammars()
3933 .iter()
3934 .map(|grammar| grammar.redactions_config.as_ref())
3935 .collect::<Vec<_>>();
3936
3937 iter::from_fn(move || {
3938 let redacted_range = syntax_matches
3939 .peek()
3940 .and_then(|mat| {
3941 configs[mat.grammar_index].and_then(|config| {
3942 mat.captures
3943 .iter()
3944 .find(|capture| capture.index == config.redaction_capture_ix)
3945 })
3946 })
3947 .map(|mat| mat.node.byte_range());
3948 syntax_matches.advance();
3949 redacted_range
3950 })
3951 }
3952
    /// Returns the ranges of language injections that intersect the given range, along with the
    /// language injected into each of them.
    pub fn injections_intersecting_range<T: ToOffset>(
3954 &self,
3955 range: Range<T>,
3956 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3957 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3958
3959 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3960 grammar
3961 .injection_config
3962 .as_ref()
3963 .map(|config| &config.query)
3964 });
3965
3966 let configs = syntax_matches
3967 .grammars()
3968 .iter()
3969 .map(|grammar| grammar.injection_config.as_ref())
3970 .collect::<Vec<_>>();
3971
3972 iter::from_fn(move || {
3973 let ranges = syntax_matches.peek().and_then(|mat| {
3974 let config = &configs[mat.grammar_index]?;
3975 let content_capture_range = mat.captures.iter().find_map(|capture| {
3976 if capture.index == config.content_capture_ix {
3977 Some(capture.node.byte_range())
3978 } else {
3979 None
3980 }
3981 })?;
3982 let language = self.language_at(content_capture_range.start)?;
3983 Some((content_capture_range, language))
3984 });
3985 syntax_matches.advance();
3986 ranges
3987 })
3988 }
3989
    /// Returns the runnables (e.g. tests) detected by each grammar's runnable query within the
    /// given range.
    pub fn runnable_ranges(
3991 &self,
3992 offset_range: Range<usize>,
3993 ) -> impl Iterator<Item = RunnableRange> + '_ {
3994 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3995 grammar.runnable_config.as_ref().map(|config| &config.query)
3996 });
3997
3998 let test_configs = syntax_matches
3999 .grammars()
4000 .iter()
4001 .map(|grammar| grammar.runnable_config.as_ref())
4002 .collect::<Vec<_>>();
4003
4004 iter::from_fn(move || {
4005 loop {
4006 let mat = syntax_matches.peek()?;
4007
4008 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4009 let mut run_range = None;
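                    // The runnable's `full_range` is the union of every capture's byte range in
                    // this match.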
4010 let full_range = mat.captures.iter().fold(
4011 Range {
4012 start: usize::MAX,
4013 end: 0,
4014 },
4015 |mut acc, next| {
4016 let byte_range = next.node.byte_range();
4017 if acc.start > byte_range.start {
4018 acc.start = byte_range.start;
4019 }
4020 if acc.end < byte_range.end {
4021 acc.end = byte_range.end;
4022 }
4023 acc
4024 },
4025 );
4026 if full_range.start > full_range.end {
4027 // We did not find a full spanning range of this match.
4028 return None;
4029 }
4030 let extra_captures: SmallVec<[_; 1]> =
4031 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4032 test_configs
4033 .extra_captures
4034 .get(capture.index as usize)
4035 .cloned()
4036 .and_then(|tag_name| match tag_name {
4037 RunnableCapture::Named(name) => {
4038 Some((capture.node.byte_range(), name))
4039 }
4040 RunnableCapture::Run => {
4041 let _ = run_range.insert(capture.node.byte_range());
4042 None
4043 }
4044 })
4045 }));
4046 let run_range = run_range?;
4047 let tags = test_configs
4048 .query
4049 .property_settings(mat.pattern_index)
4050 .iter()
4051 .filter_map(|property| {
4052 if *property.key == *"tag" {
4053 property
4054 .value
4055 .as_ref()
4056 .map(|value| RunnableTag(value.to_string().into()))
4057 } else {
4058 None
4059 }
4060 })
4061 .collect();
4062 let extra_captures = extra_captures
4063 .into_iter()
4064 .map(|(range, name)| {
4065 (
4066 name.to_string(),
4067 self.text_for_range(range.clone()).collect::<String>(),
4068 )
4069 })
4070 .collect();
4071 // All tags should have the same range.
4072 Some(RunnableRange {
4073 run_range,
4074 full_range,
4075 runnable: Runnable {
4076 tags,
4077 language: mat.language,
4078 buffer: self.remote_id(),
4079 },
4080 extra_captures,
4081 buffer_id: self.remote_id(),
4082 })
4083 });
4084
4085 syntax_matches.advance();
4086 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. But if a match
                    // did not contain a run marker, we don't want to end this iterator early;
                    // instead we loop around and try the next match.
4089 return test_range;
4090 }
4091 }
4092 })
4093 }
4094
    /// Returns the selections of peers that intersect the given range, optionally including the
    /// local replica's selections.
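    ///
    /// # Example
    ///
    /// An illustrative sketch (marked `ignore`); `snapshot` and an anchor `range` are assumed:
    ///
    /// ```ignore
    /// for (replica_id, _line_mode, _cursor_shape, selections) in
    ///     snapshot.selections_in_range(range.clone(), false)
    /// {
    ///     println!("peer {:?}: {} selections", replica_id, selections.count());
    /// }
    /// ```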
4096 #[allow(clippy::type_complexity)]
4097 pub fn selections_in_range(
4098 &self,
4099 range: Range<Anchor>,
4100 include_local: bool,
4101 ) -> impl Iterator<
4102 Item = (
4103 ReplicaId,
4104 bool,
4105 CursorShape,
4106 impl Iterator<Item = &Selection<Anchor>> + '_,
4107 ),
4108 > + '_ {
4109 self.remote_selections
4110 .iter()
4111 .filter(move |(replica_id, set)| {
4112 (include_local || **replica_id != self.text.replica_id())
4113 && !set.selections.is_empty()
4114 })
4115 .map(move |(replica_id, set)| {
4116 let start_ix = match set.selections.binary_search_by(|probe| {
4117 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4118 }) {
4119 Ok(ix) | Err(ix) => ix,
4120 };
4121 let end_ix = match set.selections.binary_search_by(|probe| {
4122 probe.start.cmp(&range.end, self).then(Ordering::Less)
4123 }) {
4124 Ok(ix) | Err(ix) => ix,
4125 };
4126
4127 (
4128 *replica_id,
4129 set.line_mode,
4130 set.cursor_shape,
4131 set.selections[start_ix..end_ix].iter(),
4132 )
4133 })
4134 }
4135
    /// Returns whether the buffer contains any diagnostics.
4137 pub fn has_diagnostics(&self) -> bool {
4138 !self.diagnostics.is_empty()
4139 }
4140
4141 /// Returns all the diagnostics intersecting the given range.
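    ///
    /// # Example
    ///
    /// An illustrative sketch (marked `ignore`); it assumes a `snapshot: BufferSnapshot` and that
    /// `Point` is used as the resolved coordinate type:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, Point>(0..snapshot.len(), false) {
    ///     eprintln!("{:?}: {}", entry.range.start, entry.diagnostic.message);
    /// }
    /// ```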
4142 pub fn diagnostics_in_range<'a, T, O>(
4143 &'a self,
4144 search_range: Range<T>,
4145 reversed: bool,
4146 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4147 where
4148 T: 'a + Clone + ToOffset,
4149 O: 'a + FromAnchor,
4150 {
4151 let mut iterators: Vec<_> = self
4152 .diagnostics
4153 .iter()
4154 .map(|(_, collection)| {
4155 collection
4156 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4157 .peekable()
4158 })
4159 .collect();
4160
4161 std::iter::from_fn(move || {
4162 let (next_ix, _) = iterators
4163 .iter_mut()
4164 .enumerate()
4165 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4166 .min_by(|(_, a), (_, b)| {
4167 let cmp = a
4168 .range
4169 .start
4170 .cmp(&b.range.start, self)
4171 // when range is equal, sort by diagnostic severity
4172 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4173 // and stabilize order with group_id
4174 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4175 if reversed { cmp.reverse() } else { cmp }
4176 })?;
4177 iterators[next_ix]
4178 .next()
4179 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4180 diagnostic,
4181 range: FromAnchor::from_anchor(&range.start, self)
4182 ..FromAnchor::from_anchor(&range.end, self),
4183 })
4184 })
4185 }
4186
4187 /// Returns all the diagnostic groups associated with the given
4188 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4190 pub fn diagnostic_groups(
4191 &self,
4192 language_server_id: Option<LanguageServerId>,
4193 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4194 let mut groups = Vec::new();
4195
4196 if let Some(language_server_id) = language_server_id {
4197 if let Ok(ix) = self
4198 .diagnostics
4199 .binary_search_by_key(&language_server_id, |e| e.0)
4200 {
4201 self.diagnostics[ix]
4202 .1
4203 .groups(language_server_id, &mut groups, self);
4204 }
4205 } else {
4206 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4207 diagnostics.groups(*language_server_id, &mut groups, self);
4208 }
4209 }
4210
4211 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4212 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4213 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4214 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4215 });
4216
4217 groups
4218 }
4219
4220 /// Returns an iterator over the diagnostics for the given group.
4221 pub fn diagnostic_group<O>(
4222 &self,
4223 group_id: usize,
4224 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4225 where
4226 O: FromAnchor + 'static,
4227 {
4228 self.diagnostics
4229 .iter()
4230 .flat_map(move |(_, set)| set.group(group_id, self))
4231 }
4232
4233 /// An integer version number that accounts for all updates besides
4234 /// the buffer's text itself (which is versioned via a version vector).
4235 pub fn non_text_state_update_count(&self) -> usize {
4236 self.non_text_state_update_count
4237 }
4238
    /// Returns a snapshot of the underlying file, if any.
4240 pub fn file(&self) -> Option<&Arc<dyn File>> {
4241 self.file.as_ref()
4242 }
4243
4244 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4245 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4246 if let Some(file) = self.file() {
4247 if file.path().file_name().is_none() || include_root {
4248 Some(file.full_path(cx))
4249 } else {
4250 Some(file.path().to_path_buf())
4251 }
4252 } else {
4253 None
4254 }
4255 }
4256
    /// Collects the words in the given range, mapped to their anchor ranges, applying the filters
    /// described by the [`WordsQuery`].
    pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4258 let query_str = query.fuzzy_contents;
4259 if query_str.map_or(false, |query| query.is_empty()) {
4260 return BTreeMap::default();
4261 }
4262
4263 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4264 language,
4265 override_id: None,
4266 }));
4267
4268 let mut query_ix = 0;
4269 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4270 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4271
4272 let mut words = BTreeMap::default();
4273 let mut current_word_start_ix = None;
4274 let mut chunk_ix = query.range.start;
4275 for chunk in self.chunks(query.range, false) {
4276 for (i, c) in chunk.text.char_indices() {
4277 let ix = chunk_ix + i;
4278 if classifier.is_word(c) {
4279 if current_word_start_ix.is_none() {
4280 current_word_start_ix = Some(ix);
4281 }
4282
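                    // Case-insensitively advance through the fuzzy query; a word matches once
                    // every query character has been seen, in order.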
4283 if let Some(query_chars) = &query_chars {
4284 if query_ix < query_len {
4285 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4286 query_ix += 1;
4287 }
4288 }
4289 }
4290 continue;
4291 } else if let Some(word_start) = current_word_start_ix.take() {
4292 if query_ix == query_len {
4293 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4294 let mut word_text = self.text_for_range(word_start..ix).peekable();
4295 let first_char = word_text
4296 .peek()
4297 .and_then(|first_chunk| first_chunk.chars().next());
                        // As a heuristic to reduce useless completions, optionally skip "words"
                        // that start with a digit.
4299 if !query.skip_digits
4300 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4301 {
4302 words.insert(word_text.collect(), word_range);
4303 }
4304 }
4305 }
4306 query_ix = 0;
4307 }
4308 chunk_ix += chunk.text.len();
4309 }
4310
4311 words
4312 }
4313}
4314
4315pub struct WordsQuery<'a> {
    /// When set, only returns words that contain all characters of the fuzzy query string, in order.
4317 pub fuzzy_contents: Option<&'a str>,
4318 /// Skips words that start with a digit.
4319 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4321 pub range: Range<usize>,
4322}
4323
4324fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4325 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4326}
4327
/// Computes the leading indentation of the given character stream, stopping at the first
/// character that is neither a space nor a tab.
fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4329 let mut result = IndentSize::spaces(0);
4330 for c in text {
4331 let kind = match c {
4332 ' ' => IndentKind::Space,
4333 '\t' => IndentKind::Tab,
4334 _ => break,
4335 };
4336 if result.len == 0 {
4337 result.kind = kind;
4338 }
4339 result.len += 1;
4340 }
4341 result
4342}
4343
4344impl Clone for BufferSnapshot {
4345 fn clone(&self) -> Self {
4346 Self {
4347 text: self.text.clone(),
4348 syntax: self.syntax.clone(),
4349 file: self.file.clone(),
4350 remote_selections: self.remote_selections.clone(),
4351 diagnostics: self.diagnostics.clone(),
4352 language: self.language.clone(),
4353 non_text_state_update_count: self.non_text_state_update_count,
4354 }
4355 }
4356}
4357
4358impl Deref for BufferSnapshot {
4359 type Target = text::BufferSnapshot;
4360
4361 fn deref(&self) -> &Self::Target {
4362 &self.text
4363 }
4364}
4365
4366unsafe impl Send for BufferChunks<'_> {}
4367
4368impl<'a> BufferChunks<'a> {
4369 pub(crate) fn new(
4370 text: &'a Rope,
4371 range: Range<usize>,
4372 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4373 diagnostics: bool,
4374 buffer_snapshot: Option<&'a BufferSnapshot>,
4375 ) -> Self {
4376 let mut highlights = None;
4377 if let Some((captures, highlight_maps)) = syntax {
4378 highlights = Some(BufferChunkHighlights {
4379 captures,
4380 next_capture: None,
4381 stack: Default::default(),
4382 highlight_maps,
4383 })
4384 }
4385
4386 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4387 let chunks = text.chunks_in_range(range.clone());
4388
4389 let mut this = BufferChunks {
4390 range,
4391 buffer_snapshot,
4392 chunks,
4393 diagnostic_endpoints,
4394 error_depth: 0,
4395 warning_depth: 0,
4396 information_depth: 0,
4397 hint_depth: 0,
4398 unnecessary_depth: 0,
4399 underline: true,
4400 highlights,
4401 };
4402 this.initialize_diagnostic_endpoints();
4403 this
4404 }
4405
    /// Seeks to the given byte range in the buffer.
4407 pub fn seek(&mut self, range: Range<usize>) {
4408 let old_range = std::mem::replace(&mut self.range, range.clone());
4409 self.chunks.set_range(self.range.clone());
4410 if let Some(highlights) = self.highlights.as_mut() {
4411 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4412 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4413 highlights
4414 .stack
4415 .retain(|(end_offset, _)| *end_offset > range.start);
4416 if let Some(capture) = &highlights.next_capture {
4417 if range.start >= capture.node.start_byte() {
4418 let next_capture_end = capture.node.end_byte();
4419 if range.start < next_capture_end {
4420 highlights.stack.push((
4421 next_capture_end,
4422 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4423 ));
4424 }
4425 highlights.next_capture.take();
4426 }
4427 }
4428 } else if let Some(snapshot) = self.buffer_snapshot {
4429 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4430 *highlights = BufferChunkHighlights {
4431 captures,
4432 next_capture: None,
4433 stack: Default::default(),
4434 highlight_maps,
4435 };
4436 } else {
4437 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4438 // Seeking such BufferChunks is not supported.
4439 debug_assert!(
4440 false,
4441 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4442 );
4443 }
4444
4445 highlights.captures.set_byte_range(self.range.clone());
4446 self.initialize_diagnostic_endpoints();
4447 }
4448 }
4449
4450 fn initialize_diagnostic_endpoints(&mut self) {
4451 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4452 if let Some(buffer) = self.buffer_snapshot {
4453 let mut diagnostic_endpoints = Vec::new();
4454 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4455 diagnostic_endpoints.push(DiagnosticEndpoint {
4456 offset: entry.range.start,
4457 is_start: true,
4458 severity: entry.diagnostic.severity,
4459 is_unnecessary: entry.diagnostic.is_unnecessary,
4460 underline: entry.diagnostic.underline,
4461 });
4462 diagnostic_endpoints.push(DiagnosticEndpoint {
4463 offset: entry.range.end,
4464 is_start: false,
4465 severity: entry.diagnostic.severity,
4466 is_unnecessary: entry.diagnostic.is_unnecessary,
4467 underline: entry.diagnostic.underline,
4468 });
4469 }
4470 diagnostic_endpoints
4471 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4472 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4473 self.hint_depth = 0;
4474 self.error_depth = 0;
4475 self.warning_depth = 0;
4476 self.information_depth = 0;
4477 }
4478 }
4479 }
4480
4481 /// The current byte offset in the buffer.
4482 pub fn offset(&self) -> usize {
4483 self.range.start
4484 }
4485
4486 pub fn range(&self) -> Range<usize> {
4487 self.range.clone()
4488 }
4489
4490 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4491 let depth = match endpoint.severity {
4492 DiagnosticSeverity::ERROR => &mut self.error_depth,
4493 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4494 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4495 DiagnosticSeverity::HINT => &mut self.hint_depth,
4496 _ => return,
4497 };
4498 if endpoint.is_start {
4499 *depth += 1;
4500 } else {
4501 *depth -= 1;
4502 }
4503
4504 if endpoint.is_unnecessary {
4505 if endpoint.is_start {
4506 self.unnecessary_depth += 1;
4507 } else {
4508 self.unnecessary_depth -= 1;
4509 }
4510 }
4511 }
4512
4513 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4514 if self.error_depth > 0 {
4515 Some(DiagnosticSeverity::ERROR)
4516 } else if self.warning_depth > 0 {
4517 Some(DiagnosticSeverity::WARNING)
4518 } else if self.information_depth > 0 {
4519 Some(DiagnosticSeverity::INFORMATION)
4520 } else if self.hint_depth > 0 {
4521 Some(DiagnosticSeverity::HINT)
4522 } else {
4523 None
4524 }
4525 }
4526
4527 fn current_code_is_unnecessary(&self) -> bool {
4528 self.unnecessary_depth > 0
4529 }
4530}
4531
4532impl<'a> Iterator for BufferChunks<'a> {
4533 type Item = Chunk<'a>;
4534
4535 fn next(&mut self) -> Option<Self::Item> {
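        // Each emitted chunk is cut at the nearest of: the end of the current rope chunk, the
        // next syntax-highlight capture boundary, or the next diagnostic boundary, so that
        // highlighting and diagnostic state are uniform within a single chunk.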
4536 let mut next_capture_start = usize::MAX;
4537 let mut next_diagnostic_endpoint = usize::MAX;
4538
4539 if let Some(highlights) = self.highlights.as_mut() {
4540 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4541 if *parent_capture_end <= self.range.start {
4542 highlights.stack.pop();
4543 } else {
4544 break;
4545 }
4546 }
4547
4548 if highlights.next_capture.is_none() {
4549 highlights.next_capture = highlights.captures.next();
4550 }
4551
4552 while let Some(capture) = highlights.next_capture.as_ref() {
4553 if self.range.start < capture.node.start_byte() {
4554 next_capture_start = capture.node.start_byte();
4555 break;
4556 } else {
4557 let highlight_id =
4558 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4559 highlights
4560 .stack
4561 .push((capture.node.end_byte(), highlight_id));
4562 highlights.next_capture = highlights.captures.next();
4563 }
4564 }
4565 }
4566
4567 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4568 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4569 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4570 if endpoint.offset <= self.range.start {
4571 self.update_diagnostic_depths(endpoint);
4572 diagnostic_endpoints.next();
4573 self.underline = endpoint.underline;
4574 } else {
4575 next_diagnostic_endpoint = endpoint.offset;
4576 break;
4577 }
4578 }
4579 }
4580 self.diagnostic_endpoints = diagnostic_endpoints;
4581
4582 if let Some(chunk) = self.chunks.peek() {
4583 let chunk_start = self.range.start;
4584 let mut chunk_end = (self.chunks.offset() + chunk.len())
4585 .min(next_capture_start)
4586 .min(next_diagnostic_endpoint);
4587 let mut highlight_id = None;
4588 if let Some(highlights) = self.highlights.as_ref() {
4589 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4590 chunk_end = chunk_end.min(*parent_capture_end);
4591 highlight_id = Some(*parent_highlight_id);
4592 }
4593 }
4594
4595 let slice =
4596 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4597 self.range.start = chunk_end;
4598 if self.range.start == self.chunks.offset() + chunk.len() {
4599 self.chunks.next().unwrap();
4600 }
4601
4602 Some(Chunk {
4603 text: slice,
4604 syntax_highlight_id: highlight_id,
4605 underline: self.underline,
4606 diagnostic_severity: self.current_diagnostic_severity(),
4607 is_unnecessary: self.current_code_is_unnecessary(),
4608 ..Chunk::default()
4609 })
4610 } else {
4611 None
4612 }
4613 }
4614}
4615
4616impl operation_queue::Operation for Operation {
4617 fn lamport_timestamp(&self) -> clock::Lamport {
4618 match self {
4619 Operation::Buffer(_) => {
4620 unreachable!("buffer operations should never be deferred at this layer")
4621 }
4622 Operation::UpdateDiagnostics {
4623 lamport_timestamp, ..
4624 }
4625 | Operation::UpdateSelections {
4626 lamport_timestamp, ..
4627 }
4628 | Operation::UpdateCompletionTriggers {
4629 lamport_timestamp, ..
4630 } => *lamport_timestamp,
4631 }
4632 }
4633}
4634
4635impl Default for Diagnostic {
4636 fn default() -> Self {
4637 Self {
4638 source: Default::default(),
4639 code: None,
4640 code_description: None,
4641 severity: DiagnosticSeverity::ERROR,
4642 message: Default::default(),
4643 markdown: None,
4644 group_id: 0,
4645 is_primary: false,
4646 is_disk_based: false,
4647 is_unnecessary: false,
4648 underline: true,
4649 data: None,
4650 }
4651 }
4652}
4653
4654impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4656 pub fn spaces(len: u32) -> Self {
4657 Self {
4658 len,
4659 kind: IndentKind::Space,
4660 }
4661 }
4662
4663 /// Returns an [`IndentSize`] representing a tab.
4664 pub fn tab() -> Self {
4665 Self {
4666 len: 1,
4667 kind: IndentKind::Tab,
4668 }
4669 }
4670
4671 /// An iterator over the characters represented by this [`IndentSize`].
4672 pub fn chars(&self) -> impl Iterator<Item = char> {
4673 iter::repeat(self.char()).take(self.len as usize)
4674 }
4675
4676 /// The character representation of this [`IndentSize`].
4677 pub fn char(&self) -> char {
4678 match self.kind {
4679 IndentKind::Space => ' ',
4680 IndentKind::Tab => '\t',
4681 }
4682 }
4683
4684 /// Consumes the current [`IndentSize`] and returns a new one that has
4685 /// been shrunk or enlarged by the given size along the given direction.
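    ///
    /// # Example
    ///
    /// A small sketch (marked `ignore`); it follows directly from the rules below:
    ///
    /// ```ignore
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 8);
    ///
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::tab());
    /// assert_eq!(indent.len, 4); // mismatched kinds leave the indent unchanged
    /// ```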
4686 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4687 match direction {
4688 Ordering::Less => {
4689 if self.kind == size.kind && self.len >= size.len {
4690 self.len -= size.len;
4691 }
4692 }
4693 Ordering::Equal => {}
4694 Ordering::Greater => {
4695 if self.len == 0 {
4696 self = size;
4697 } else if self.kind == size.kind {
4698 self.len += size.len;
4699 }
4700 }
4701 }
4702 self
4703 }
4704
4705 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4706 match self.kind {
4707 IndentKind::Space => self.len as usize,
4708 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4709 }
4710 }
4711}
4712
4713#[cfg(any(test, feature = "test-support"))]
4714pub struct TestFile {
4715 pub path: Arc<Path>,
4716 pub root_name: String,
4717 pub local_root: Option<PathBuf>,
4718}
4719
4720#[cfg(any(test, feature = "test-support"))]
4721impl File for TestFile {
4722 fn path(&self) -> &Arc<Path> {
4723 &self.path
4724 }
4725
4726 fn full_path(&self, _: &gpui::App) -> PathBuf {
4727 PathBuf::from(&self.root_name).join(self.path.as_ref())
4728 }
4729
4730 fn as_local(&self) -> Option<&dyn LocalFile> {
4731 if self.local_root.is_some() {
4732 Some(self)
4733 } else {
4734 None
4735 }
4736 }
4737
4738 fn disk_state(&self) -> DiskState {
4739 unimplemented!()
4740 }
4741
4742 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4743 self.path().file_name().unwrap_or(self.root_name.as_ref())
4744 }
4745
4746 fn worktree_id(&self, _: &App) -> WorktreeId {
4747 WorktreeId::from_usize(0)
4748 }
4749
4750 fn to_proto(&self, _: &App) -> rpc::proto::File {
4751 unimplemented!()
4752 }
4753
4754 fn is_private(&self) -> bool {
4755 false
4756 }
4757}
4758
4759#[cfg(any(test, feature = "test-support"))]
4760impl LocalFile for TestFile {
4761 fn abs_path(&self, _cx: &App) -> PathBuf {
4762 PathBuf::from(self.local_root.as_ref().unwrap())
4763 .join(&self.root_name)
4764 .join(self.path.as_ref())
4765 }
4766
4767 fn load(&self, _cx: &App) -> Task<Result<String>> {
4768 unimplemented!()
4769 }
4770
4771 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4772 unimplemented!()
4773 }
4774}
4775
/// Groups an ascending sequence of values into contiguous ranges, starting a new range whenever
/// a gap is encountered or the current range would exceed `max_len`.
pub(crate) fn contiguous_ranges(
4777 values: impl Iterator<Item = u32>,
4778 max_len: usize,
4779) -> impl Iterator<Item = Range<u32>> {
4780 let mut values = values;
4781 let mut current_range: Option<Range<u32>> = None;
4782 std::iter::from_fn(move || {
4783 loop {
4784 if let Some(value) = values.next() {
4785 if let Some(range) = &mut current_range {
4786 if value == range.end && range.len() < max_len {
4787 range.end += 1;
4788 continue;
4789 }
4790 }
4791
4792 let prev_range = current_range.clone();
4793 current_range = Some(value..(value + 1));
4794 if prev_range.is_some() {
4795 return prev_range;
4796 }
4797 } else {
4798 return current_range.take();
4799 }
4800 }
4801 })
4802}
4803
/// Classifies characters as words, whitespace, or punctuation, taking the current language
/// scope's word characters into account.
#[derive(Default, Debug)]
4805pub struct CharClassifier {
4806 scope: Option<LanguageScope>,
4807 for_completion: bool,
4808 ignore_punctuation: bool,
4809}
4810
4811impl CharClassifier {
4812 pub fn new(scope: Option<LanguageScope>) -> Self {
4813 Self {
4814 scope,
4815 for_completion: false,
4816 ignore_punctuation: false,
4817 }
4818 }
4819
4820 pub fn for_completion(self, for_completion: bool) -> Self {
4821 Self {
4822 for_completion,
4823 ..self
4824 }
4825 }
4826
4827 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4828 Self {
4829 ignore_punctuation,
4830 ..self
4831 }
4832 }
4833
4834 pub fn is_whitespace(&self, c: char) -> bool {
4835 self.kind(c) == CharKind::Whitespace
4836 }
4837
4838 pub fn is_word(&self, c: char) -> bool {
4839 self.kind(c) == CharKind::Word
4840 }
4841
4842 pub fn is_punctuation(&self, c: char) -> bool {
4843 self.kind(c) == CharKind::Punctuation
4844 }
4845
4846 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4847 if c.is_alphanumeric() || c == '_' {
4848 return CharKind::Word;
4849 }
4850
4851 if let Some(scope) = &self.scope {
4852 let characters = if self.for_completion {
4853 scope.completion_query_characters()
4854 } else {
4855 scope.word_characters()
4856 };
4857 if let Some(characters) = characters {
4858 if characters.contains(&c) {
4859 return CharKind::Word;
4860 }
4861 }
4862 }
4863
4864 if c.is_whitespace() {
4865 return CharKind::Whitespace;
4866 }
4867
4868 if ignore_punctuation {
4869 CharKind::Word
4870 } else {
4871 CharKind::Punctuation
4872 }
4873 }
4874
4875 pub fn kind(&self, c: char) -> CharKind {
4876 self.kind_with(c, self.ignore_punctuation)
4877 }
4878}
4879
4880/// Find all of the ranges of whitespace that occur at the ends of lines
4881/// in the given rope.
4882///
4883/// This could also be done with a regex search, but this implementation
4884/// avoids copying text.
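///
/// # Example
///
/// An illustrative sketch (marked `ignore`); it assumes a `Rope` can be built from a `&str`:
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n    let x = 1;\t\n}\n");
/// // Yields the two spaces after `{` and the tab after `;`.
/// let ranges = trailing_whitespace_ranges(&rope);
/// assert_eq!(ranges, vec![11..13, 28..29]);
/// ```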
4885pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4886 let mut ranges = Vec::new();
4887
4888 let mut offset = 0;
4889 let mut prev_chunk_trailing_whitespace_range = 0..0;
4890 for chunk in rope.chunks() {
4891 let mut prev_line_trailing_whitespace_range = 0..0;
4892 for (i, line) in chunk.split('\n').enumerate() {
4893 let line_end_offset = offset + line.len();
4894 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4895 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4896
4897 if i == 0 && trimmed_line_len == 0 {
4898 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4899 }
4900 if !prev_line_trailing_whitespace_range.is_empty() {
4901 ranges.push(prev_line_trailing_whitespace_range);
4902 }
4903
4904 offset = line_end_offset + 1;
4905 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4906 }
4907
4908 offset -= 1;
4909 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4910 }
4911
4912 if !prev_chunk_trailing_whitespace_range.is_empty() {
4913 ranges.push(prev_chunk_trailing_whitespace_range);
4914 }
4915
4916 ranges
4917}