use crate::{
    DebugVariableCapture, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
    TextObject, TreeSitterOptions,
    diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
    language_settings::{LanguageSettings, language_settings},
    outline::OutlineItem,
    syntax_map::{
        SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
        SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
    },
    task_context::RunnableRange,
    text_diff::text_diff,
};
pub use crate::{
    Grammar, Language, LanguageRegistry,
    diagnostic_set::DiagnosticSet,
    highlight_map::{HighlightId, HighlightMap},
    proto,
};
use anyhow::{Context as _, Result, anyhow};
use async_watch as watch;
pub use clock::ReplicaId;
use clock::{AGENT_REPLICA_ID, Lamport};
use collections::HashMap;
use fs::MTime;
use futures::channel::oneshot;
use gpui::{
    App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
    Task, TaskLabel, TextStyle,
};
use lsp::{LanguageServerId, NumberOrString};
use parking_lot::Mutex;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use settings::WorktreeId;
use smallvec::SmallVec;
use smol::future::yield_now;
use std::{
    any::Any,
    borrow::Cow,
    cell::Cell,
    cmp::{self, Ordering, Reverse},
    collections::{BTreeMap, BTreeSet},
    ffi::OsStr,
    future::Future,
    iter::{self, Iterator, Peekable},
    mem,
    num::NonZeroU32,
    ops::{Deref, Range},
    path::{Path, PathBuf},
    rc,
    sync::{Arc, LazyLock},
    time::{Duration, Instant},
    vec,
};
use sum_tree::TreeMap;
use text::operation_queue::OperationQueue;
use text::*;
pub use text::{
    Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
    LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
    SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
    ToPointUtf16, Transaction, TransactionId, Unclipped,
};
use theme::{ActiveTheme as _, SyntaxTheme};
#[cfg(any(test, feature = "test-support"))]
use util::RandomCharIter;
use util::{RangeExt, debug_panic, maybe};

#[cfg(any(test, feature = "test-support"))]
pub use {tree_sitter_rust, tree_sitter_typescript};

pub use lsp::DiagnosticSeverity;

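/// A byte range within a particular buffer, identified by its [`BufferId`].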
#[derive(Debug)]
pub struct DebugVariableRanges {
    pub buffer_id: BufferId,
    pub range: Range<usize>,
}

/// A label for the background task spawned by the buffer to compute
/// a diff against the contents of its file.
pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);

/// Indicates whether a [`Buffer`] has permission to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {
    /// The buffer is a mutable replica.
    ReadWrite,
    /// The buffer is a read-only replica.
    ReadOnly,
}

pub type BufferRow = u32;

/// An in-memory representation of a source code file, including its text,
/// syntax trees, git status, and diagnostics.
pub struct Buffer {
    text: TextBuffer,
    branch_state: Option<BufferBranchState>,
    /// Filesystem state, `None` when there is no path.
    file: Option<Arc<dyn File>>,
    /// The mtime of the file when this buffer was last loaded from
    /// or saved to disk.
    saved_mtime: Option<MTime>,
    /// The version vector when this buffer was last loaded from
    /// or saved to disk.
    saved_version: clock::Global,
    preview_version: clock::Global,
    transaction_depth: usize,
    was_dirty_before_starting_transaction: Option<bool>,
    reload_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    pending_autoindent: Option<Task<()>>,
    sync_parse_timeout: Duration,
    syntax_map: Mutex<SyntaxMap>,
    reparse: Option<Task<()>>,
    parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
    non_text_state_update_count: usize,
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    diagnostics_timestamp: clock::Lamport,
    completion_triggers: BTreeSet<String>,
    completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
    completion_triggers_timestamp: clock::Lamport,
    deferred_ops: OperationQueue<Operation>,
    capability: Capability,
    has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` at the time of the last call.
    has_unsaved_edits: Cell<(clock::Global, bool)>,
    change_bits: Vec<rc::Weak<Cell<bool>>>,
    _subscriptions: Vec<gpui::Subscription>,
}

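/// Whether the buffer's syntax map is up to date or a reparse is currently in progress.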
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ParseStatus {
    Idle,
    Parsing,
}

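/// State that a branch buffer keeps about the buffer it was forked from,
/// including the timestamps of the base-buffer operations that were produced
/// by merging this branch's edits back into the base.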
struct BufferBranchState {
    base_buffer: Entity<Buffer>,
    merged_operations: Vec<Lamport>,
}

/// An immutable, cheaply cloneable representation of a fixed
/// state of a buffer.
pub struct BufferSnapshot {
    pub text: text::BufferSnapshot,
    pub(crate) syntax: SyntaxSnapshot,
    file: Option<Arc<dyn File>>,
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    language: Option<Arc<Language>>,
    non_text_state_update_count: usize,
}

/// The kind and amount of indentation in a particular line. For now,
/// assumes that indentation is all the same character.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub struct IndentSize {
    /// The number of bytes that comprise the indentation.
    pub len: u32,
    /// The kind of whitespace used for indentation.
    pub kind: IndentKind,
}

/// A whitespace character that's used for indentation.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub enum IndentKind {
    /// An ASCII space character.
    #[default]
    Space,
    /// An ASCII tab character.
    Tab,
}

/// The shape of a selection cursor.
#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum CursorShape {
    /// A vertical bar
    #[default]
    Bar,
    /// A block that surrounds the following character
    Block,
    /// An underline that runs along the following character
    Underline,
    /// A box drawn around the following character
    Hollow,
}

#[derive(Clone, Debug)]
struct SelectionSet {
    line_mode: bool,
    cursor_shape: CursorShape,
    selections: Arc<[Selection<Anchor>]>,
    lamport_timestamp: clock::Lamport,
}

/// A diagnostic associated with a certain range of a buffer.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Diagnostic {
    /// The name of the service that produced this diagnostic.
    pub source: Option<String>,
    /// A machine-readable code that identifies this diagnostic.
    pub code: Option<NumberOrString>,
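    /// A URL that points to documentation describing this diagnostic's code.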
    pub code_description: Option<lsp::Url>,
    /// Whether this diagnostic is a hint, warning, or error.
    pub severity: DiagnosticSeverity,
    /// The human-readable message associated with this diagnostic.
    pub message: String,
    /// The human-readable message, in Markdown format.
    pub markdown: Option<String>,
    /// An id that identifies the group to which this diagnostic belongs.
    ///
    /// When a language server produces a diagnostic with
    /// one or more associated diagnostics, those diagnostics are all
    /// assigned a single group ID.
    pub group_id: usize,
    /// Whether this diagnostic is the primary diagnostic for its group.
    ///
    /// In a given group, the primary diagnostic is the top-level diagnostic
    /// returned by the language server. The non-primary diagnostics are the
    /// associated diagnostics.
    pub is_primary: bool,
    /// Whether this diagnostic is considered to originate from an analysis of
    /// files on disk, as opposed to any unsaved buffer contents. This is a
    /// property of a given diagnostic source, and is configured for a given
    /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
    /// for the language server.
    pub is_disk_based: bool,
    /// Whether this diagnostic marks unnecessary code.
    pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic. It is passed back to the
    /// language server when we request code actions for this diagnostic.
    pub data: Option<Value>,
}

/// An operation used to synchronize this buffer with its other replicas.
#[derive(Clone, Debug, PartialEq)]
pub enum Operation {
    /// A text operation.
    Buffer(text::Operation),

    /// An update to the buffer's diagnostics.
    UpdateDiagnostics {
        /// The id of the language server that produced the new diagnostics.
        server_id: LanguageServerId,
        /// The diagnostics.
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },

    /// An update to the most recent selections in this buffer.
    UpdateSelections {
        /// The selections.
        selections: Arc<[Selection<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// Whether the selections are in 'line mode'.
        line_mode: bool,
        /// The [`CursorShape`] associated with these selections.
        cursor_shape: CursorShape,
    },

    /// An update to the characters that should trigger autocompletion
    /// for this buffer.
    UpdateCompletionTriggers {
        /// The characters that trigger autocompletion.
        triggers: Vec<String>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// The language server ID.
        server_id: LanguageServerId,
    },
}

/// An event that occurs in a buffer.
#[derive(Clone, Debug, PartialEq)]
pub enum BufferEvent {
    /// The buffer was changed in a way that must be
    /// propagated to its other replicas.
    Operation {
        operation: Operation,
        is_local: bool,
    },
    /// The buffer was edited.
    Edited,
    /// The buffer's `dirty` bit changed.
    DirtyChanged,
    /// The buffer was saved.
    Saved,
    /// The buffer's file was changed on disk.
    FileHandleChanged,
    /// The buffer was reloaded.
    Reloaded,
    /// The buffer is in need of a reload.
    ReloadNeeded,
    /// The buffer's language was changed.
    LanguageChanged,
    /// The buffer's syntax trees were updated.
    Reparsed,
    /// The buffer's diagnostics were updated.
    DiagnosticsUpdated,
    /// The buffer gained or lost editing capabilities.
    CapabilityChanged,
    /// The buffer was explicitly requested to close.
    Closed,
    /// The buffer was discarded when closing.
    Discarded,
}

/// The file associated with a buffer.
pub trait File: Send + Sync + Any {
    /// Returns the [`LocalFile`] associated with this file, if the
    /// file is local.
    fn as_local(&self) -> Option<&dyn LocalFile>;

    /// Returns whether this file is local.
    fn is_local(&self) -> bool {
        self.as_local().is_some()
    }

    /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
    /// only available in some states, such as modification time.
    fn disk_state(&self) -> DiskState;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<Path>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self, cx: &App) -> PathBuf;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;

    /// Returns the id of the worktree to which this file belongs.
    ///
    /// This is needed for looking up project-specific settings.
    fn worktree_id(&self, cx: &App) -> WorktreeId;

    /// Converts this file into a protobuf message.
    fn to_proto(&self, cx: &App) -> rpc::proto::File;

    /// Returns whether Zed considers this to be a private file.
    fn is_private(&self) -> bool;
}

/// The file's storage status - whether it's stored (`Present`), and if so when it was last
/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
/// indicator for new files.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum DiskState {
    /// File created in Zed that has not been saved.
    New,
    /// File present on the filesystem.
    Present { mtime: MTime },
    /// Deleted file that was previously present.
    Deleted,
}

impl DiskState {
    /// Returns the file's last known modification time on disk.
    pub fn mtime(self) -> Option<MTime> {
        match self {
            DiskState::New => None,
            DiskState::Present { mtime } => Some(mtime),
            DiskState::Deleted => None,
        }
    }

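    /// Returns whether the file currently exists on disk.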
    pub fn exists(&self) -> bool {
        match self {
            DiskState::New => false,
            DiskState::Present { .. } => true,
            DiskState::Deleted => false,
        }
    }
}

/// The file associated with a buffer, in the case where the file is on the local disk.
pub trait LocalFile: File {
    /// Returns the absolute path of this file.
    fn abs_path(&self, cx: &App) -> PathBuf;

    /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
    fn load(&self, cx: &App) -> Task<Result<String>>;

    /// Loads the file's contents from disk.
    fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
}

/// The auto-indent behavior associated with an editing operation.
/// For some editing operations, each affected line of text has its
/// indentation recomputed. For other operations, the entire block
/// of edited text is adjusted uniformly.
#[derive(Clone, Debug)]
pub enum AutoindentMode {
    /// Indent each line of inserted text.
    EachLine,
    /// Apply the same indentation adjustment to all of the lines
    /// in a given insertion.
    Block {
        /// The original indentation column of the first line of each
        /// insertion, if it has been copied.
        ///
        /// Knowing this makes it possible to preserve the relative indentation
        /// of every line in the insertion from when it was copied.
        ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is auto-indented to column `b`, then every other line of the
        /// insertion has its indentation adjusted by `b - a` columns.
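        ///
        /// For example, if a block was copied whose first line was indented to
        /// column 4 (`a = 4`) and that line is auto-indented to column 8 (`b = 8`),
        /// the remaining lines of the insertion are shifted right by 4 columns.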
        original_indent_columns: Vec<Option<u32>>,
    },
}

#[derive(Clone)]
struct AutoindentRequest {
    before_edit: BufferSnapshot,
    entries: Vec<AutoindentRequestEntry>,
    is_block_mode: bool,
    ignore_empty_lines: bool,
}

#[derive(Debug, Clone)]
struct AutoindentRequestEntry {
    /// A range of the buffer whose indentation should be adjusted.
    range: Range<Anchor>,
    /// Whether or not these lines should be considered brand new, for the
    /// purpose of auto-indent. When text is not new, its indentation will
    /// only be adjusted if the suggested indentation level has *changed*
    /// since the edit was made.
    first_line_is_new: bool,
    indent_size: IndentSize,
    original_indent_column: Option<u32>,
}

#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    delta: Ordering,
    within_error: bool,
}

struct BufferChunkHighlights<'a> {
    captures: SyntaxMapCaptures<'a>,
    next_capture: Option<SyntaxMapCapture<'a>>,
    stack: Vec<(usize, HighlightId)>,
    highlight_maps: Vec<HighlightMap>,
}

/// An iterator that yields chunks of a buffer's text, along with their
/// syntax highlights and diagnostic status.
pub struct BufferChunks<'a> {
    buffer_snapshot: Option<&'a BufferSnapshot>,
    range: Range<usize>,
    chunks: text::Chunks<'a>,
    diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    unnecessary_depth: usize,
    highlights: Option<BufferChunkHighlights<'a>>,
}

/// A chunk of a buffer's text, along with its syntax highlight and
/// diagnostic status.
#[derive(Clone, Debug, Default)]
pub struct Chunk<'a> {
    /// The text of the chunk.
    pub text: &'a str,
    /// The syntax highlighting style of the chunk.
    pub syntax_highlight_id: Option<HighlightId>,
    /// The highlight style that has been applied to this chunk in
    /// the editor.
    pub highlight_style: Option<HighlightStyle>,
    /// The severity of diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// Whether this chunk of text is marked as unnecessary.
    pub is_unnecessary: bool,
    /// Whether this chunk of text was originally a tab character.
    pub is_tab: bool,
}

/// A set of edits to a given version of a buffer, computed asynchronously.
#[derive(Debug)]
pub struct Diff {
    pub base_version: clock::Global,
    pub line_ending: LineEnding,
    pub edits: Vec<(Range<usize>, Arc<str>)>,
}

#[derive(Clone, Copy)]
pub(crate) struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    severity: DiagnosticSeverity,
    is_unnecessary: bool,
}

/// A class of characters, used for characterizing a run of text.
#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
pub enum CharKind {
    /// Whitespace.
    Whitespace,
    /// Punctuation.
    Punctuation,
    /// Word.
    Word,
}

/// A runnable is a set of data about a region of the buffer that can be resolved into a task.
pub struct Runnable {
    pub tags: SmallVec<[RunnableTag; 1]>,
    pub language: Arc<Language>,
    pub buffer: BufferId,
}

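/// A string of text along with the highlight styles to apply to ranges of it,
/// ready to be rendered as a [`StyledText`] element.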
#[derive(Default, Clone, Debug)]
pub struct HighlightedText {
    pub text: SharedString,
    pub highlights: Vec<(Range<usize>, HighlightStyle)>,
}

#[derive(Default, Debug)]
struct HighlightedTextBuilder {
    pub text: String,
    pub highlights: Vec<(Range<usize>, HighlightStyle)>,
}

impl HighlightedText {
    pub fn from_buffer_range<T: ToOffset>(
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) -> Self {
        let mut highlighted_text = HighlightedTextBuilder::default();
        highlighted_text.add_text_from_buffer_range(
            range,
            snapshot,
            syntax_snapshot,
            override_style,
            syntax_theme,
        );
        highlighted_text.build()
    }

    pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
        gpui::StyledText::new(self.text.clone())
            .with_default_highlights(default_style, self.highlights.iter().cloned())
    }

    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within it, along with a boolean indicating whether more lines follow.
    pub fn first_line_preview(self) -> (Self, bool) {
        let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
        let first_line = &self.text[..newline_ix];

        // Trim leading whitespace, unless an edit starts prior to it.
        let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
        if let Some((first_highlight_range, _)) = self.highlights.first() {
            preview_start_ix = preview_start_ix.min(first_highlight_range.start);
        }

        let preview_text = &first_line[preview_start_ix..];
        let preview_highlights = self
            .highlights
            .into_iter()
            .take_while(|(range, _)| range.start < newline_ix)
            .filter_map(|(mut range, highlight)| {
                range.start = range.start.saturating_sub(preview_start_ix);
                range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
                if range.is_empty() {
                    None
                } else {
                    Some((range, highlight))
                }
            });

        let preview = Self {
            text: SharedString::new(preview_text),
            highlights: preview_highlights.collect(),
        };

        (preview, self.text.len() > newline_ix)
    }
}

impl HighlightedTextBuilder {
    pub fn build(self) -> HighlightedText {
        HighlightedText {
            text: self.text.into(),
            highlights: self.highlights,
        }
    }

    pub fn add_text_from_buffer_range<T: ToOffset>(
        &mut self,
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) {
        let range = range.to_offset(snapshot);
        for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
            let start = self.text.len();
            self.text.push_str(chunk.text);
            let end = self.text.len();

            if let Some(mut highlight_style) = chunk
                .syntax_highlight_id
                .and_then(|id| id.style(syntax_theme))
            {
                if let Some(override_style) = override_style {
                    highlight_style.highlight(override_style);
                }
                self.highlights.push((start..end, highlight_style));
            } else if let Some(override_style) = override_style {
                self.highlights.push((start..end, override_style));
            }
        }
    }

    fn highlighted_chunks<'a>(
        range: Range<usize>,
        snapshot: &'a text::BufferSnapshot,
        syntax_snapshot: &'a SyntaxSnapshot,
    ) -> BufferChunks<'a> {
        let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
            grammar.highlights_query.as_ref()
        });

        let highlight_maps = captures
            .grammars()
            .iter()
            .map(|grammar| grammar.highlight_map())
            .collect();

        BufferChunks::new(
            snapshot.as_rope(),
            range,
            Some((captures, highlight_maps)),
            false,
            None,
        )
    }
}

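/// A pair of text snapshots - the buffer's text before a set of edits and the
/// text with those edits applied - used to render a syntax-highlighted preview
/// of the edits.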
#[derive(Clone)]
pub struct EditPreview {
    old_snapshot: text::BufferSnapshot,
    applied_edits_snapshot: text::BufferSnapshot,
    syntax_snapshot: SyntaxSnapshot,
}

impl EditPreview {
    pub fn highlight_edits(
        &self,
        current_snapshot: &BufferSnapshot,
        edits: &[(Range<Anchor>, String)],
        include_deletions: bool,
        cx: &App,
    ) -> HighlightedText {
        let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
            return HighlightedText::default();
        };

        let mut highlighted_text = HighlightedTextBuilder::default();

        let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;

        let insertion_highlight_style = HighlightStyle {
            background_color: Some(cx.theme().status().created_background),
            ..Default::default()
        };
        let deletion_highlight_style = HighlightStyle {
            background_color: Some(cx.theme().status().deleted_background),
            ..Default::default()
        };
        let syntax_theme = cx.theme().syntax();

        for (range, edit_text) in edits {
            let edit_new_end_in_preview_snapshot = range
                .end
                .bias_right(&self.old_snapshot)
                .to_offset(&self.applied_edits_snapshot);
            let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();

            let unchanged_range_in_preview_snapshot =
                offset_in_preview_snapshot..edit_start_in_preview_snapshot;
            if !unchanged_range_in_preview_snapshot.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    unchanged_range_in_preview_snapshot,
                    &self.applied_edits_snapshot,
                    &self.syntax_snapshot,
                    None,
                    &syntax_theme,
                );
            }

            let range_in_current_snapshot = range.to_offset(current_snapshot);
            if include_deletions && !range_in_current_snapshot.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
                    Some(deletion_highlight_style),
                    &syntax_theme,
                );
            }

            if !edit_text.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
                    &self.applied_edits_snapshot,
                    &self.syntax_snapshot,
                    Some(insertion_highlight_style),
                    &syntax_theme,
                );
            }

            offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
        }

        highlighted_text.add_text_from_buffer_range(
            offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
            &self.applied_edits_snapshot,
            &self.syntax_snapshot,
            None,
            &syntax_theme,
        );

        highlighted_text.build()
    }

    fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
        let (first, _) = edits.first()?;
        let (last, _) = edits.last()?;

        let start = first
            .start
            .bias_left(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);
        let end = last
            .end
            .bias_right(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);

        // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
        let range = Point::new(start.row, 0)
            ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));

        Some(range.to_offset(&self.applied_edits_snapshot))
    }
}

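/// The byte ranges of a matching pair of enclosing brackets within a buffer.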
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct BracketMatch {
    pub open_range: Range<usize>,
    pub close_range: Range<usize>,
    pub newline_only: bool,
}

impl Buffer {
    /// Create a new buffer with the given base text.
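    ///
    /// A minimal usage sketch (illustrative only, assuming a gpui entity context
    /// is in scope):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("Hello, world!", cx));
    /// ```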
    pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
        Self::build(
            TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
            None,
            Capability::ReadWrite,
        )
    }

    /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
    pub fn local_normalized(
        base_text_normalized: Rope,
        line_ending: LineEnding,
        cx: &Context<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new_normalized(
                0,
                cx.entity_id().as_non_zero_u64().into(),
                line_ending,
                base_text_normalized,
            ),
            None,
            Capability::ReadWrite,
        )
    }

    /// Create a new buffer that is a replica of a remote buffer.
    pub fn remote(
        remote_id: BufferId,
        replica_id: ReplicaId,
        capability: Capability,
        base_text: impl Into<String>,
    ) -> Self {
        Self::build(
            TextBuffer::new(replica_id, remote_id, base_text.into()),
            None,
            capability,
        )
    }

    /// Create a new buffer that is a replica of a remote buffer, populating its
    /// state from the given protobuf message.
    pub fn from_proto(
        replica_id: ReplicaId,
        capability: Capability,
        message: proto::BufferState,
        file: Option<Arc<dyn File>>,
    ) -> Result<Self> {
        let buffer_id = BufferId::new(message.id)
            .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
        let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
        let mut this = Self::build(buffer, file, capability);
        this.text.set_line_ending(proto::deserialize_line_ending(
            rpc::proto::LineEnding::from_i32(message.line_ending)
                .ok_or_else(|| anyhow!("missing line_ending"))?,
        ));
        this.saved_version = proto::deserialize_version(&message.saved_version);
        this.saved_mtime = message.saved_mtime.map(|time| time.into());
        Ok(this)
    }

    /// Serialize the buffer's state to a protobuf message.
    pub fn to_proto(&self, cx: &App) -> proto::BufferState {
        proto::BufferState {
            id: self.remote_id().into(),
            file: self.file.as_ref().map(|f| f.to_proto(cx)),
            base_text: self.base_text().to_string(),
            line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
            saved_version: proto::serialize_version(&self.saved_version),
            saved_mtime: self.saved_mtime.map(|time| time.into()),
        }
    }

    /// Serialize as protobufs all of the changes to the buffer since the given version.
    pub fn serialize_ops(
        &self,
        since: Option<clock::Global>,
        cx: &App,
    ) -> Task<Vec<proto::Operation>> {
        let mut operations = Vec::new();
        operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));

        operations.extend(self.remote_selections.iter().map(|(_, set)| {
            proto::serialize_operation(&Operation::UpdateSelections {
                selections: set.selections.clone(),
                lamport_timestamp: set.lamport_timestamp,
                line_mode: set.line_mode,
                cursor_shape: set.cursor_shape,
            })
        }));

        for (server_id, diagnostics) in &self.diagnostics {
            operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
                lamport_timestamp: self.diagnostics_timestamp,
                server_id: *server_id,
                diagnostics: diagnostics.iter().cloned().collect(),
            }));
        }

        for (server_id, completions) in &self.completion_triggers_per_language_server {
            operations.push(proto::serialize_operation(
                &Operation::UpdateCompletionTriggers {
                    triggers: completions.iter().cloned().collect(),
                    lamport_timestamp: self.completion_triggers_timestamp,
                    server_id: *server_id,
                },
            ));
        }

        let text_operations = self.text.operations().clone();
        cx.background_spawn(async move {
            let since = since.unwrap_or_default();
            operations.extend(
                text_operations
                    .iter()
                    .filter(|(_, op)| !since.observed(op.timestamp()))
                    .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
            );
            operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
            operations
        })
    }

    /// Assign a language to the buffer, returning the buffer.
    pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
        self.set_language(Some(language), cx);
        self
    }

    /// Returns the [`Capability`] of this buffer.
    pub fn capability(&self) -> Capability {
        self.capability
    }

    /// Whether this buffer can only be read.
    pub fn read_only(&self) -> bool {
        self.capability == Capability::ReadOnly
    }

    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`] and [`Capability`].
    pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
        let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
        let snapshot = buffer.snapshot();
        let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
        Self {
            saved_mtime,
            saved_version: buffer.version(),
            preview_version: buffer.version(),
            reload_task: None,
            transaction_depth: 0,
            was_dirty_before_starting_transaction: None,
            has_unsaved_edits: Cell::new((buffer.version(), false)),
            text: buffer,
            branch_state: None,
            file,
            capability,
            syntax_map,
            reparse: None,
            non_text_state_update_count: 0,
            sync_parse_timeout: Duration::from_millis(1),
            parse_status: async_watch::channel(ParseStatus::Idle),
            autoindent_requests: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            remote_selections: Default::default(),
            diagnostics: Default::default(),
            diagnostics_timestamp: Default::default(),
            completion_triggers: Default::default(),
            completion_triggers_per_language_server: Default::default(),
            completion_triggers_timestamp: Default::default(),
            deferred_ops: OperationQueue::new(),
            has_conflict: false,
            change_bits: Default::default(),
            _subscriptions: Vec::new(),
        }
    }

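    /// Builds a [`BufferSnapshot`] for the given text without constructing a
    /// [`Buffer`] entity, parsing it with the given language if one is provided.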
    pub fn build_snapshot(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        cx: &mut App,
    ) -> impl Future<Output = BufferSnapshot> + use<> {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        async move {
            let text =
                TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
            let mut syntax = SyntaxMap::new(&text).snapshot();
            if let Some(language) = language.clone() {
                let text = text.clone();
                let language = language.clone();
                let language_registry = language_registry.clone();
                syntax.reparse(&text, language_registry, language);
            }
            BufferSnapshot {
                text,
                syntax,
                file: None,
                diagnostics: Default::default(),
                remote_selections: Default::default(),
                language,
                non_text_state_update_count: 0,
            }
        }
    }

    pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        let text =
            TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
        let syntax = SyntaxMap::new(&text).snapshot();
        BufferSnapshot {
            text,
            syntax,
            file: None,
            diagnostics: Default::default(),
            remote_selections: Default::default(),
            language: None,
            non_text_state_update_count: 0,
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn build_snapshot_sync(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        cx: &mut App,
    ) -> BufferSnapshot {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
        let mut syntax = SyntaxMap::new(&text).snapshot();
        if let Some(language) = language.clone() {
            let text = text.clone();
            let language = language.clone();
            let language_registry = language_registry.clone();
            syntax.reparse(&text, language_registry, language);
        }
        BufferSnapshot {
            text,
            syntax,
            file: None,
            diagnostics: Default::default(),
            remote_selections: Default::default(),
            language,
            non_text_state_update_count: 0,
        }
    }

    /// Retrieve a snapshot of the buffer's current state. This is computationally
    /// cheap, and allows reading from the buffer on a background thread.
    pub fn snapshot(&self) -> BufferSnapshot {
        let text = self.text.snapshot();
        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let syntax = syntax_map.snapshot();

        BufferSnapshot {
            text,
            syntax,
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
        }
    }

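    /// Creates a branch buffer: a new buffer that starts out with the same
    /// contents as this one and whose edits can later be merged back into this
    /// buffer via [`Buffer::merge_into_base`].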
    pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
        let this = cx.entity();
        cx.new(|cx| {
            let mut branch = Self {
                branch_state: Some(BufferBranchState {
                    base_buffer: this.clone(),
                    merged_operations: Default::default(),
                }),
                language: self.language.clone(),
                has_conflict: self.has_conflict,
                has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
                _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
                ..Self::build(self.text.branch(), self.file.clone(), self.capability())
            };
            if let Some(language_registry) = self.language_registry() {
                branch.set_language_registry(language_registry);
            }

            // Reparse the branch buffer so that we get syntax highlighting immediately.
            branch.reparse(cx);

            branch
        })
    }

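    /// Spawns a background task that applies the given edits to a branch of
    /// this buffer's text and returns an [`EditPreview`] for rendering a
    /// highlighted preview of the result.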
    pub fn preview_edits(
        &self,
        edits: Arc<[(Range<Anchor>, String)]>,
        cx: &App,
    ) -> Task<EditPreview> {
        let registry = self.language_registry();
        let language = self.language().cloned();
        let old_snapshot = self.text.snapshot();
        let mut branch_buffer = self.text.branch();
        let mut syntax_snapshot = self.syntax_map.lock().snapshot();
        cx.background_spawn(async move {
            if !edits.is_empty() {
                if let Some(language) = language.clone() {
                    syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
                }

                branch_buffer.edit(edits.iter().cloned());
                let snapshot = branch_buffer.snapshot();
                syntax_snapshot.interpolate(&snapshot);

                if let Some(language) = language {
                    syntax_snapshot.reparse(&snapshot, registry, language);
                }
            }
            EditPreview {
                old_snapshot,
                applied_edits_snapshot: branch_buffer.snapshot(),
                syntax_snapshot,
            }
        })
    }

    /// Applies all of the changes in this buffer that intersect any of the
    /// given `ranges` to its base buffer.
    ///
    /// If `ranges` is empty, then all changes will be applied. This buffer must
    /// be a branch buffer to call this method.
    pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
        let Some(base_buffer) = self.base_buffer() else {
            debug_panic!("not a branch buffer");
            return;
        };

        let mut ranges = if ranges.is_empty() {
            &[0..usize::MAX]
        } else {
            ranges.as_slice()
        }
        .into_iter()
        .peekable();

        let mut edits = Vec::new();
        for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
            let mut is_included = false;
            while let Some(range) = ranges.peek() {
                if range.end < edit.new.start {
                    ranges.next().unwrap();
                } else {
                    if range.start <= edit.new.end {
                        is_included = true;
                    }
                    break;
                }
            }

            if is_included {
                edits.push((
                    edit.old.clone(),
                    self.text_for_range(edit.new.clone()).collect::<String>(),
                ));
            }
        }

        let operation = base_buffer.update(cx, |base_buffer, cx| {
            // cx.emit(BufferEvent::DiffBaseChanged);
            base_buffer.edit(edits, None, cx)
        });

        if let Some(operation) = operation {
            if let Some(BufferBranchState {
                merged_operations, ..
            }) = &mut self.branch_state
            {
                merged_operations.push(operation);
            }
        }
    }

    fn on_base_buffer_event(
        &mut self,
        _: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        let BufferEvent::Operation { operation, .. } = event else {
            return;
        };
        let Some(BufferBranchState {
            merged_operations, ..
        }) = &mut self.branch_state
        else {
            return;
        };

        let mut operation_to_undo = None;
        if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
            if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
                merged_operations.remove(ix);
                operation_to_undo = Some(operation.timestamp);
            }
        }

        self.apply_ops([operation.clone()], cx);

        if let Some(timestamp) = operation_to_undo {
            let counts = [(timestamp, u32::MAX)].into_iter().collect();
            self.undo_operations(counts, cx);
        }
    }

    #[cfg(test)]
    pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
        &self.text
    }

    /// Retrieve a snapshot of the buffer's raw text, without any
    /// language-related state like the syntax tree or diagnostics.
    pub fn text_snapshot(&self) -> text::BufferSnapshot {
        self.text.snapshot()
    }

    /// The file associated with the buffer, if any.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }

    /// The version of the buffer that was last saved or reloaded from disk.
    pub fn saved_version(&self) -> &clock::Global {
        &self.saved_version
    }

    /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
    pub fn saved_mtime(&self) -> Option<MTime> {
        self.saved_mtime
    }

    /// Assign a language to the buffer.
    pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().clear(&self.text);
        self.language = language;
        self.was_changed();
        self.reparse(cx);
        cx.emit(BufferEvent::LanguageChanged);
    }

    /// Assign a language registry to the buffer. This allows the buffer to retrieve
    /// other languages if parts of the buffer are written in different languages.
    pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
        self.syntax_map
            .lock()
            .set_language_registry(language_registry);
    }

    pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
        self.syntax_map.lock().language_registry()
    }

    /// Assign the buffer a new [`Capability`].
    pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
        self.capability = capability;
        cx.emit(BufferEvent::CapabilityChanged)
    }

    /// This method is called to signal that the buffer has been saved.
    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version;
        self.has_unsaved_edits
            .set((self.saved_version().clone(), false));
        self.has_conflict = false;
        self.saved_mtime = mtime;
        self.was_changed();
        cx.emit(BufferEvent::Saved);
        cx.notify();
    }

    /// This method is called to signal that the buffer has been discarded.
    pub fn discarded(&self, cx: &mut Context<Self>) {
        cx.emit(BufferEvent::Discarded);
        cx.notify();
    }

    /// Reloads the contents of the buffer from disk.
    pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
        let (tx, rx) = futures::channel::oneshot::channel();
        let prev_version = self.text.version();
        self.reload_task = Some(cx.spawn(async move |this, cx| {
            let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
                let file = this.file.as_ref()?.as_local()?;

                Some((file.disk_state().mtime(), file.load(cx)))
            })?
            else {
                return Ok(());
            };

            let new_text = new_text.await?;
            let diff = this
                .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
                .await;
            this.update(cx, |this, cx| {
                if this.version() == diff.base_version {
                    this.finalize_last_transaction();
                    this.apply_diff(diff, cx);
                    tx.send(this.finalize_last_transaction().cloned()).ok();
                    this.has_conflict = false;
                    this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
                } else {
                    if !diff.edits.is_empty()
                        || this
                            .edits_since::<usize>(&diff.base_version)
                            .next()
                            .is_some()
                    {
                        this.has_conflict = true;
                    }

                    this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
                }

                this.reload_task.take();
            })
        }));
        rx
    }

    /// This method is called to signal that the buffer has been reloaded.
    pub fn did_reload(
        &mut self,
        version: clock::Global,
        line_ending: LineEnding,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version;
        self.has_unsaved_edits
            .set((self.saved_version.clone(), false));
        self.text.set_line_ending(line_ending);
        self.saved_mtime = mtime;
        cx.emit(BufferEvent::Reloaded);
        cx.notify();
    }

    /// Updates the [`File`] backing this buffer. This should be called when
    /// the file has changed or has been deleted.
    pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
        let was_dirty = self.is_dirty();
        let mut file_changed = false;

        if let Some(old_file) = self.file.as_ref() {
            if new_file.path() != old_file.path() {
                file_changed = true;
            }

            let old_state = old_file.disk_state();
            let new_state = new_file.disk_state();
            if old_state != new_state {
                file_changed = true;
                if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
                    cx.emit(BufferEvent::ReloadNeeded)
                }
            }
        } else {
            file_changed = true;
        };

        self.file = Some(new_file);
        if file_changed {
            self.was_changed();
            self.non_text_state_update_count += 1;
            if was_dirty != self.is_dirty() {
                cx.emit(BufferEvent::DirtyChanged);
            }
            cx.emit(BufferEvent::FileHandleChanged);
            cx.notify();
        }
    }

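    /// Returns the base buffer that this buffer was branched from, if this is
    /// a branch buffer.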
    pub fn base_buffer(&self) -> Option<Entity<Self>> {
        Some(self.branch_state.as_ref()?.base_buffer.clone())
    }

    /// Returns the primary [`Language`] assigned to this [`Buffer`].
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    /// Returns the [`Language`] at the given location.
    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
        let offset = position.to_offset(self);
        self.syntax_map
            .lock()
            .layers_for_range(offset..offset, &self.text, false)
            .last()
            .map(|info| info.language.clone())
            .or_else(|| self.language.clone())
    }

    /// Returns each [`Language`] for the active syntax layers at the given location.
    pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
        let offset = position.to_offset(self);
        let mut languages: Vec<Arc<Language>> = self
            .syntax_map
            .lock()
            .layers_for_range(offset..offset, &self.text, false)
            .map(|info| info.language.clone())
            .collect();

        if languages.is_empty() {
            if let Some(buffer_language) = self.language() {
                languages.push(buffer_language.clone());
            }
        }

        languages
    }

    /// An integer version number that accounts for all updates besides
    /// the buffer's text itself (which is versioned via a version vector).
    pub fn non_text_state_update_count(&self) -> usize {
        self.non_text_state_update_count
    }

    /// Whether the buffer is being parsed in the background.
    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.reparse.is_some()
    }

    /// Indicates whether the buffer contains any regions that may be
    /// written in a language that hasn't been loaded yet.
    pub fn contains_unknown_injections(&self) -> bool {
        self.syntax_map.lock().contains_unknown_injections()
    }

    #[cfg(test)]
    pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
        self.sync_parse_timeout = timeout;
    }

    /// Called after an edit to synchronize the buffer's main parse tree with
    /// the buffer's new underlying state.
    ///
    /// Locks the syntax map and interpolates the edits since the last reparse
    /// into the foreground syntax tree.
    ///
    /// Then takes a stable snapshot of the syntax map before unlocking it.
    /// The snapshot with the interpolated edits is sent to a background thread,
    /// where we ask Tree-sitter to perform an incremental parse.
    ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete. If it completes within that window,
    /// we proceed synchronously.
    ///
    /// If we time out waiting for the parse, we spawn a second task that waits
    /// until the parse does complete, and return with the interpolated tree still
    /// in the foreground. When the background parse completes, we call back into
    /// the main thread and assign the foreground parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
    pub fn reparse(&mut self, cx: &mut Context<Self>) {
        if self.reparse.is_some() {
            return;
        }
        let Some(language) = self.language.clone() else {
            return;
        };

        let text = self.text_snapshot();
        let parsed_version = self.version();

        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let language_registry = syntax_map.language_registry();
        let mut syntax_snapshot = syntax_map.snapshot();
        drop(syntax_map);

        let parse_task = cx.background_spawn({
            let language = language.clone();
            let language_registry = language_registry.clone();
            async move {
                syntax_snapshot.reparse(&text, language_registry, language);
                syntax_snapshot
            }
        });

        self.parse_status.0.send(ParseStatus::Parsing).unwrap();
        match cx
            .background_executor()
            .block_with_timeout(self.sync_parse_timeout, parse_task)
        {
            Ok(new_syntax_snapshot) => {
                self.did_finish_parsing(new_syntax_snapshot, cx);
                self.reparse = None;
            }
            Err(parse_task) => {
                self.reparse = Some(cx.spawn(async move |this, cx| {
                    let new_syntax_map = parse_task.await;
                    this.update(cx, move |this, cx| {
                        let grammar_changed =
                            this.language.as_ref().map_or(true, |current_language| {
                                !Arc::ptr_eq(&language, current_language)
                            });
                        let language_registry_changed = new_syntax_map
                            .contains_unknown_injections()
                            && language_registry.map_or(false, |registry| {
                                registry.version() != new_syntax_map.language_registry_version()
                            });
                        let parse_again = language_registry_changed
                            || grammar_changed
                            || this.version.changed_since(&parsed_version);
                        this.did_finish_parsing(new_syntax_map, cx);
                        this.reparse = None;
                        if parse_again {
                            this.reparse(cx);
                        }
                    })
                    .ok();
                }));
            }
        }
    }

    fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
        self.was_changed();
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().did_parse(syntax_snapshot);
        self.request_autoindent(cx);
        self.parse_status.0.send(ParseStatus::Idle).unwrap();
        cx.emit(BufferEvent::Reparsed);
        cx.notify();
    }

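    /// Returns a watch channel receiver that reports whether the buffer is
    /// currently being reparsed.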
    pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
        self.parse_status.1.clone()
    }

    /// Assign to the buffer a set of diagnostics created by a given language server.
    pub fn update_diagnostics(
        &mut self,
        server_id: LanguageServerId,
        diagnostics: DiagnosticSet,
        cx: &mut Context<Self>,
    ) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        let op = Operation::UpdateDiagnostics {
            server_id,
            diagnostics: diagnostics.iter().cloned().collect(),
            lamport_timestamp,
        };
        self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
        self.send_operation(op, true, cx);
    }

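    /// Returns the diagnostics most recently assigned by the given language
    /// server, if any.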
1547 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1548 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1549 return None;
1550 };
1551 Some(&self.diagnostics[idx].1)
1552 }
1553
1554 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1555 if let Some(indent_sizes) = self.compute_autoindents() {
1556 let indent_sizes = cx.background_spawn(indent_sizes);
1557 match cx
1558 .background_executor()
1559 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1560 {
1561 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1562 Err(indent_sizes) => {
1563 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1564 let indent_sizes = indent_sizes.await;
1565 this.update(cx, |this, cx| {
1566 this.apply_autoindents(indent_sizes, cx);
1567 })
1568 .ok();
1569 }));
1570 }
1571 }
1572 } else {
1573 self.autoindent_requests.clear();
1574 }
1575 }
1576
1577 fn compute_autoindents(
1578 &self,
1579 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1580 let max_rows_between_yields = 100;
1581 let snapshot = self.snapshot();
1582 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1583 return None;
1584 }
1585
1586 let autoindent_requests = self.autoindent_requests.clone();
1587 Some(async move {
1588 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1589 for request in autoindent_requests {
1590 // Resolve each edited range to its row in the current buffer and in the
1591 // buffer before this batch of edits.
1592 let mut row_ranges = Vec::new();
1593 let mut old_to_new_rows = BTreeMap::new();
1594 let mut language_indent_sizes_by_new_row = Vec::new();
1595 for entry in &request.entries {
1596 let position = entry.range.start;
1597 let new_row = position.to_point(&snapshot).row;
1598 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1599 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1600
1601 if !entry.first_line_is_new {
1602 let old_row = position.to_point(&request.before_edit).row;
1603 old_to_new_rows.insert(old_row, new_row);
1604 }
1605 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1606 }
1607
1608 // Build a map containing the suggested indentation for each of the edited lines
1609 // with respect to the state of the buffer before these edits. This map is keyed
1610 // by the rows for these lines in the current state of the buffer.
1611 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1612 let old_edited_ranges =
1613 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1614 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1615 let mut language_indent_size = IndentSize::default();
1616 for old_edited_range in old_edited_ranges {
1617 let suggestions = request
1618 .before_edit
1619 .suggest_autoindents(old_edited_range.clone())
1620 .into_iter()
1621 .flatten();
1622 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1623 if let Some(suggestion) = suggestion {
1624 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1625
1626 // Find the indent size based on the language for this row.
1627 while let Some((row, size)) = language_indent_sizes.peek() {
1628 if *row > new_row {
1629 break;
1630 }
1631 language_indent_size = *size;
1632 language_indent_sizes.next();
1633 }
1634
1635 let suggested_indent = old_to_new_rows
1636 .get(&suggestion.basis_row)
1637 .and_then(|from_row| {
1638 Some(old_suggestions.get(from_row).copied()?.0)
1639 })
1640 .unwrap_or_else(|| {
1641 request
1642 .before_edit
1643 .indent_size_for_line(suggestion.basis_row)
1644 })
1645 .with_delta(suggestion.delta, language_indent_size);
1646 old_suggestions
1647 .insert(new_row, (suggested_indent, suggestion.within_error));
1648 }
1649 }
1650 yield_now().await;
1651 }
1652
1653 // Compute new suggestions for each line, but only include them in the result
1654 // if they differ from the old suggestion for that line.
1655 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1656 let mut language_indent_size = IndentSize::default();
1657 for (row_range, original_indent_column) in row_ranges {
1658 let new_edited_row_range = if request.is_block_mode {
1659 row_range.start..row_range.start + 1
1660 } else {
1661 row_range.clone()
1662 };
1663
1664 let suggestions = snapshot
1665 .suggest_autoindents(new_edited_row_range.clone())
1666 .into_iter()
1667 .flatten();
1668 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1669 if let Some(suggestion) = suggestion {
1670 // Find the indent size based on the language for this row.
1671 while let Some((row, size)) = language_indent_sizes.peek() {
1672 if *row > new_row {
1673 break;
1674 }
1675 language_indent_size = *size;
1676 language_indent_sizes.next();
1677 }
1678
1679 let suggested_indent = indent_sizes
1680 .get(&suggestion.basis_row)
1681 .copied()
1682 .map(|e| e.0)
1683 .unwrap_or_else(|| {
1684 snapshot.indent_size_for_line(suggestion.basis_row)
1685 })
1686 .with_delta(suggestion.delta, language_indent_size);
1687
1688 if old_suggestions.get(&new_row).map_or(
1689 true,
1690 |(old_indentation, was_within_error)| {
1691 suggested_indent != *old_indentation
1692 && (!suggestion.within_error || *was_within_error)
1693 },
1694 ) {
1695 indent_sizes.insert(
1696 new_row,
1697 (suggested_indent, request.ignore_empty_lines),
1698 );
1699 }
1700 }
1701 }
1702
1703 if let (true, Some(original_indent_column)) =
1704 (request.is_block_mode, original_indent_column)
1705 {
1706 let new_indent =
1707 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1708 *indent
1709 } else {
1710 snapshot.indent_size_for_line(row_range.start)
1711 };
1712 let delta = new_indent.len as i64 - original_indent_column as i64;
1713 if delta != 0 {
1714 for row in row_range.skip(1) {
1715 indent_sizes.entry(row).or_insert_with(|| {
1716 let mut size = snapshot.indent_size_for_line(row);
1717 if size.kind == new_indent.kind {
1718 match delta.cmp(&0) {
1719 Ordering::Greater => size.len += delta as u32,
1720 Ordering::Less => {
1721 size.len = size.len.saturating_sub(-delta as u32)
1722 }
1723 Ordering::Equal => {}
1724 }
1725 }
1726 (size, request.ignore_empty_lines)
1727 });
1728 }
1729 }
1730 }
1731
1732 yield_now().await;
1733 }
1734 }
1735
1736 indent_sizes
1737 .into_iter()
1738 .filter_map(|(row, (indent, ignore_empty_lines))| {
1739 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1740 None
1741 } else {
1742 Some((row, indent))
1743 }
1744 })
1745 .collect()
1746 })
1747 }
1748
1749 fn apply_autoindents(
1750 &mut self,
1751 indent_sizes: BTreeMap<u32, IndentSize>,
1752 cx: &mut Context<Self>,
1753 ) {
1754 self.autoindent_requests.clear();
1755
1756 let edits: Vec<_> = indent_sizes
1757 .into_iter()
1758 .filter_map(|(row, indent_size)| {
1759 let current_size = indent_size_for_line(self, row);
1760 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1761 })
1762 .collect();
1763
1764 let preserve_preview = self.preserve_preview();
1765 self.edit(edits, None, cx);
1766 if preserve_preview {
1767 self.refresh_preview();
1768 }
1769 }
1770
1771 /// Create a minimal edit that will cause the given row to be indented
1772 /// with the given size. After applying this edit, the length of the line
1773 /// will always be at least `new_size.len`.
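    ///
    /// A minimal sketch of the edit this produces (the row and sizes are illustrative;
    /// the expected result follows directly from the match arms below):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces inserts the two missing spaces at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```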
1774 pub fn edit_for_indent_size_adjustment(
1775 row: u32,
1776 current_size: IndentSize,
1777 new_size: IndentSize,
1778 ) -> Option<(Range<Point>, String)> {
1779 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1781 Ordering::Greater => {
1782 let point = Point::new(row, 0);
1783 Some((
1784 point..point,
1785 iter::repeat(new_size.char())
1786 .take((new_size.len - current_size.len) as usize)
1787 .collect::<String>(),
1788 ))
1789 }
1790
1791 Ordering::Less => Some((
1792 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1793 String::new(),
1794 )),
1795
1796 Ordering::Equal => None,
1797 }
1798 } else {
1799 Some((
1800 Point::new(row, 0)..Point::new(row, current_size.len),
1801 iter::repeat(new_size.char())
1802 .take(new_size.len as usize)
1803 .collect::<String>(),
1804 ))
1805 }
1806 }
1807
1808 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1809 /// and the given new text.
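    ///
    /// A sketch of the intended round-trip, assuming a GPUI `cx` and an async context in
    /// which the returned task can be awaited:
    ///
    /// ```ignore
    /// // Compute the diff on a background thread...
    /// let diff = buffer.diff(new_text, cx).await;
    /// // ...then apply it; `apply_diff` accounts for edits made in the meantime.
    /// buffer.apply_diff(diff, cx);
    /// ```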
1810 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1811 let old_text = self.as_rope().clone();
1812 let base_version = self.version();
1813 cx.background_executor()
1814 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1815 let old_text = old_text.to_string();
1816 let line_ending = LineEnding::detect(&new_text);
1817 LineEnding::normalize(&mut new_text);
1818 let edits = text_diff(&old_text, &new_text);
1819 Diff {
1820 base_version,
1821 line_ending,
1822 edits,
1823 }
1824 })
1825 }
1826
1827 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
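    ///
    /// Like [`Buffer::diff`], the resulting [`Diff`] is typically passed back to
    /// [`Buffer::apply_diff`] once the task resolves (sketch, assuming a GPUI `cx`):
    ///
    /// ```ignore
    /// let diff = buffer.remove_trailing_whitespace(cx).await;
    /// buffer.apply_diff(diff, cx);
    /// ```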
1829 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1830 let old_text = self.as_rope().clone();
1831 let line_ending = self.line_ending();
1832 let base_version = self.version();
1833 cx.background_spawn(async move {
1834 let ranges = trailing_whitespace_ranges(&old_text);
1835 let empty = Arc::<str>::from("");
1836 Diff {
1837 base_version,
1838 line_ending,
1839 edits: ranges
1840 .into_iter()
1841 .map(|range| (range, empty.clone()))
1842 .collect(),
1843 }
1844 })
1845 }
1846
1847 /// Ensures that the buffer ends with a single newline character, and
1848 /// no other whitespace.
1849 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1850 let len = self.len();
1851 let mut offset = len;
1852 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1853 let non_whitespace_len = chunk
1854 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1855 .len();
1856 offset -= chunk.len();
1857 offset += non_whitespace_len;
1858 if non_whitespace_len != 0 {
1859 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1860 return;
1861 }
1862 break;
1863 }
1864 }
1865 self.edit([(offset..len, "\n")], None, cx);
1866 }
1867
1868 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1869 /// calculated, then adjust the diff to account for those changes, and discard any
1870 /// parts of the diff that conflict with those changes.
1871 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1872 let snapshot = self.snapshot();
1873 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1874 let mut delta = 0;
1875 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1876 while let Some(edit_since) = edits_since.peek() {
1877 // If the edit occurs after a diff hunk, then it does not
1878 // affect that hunk.
1879 if edit_since.old.start > range.end {
1880 break;
1881 }
1882 // If the edit precedes the diff hunk, then adjust the hunk
1883 // to reflect the edit.
1884 else if edit_since.old.end < range.start {
1885 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1886 edits_since.next();
1887 }
1888 // If the edit intersects a diff hunk, then discard that hunk.
1889 else {
1890 return None;
1891 }
1892 }
1893
1894 let start = (range.start as i64 + delta) as usize;
1895 let end = (range.end as i64 + delta) as usize;
1896 Some((start..end, new_text))
1897 });
1898
1899 self.start_transaction();
1900 self.text.set_line_ending(diff.line_ending);
1901 self.edit(adjusted_edits, None, cx);
1902 self.end_transaction(cx)
1903 }
1904
1905 fn has_unsaved_edits(&self) -> bool {
1906 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1907
1908 if last_version == self.version {
1909 self.has_unsaved_edits
1910 .set((last_version, has_unsaved_edits));
1911 return has_unsaved_edits;
1912 }
1913
1914 let has_edits = self.has_edits_since(&self.saved_version);
1915 self.has_unsaved_edits
1916 .set((self.version.clone(), has_edits));
1917 has_edits
1918 }
1919
1920 /// Checks if the buffer has unsaved changes.
1921 pub fn is_dirty(&self) -> bool {
1922 if self.capability == Capability::ReadOnly {
1923 return false;
1924 }
1925 if self.has_conflict {
1926 return true;
1927 }
1928 match self.file.as_ref().map(|f| f.disk_state()) {
1929 Some(DiskState::New) | Some(DiskState::Deleted) => {
1930 !self.is_empty() && self.has_unsaved_edits()
1931 }
1932 _ => self.has_unsaved_edits(),
1933 }
1934 }
1935
1936 /// Checks if the buffer and its file have both changed since the buffer
1937 /// was last saved or reloaded.
1938 pub fn has_conflict(&self) -> bool {
1939 if self.has_conflict {
1940 return true;
1941 }
1942 let Some(file) = self.file.as_ref() else {
1943 return false;
1944 };
1945 match file.disk_state() {
1946 DiskState::New => false,
1947 DiskState::Present { mtime } => match self.saved_mtime {
1948 Some(saved_mtime) => {
1949 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1950 }
1951 None => true,
1952 },
1953 DiskState::Deleted => false,
1954 }
1955 }
1956
1957 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1958 pub fn subscribe(&mut self) -> Subscription {
1959 self.text.subscribe()
1960 }
1961
1962 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1963 ///
1964 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
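    ///
    /// A minimal sketch of the intended pattern (the `changed` flag is illustrative):
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...after some edits have been applied:
    /// if changed.get() {
    ///     // React to the change without waiting for an event.
    /// }
    /// ```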
1966 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
1967 if let Err(ix) = self
1968 .change_bits
1969 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
1970 {
1971 self.change_bits.insert(ix, bit);
1972 }
1973 }
1974
1975 fn was_changed(&mut self) {
1976 self.change_bits.retain(|change_bit| {
1977 change_bit.upgrade().map_or(false, |bit| {
1978 bit.replace(true);
1979 true
1980 })
1981 });
1982 }
1983
1984 /// Starts a transaction, if one is not already in-progress. When undoing or
1985 /// redoing edits, all of the edits performed within a transaction are undone
1986 /// or redone together.
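    ///
    /// A sketch of grouping several edits into a single undo step (assumes a GPUI `cx`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(buffer.len()..buffer.len(), "fn main() {\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
    /// buffer.end_transaction(cx); // a single undo now reverts both edits
    /// ```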
1987 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1988 self.start_transaction_at(Instant::now())
1989 }
1990
1991 /// Starts a transaction, providing the current time. Subsequent transactions
1992 /// that occur within a short period of time will be grouped together. This
1993 /// is controlled by the buffer's undo grouping duration.
1994 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1995 self.transaction_depth += 1;
1996 if self.was_dirty_before_starting_transaction.is_none() {
1997 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1998 }
1999 self.text.start_transaction_at(now)
2000 }
2001
2002 /// Terminates the current transaction, if this is the outermost transaction.
2003 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2004 self.end_transaction_at(Instant::now(), cx)
2005 }
2006
2007 /// Terminates the current transaction, providing the current time. Subsequent transactions
2008 /// that occur within a short period of time will be grouped together. This
2009 /// is controlled by the buffer's undo grouping duration.
2010 pub fn end_transaction_at(
2011 &mut self,
2012 now: Instant,
2013 cx: &mut Context<Self>,
2014 ) -> Option<TransactionId> {
2015 assert!(self.transaction_depth > 0);
2016 self.transaction_depth -= 1;
2017 let was_dirty = if self.transaction_depth == 0 {
2018 self.was_dirty_before_starting_transaction.take().unwrap()
2019 } else {
2020 false
2021 };
2022 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2023 self.did_edit(&start_version, was_dirty, cx);
2024 Some(transaction_id)
2025 } else {
2026 None
2027 }
2028 }
2029
2030 /// Manually add a transaction to the buffer's undo history.
2031 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2032 self.text.push_transaction(transaction, now);
2033 }
2034
2035 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2037 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2038 self.text.finalize_last_transaction()
2039 }
2040
2041 /// Manually group all changes since a given transaction.
2042 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2043 self.text.group_until_transaction(transaction_id);
2044 }
2045
    /// Manually remove a transaction from the buffer's undo history.
2047 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2048 self.text.forget_transaction(transaction_id)
2049 }
2050
    /// Retrieve a transaction from the buffer's undo history.
2052 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2053 self.text.get_transaction(transaction_id)
2054 }
2055
2056 /// Manually merge two transactions in the buffer's undo history.
2057 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2058 self.text.merge_transactions(transaction, destination);
2059 }
2060
2061 /// Waits for the buffer to receive operations with the given timestamps.
2062 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2063 &mut self,
2064 edit_ids: It,
2065 ) -> impl Future<Output = Result<()>> + use<It> {
2066 self.text.wait_for_edits(edit_ids)
2067 }
2068
2069 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2070 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2071 &mut self,
2072 anchors: It,
2073 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2074 self.text.wait_for_anchors(anchors)
2075 }
2076
2077 /// Waits for the buffer to receive operations up to the given version.
2078 pub fn wait_for_version(
2079 &mut self,
2080 version: clock::Global,
2081 ) -> impl Future<Output = Result<()>> + use<> {
2082 self.text.wait_for_version(version)
2083 }
2084
2085 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2087 pub fn give_up_waiting(&mut self) {
2088 self.text.give_up_waiting();
2089 }
2090
2091 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2092 pub fn set_active_selections(
2093 &mut self,
2094 selections: Arc<[Selection<Anchor>]>,
2095 line_mode: bool,
2096 cursor_shape: CursorShape,
2097 cx: &mut Context<Self>,
2098 ) {
2099 let lamport_timestamp = self.text.lamport_clock.tick();
2100 self.remote_selections.insert(
2101 self.text.replica_id(),
2102 SelectionSet {
2103 selections: selections.clone(),
2104 lamport_timestamp,
2105 line_mode,
2106 cursor_shape,
2107 },
2108 );
2109 self.send_operation(
2110 Operation::UpdateSelections {
2111 selections,
2112 line_mode,
2113 lamport_timestamp,
2114 cursor_shape,
2115 },
2116 true,
2117 cx,
2118 );
2119 self.non_text_state_update_count += 1;
2120 cx.notify();
2121 }
2122
2123 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2124 /// this replica.
2125 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2126 if self
2127 .remote_selections
2128 .get(&self.text.replica_id())
2129 .map_or(true, |set| !set.selections.is_empty())
2130 {
2131 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2132 }
2133 }
2134
2135 pub fn set_agent_selections(
2136 &mut self,
2137 selections: Arc<[Selection<Anchor>]>,
2138 line_mode: bool,
2139 cursor_shape: CursorShape,
2140 cx: &mut Context<Self>,
2141 ) {
2142 let lamport_timestamp = self.text.lamport_clock.tick();
2143 self.remote_selections.insert(
2144 AGENT_REPLICA_ID,
2145 SelectionSet {
2146 selections: selections.clone(),
2147 lamport_timestamp,
2148 line_mode,
2149 cursor_shape,
2150 },
2151 );
2152 self.non_text_state_update_count += 1;
2153 cx.notify();
2154 }
2155
2156 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2157 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2158 }
2159
2160 /// Replaces the buffer's entire text.
2161 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2162 where
2163 T: Into<Arc<str>>,
2164 {
2165 self.autoindent_requests.clear();
2166 self.edit([(0..self.len(), text)], None, cx)
2167 }
2168
2169 /// Appends the given text to the end of the buffer.
2170 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2171 where
2172 T: Into<Arc<str>>,
2173 {
2174 self.edit([(self.len()..self.len(), text)], None, cx)
2175 }
2176
2177 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2178 /// delete, and a string of text to insert at that location.
2179 ///
2180 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2181 /// request for the edited ranges, which will be processed when the buffer finishes
2182 /// parsing.
2183 ///
2184 /// Parsing takes place at the end of a transaction, and may compute synchronously
2185 /// or asynchronously, depending on the changes.
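    ///
    /// A minimal sketch (assumes a GPUI `cx`; the range here is a byte-offset range, but
    /// any [`ToOffset`] type such as [`Point`] works as well):
    ///
    /// ```ignore
    /// // Replace the entire buffer and auto-indent each line of the insertion.
    /// buffer.edit(
    ///     [(0..buffer.len(), "fn main() {\nprintln!(\"hi\");\n}\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```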
2186 pub fn edit<I, S, T>(
2187 &mut self,
2188 edits_iter: I,
2189 autoindent_mode: Option<AutoindentMode>,
2190 cx: &mut Context<Self>,
2191 ) -> Option<clock::Lamport>
2192 where
2193 I: IntoIterator<Item = (Range<S>, T)>,
2194 S: ToOffset,
2195 T: Into<Arc<str>>,
2196 {
2197 // Skip invalid edits and coalesce contiguous ones.
2198 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2199
2200 for (range, new_text) in edits_iter {
2201 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2202
2203 if range.start > range.end {
2204 mem::swap(&mut range.start, &mut range.end);
2205 }
2206 let new_text = new_text.into();
2207 if !new_text.is_empty() || !range.is_empty() {
2208 if let Some((prev_range, prev_text)) = edits.last_mut() {
2209 if prev_range.end >= range.start {
2210 prev_range.end = cmp::max(prev_range.end, range.end);
2211 *prev_text = format!("{prev_text}{new_text}").into();
2212 } else {
2213 edits.push((range, new_text));
2214 }
2215 } else {
2216 edits.push((range, new_text));
2217 }
2218 }
2219 }
2220 if edits.is_empty() {
2221 return None;
2222 }
2223
2224 self.start_transaction();
2225 self.pending_autoindent.take();
2226 let autoindent_request = autoindent_mode
2227 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2228
2229 let edit_operation = self.text.edit(edits.iter().cloned());
2230 let edit_id = edit_operation.timestamp();
2231
2232 if let Some((before_edit, mode)) = autoindent_request {
2233 let mut delta = 0isize;
2234 let entries = edits
2235 .into_iter()
2236 .enumerate()
2237 .zip(&edit_operation.as_edit().unwrap().new_text)
2238 .map(|((ix, (range, _)), new_text)| {
2239 let new_text_length = new_text.len();
2240 let old_start = range.start.to_point(&before_edit);
2241 let new_start = (delta + range.start as isize) as usize;
2242 let range_len = range.end - range.start;
2243 delta += new_text_length as isize - range_len as isize;
2244
2245 // Decide what range of the insertion to auto-indent, and whether
2246 // the first line of the insertion should be considered a newly-inserted line
2247 // or an edit to an existing line.
2248 let mut range_of_insertion_to_indent = 0..new_text_length;
2249 let mut first_line_is_new = true;
2250
2251 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2252 let old_line_end = before_edit.line_len(old_start.row);
2253
2254 if old_start.column > old_line_start {
2255 first_line_is_new = false;
2256 }
2257
2258 if !new_text.contains('\n')
2259 && (old_start.column + (range_len as u32) < old_line_end
2260 || old_line_end == old_line_start)
2261 {
2262 first_line_is_new = false;
2263 }
2264
2265 // When inserting text starting with a newline, avoid auto-indenting the
2266 // previous line.
2267 if new_text.starts_with('\n') {
2268 range_of_insertion_to_indent.start += 1;
2269 first_line_is_new = true;
2270 }
2271
2272 let mut original_indent_column = None;
2273 if let AutoindentMode::Block {
2274 original_indent_columns,
2275 } = &mode
2276 {
2277 original_indent_column = Some(if new_text.starts_with('\n') {
2278 indent_size_for_text(
2279 new_text[range_of_insertion_to_indent.clone()].chars(),
2280 )
2281 .len
2282 } else {
2283 original_indent_columns
2284 .get(ix)
2285 .copied()
2286 .flatten()
2287 .unwrap_or_else(|| {
2288 indent_size_for_text(
2289 new_text[range_of_insertion_to_indent.clone()].chars(),
2290 )
2291 .len
2292 })
2293 });
2294
2295 // Avoid auto-indenting the line after the edit.
2296 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2297 range_of_insertion_to_indent.end -= 1;
2298 }
2299 }
2300
2301 AutoindentRequestEntry {
2302 first_line_is_new,
2303 original_indent_column,
2304 indent_size: before_edit.language_indent_size_at(range.start, cx),
2305 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2306 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2307 }
2308 })
2309 .collect();
2310
2311 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2312 before_edit,
2313 entries,
2314 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2315 ignore_empty_lines: false,
2316 }));
2317 }
2318
2319 self.end_transaction(cx);
2320 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2321 Some(edit_id)
2322 }
2323
2324 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2325 self.was_changed();
2326
2327 if self.edits_since::<usize>(old_version).next().is_none() {
2328 return;
2329 }
2330
2331 self.reparse(cx);
2332 cx.emit(BufferEvent::Edited);
2333 if was_dirty != self.is_dirty() {
2334 cx.emit(BufferEvent::DirtyChanged);
2335 }
2336 cx.notify();
2337 }
2338
2339 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2340 where
2341 I: IntoIterator<Item = Range<T>>,
2342 T: ToOffset + Copy,
2343 {
2344 let before_edit = self.snapshot();
2345 let entries = ranges
2346 .into_iter()
2347 .map(|range| AutoindentRequestEntry {
2348 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2349 first_line_is_new: true,
2350 indent_size: before_edit.language_indent_size_at(range.start, cx),
2351 original_indent_column: None,
2352 })
2353 .collect();
2354 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2355 before_edit,
2356 entries,
2357 is_block_mode: false,
2358 ignore_empty_lines: true,
2359 }));
2360 self.request_autoindent(cx);
2361 }
2362
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2365 pub fn insert_empty_line(
2366 &mut self,
2367 position: impl ToPoint,
2368 space_above: bool,
2369 space_below: bool,
2370 cx: &mut Context<Self>,
2371 ) -> Point {
2372 let mut position = position.to_point(self);
2373
2374 self.start_transaction();
2375
2376 self.edit(
2377 [(position..position, "\n")],
2378 Some(AutoindentMode::EachLine),
2379 cx,
2380 );
2381
2382 if position.column > 0 {
2383 position += Point::new(1, 0);
2384 }
2385
2386 if !self.is_line_blank(position.row) {
2387 self.edit(
2388 [(position..position, "\n")],
2389 Some(AutoindentMode::EachLine),
2390 cx,
2391 );
2392 }
2393
2394 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2395 self.edit(
2396 [(position..position, "\n")],
2397 Some(AutoindentMode::EachLine),
2398 cx,
2399 );
2400 position.row += 1;
2401 }
2402
2403 if space_below
2404 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2405 {
2406 self.edit(
2407 [(position..position, "\n")],
2408 Some(AutoindentMode::EachLine),
2409 cx,
2410 );
2411 }
2412
2413 self.end_transaction(cx);
2414
2415 position
2416 }
2417
2418 /// Applies the given remote operations to the buffer.
2419 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2420 self.pending_autoindent.take();
2421 let was_dirty = self.is_dirty();
2422 let old_version = self.version.clone();
2423 let mut deferred_ops = Vec::new();
2424 let buffer_ops = ops
2425 .into_iter()
2426 .filter_map(|op| match op {
2427 Operation::Buffer(op) => Some(op),
2428 _ => {
2429 if self.can_apply_op(&op) {
2430 self.apply_op(op, cx);
2431 } else {
2432 deferred_ops.push(op);
2433 }
2434 None
2435 }
2436 })
2437 .collect::<Vec<_>>();
2438 for operation in buffer_ops.iter() {
2439 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2440 }
2441 self.text.apply_ops(buffer_ops);
2442 self.deferred_ops.insert(deferred_ops);
2443 self.flush_deferred_ops(cx);
2444 self.did_edit(&old_version, was_dirty, cx);
2445 // Notify independently of whether the buffer was edited as the operations could include a
2446 // selection update.
2447 cx.notify();
2448 }
2449
2450 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2451 let mut deferred_ops = Vec::new();
2452 for op in self.deferred_ops.drain().iter().cloned() {
2453 if self.can_apply_op(&op) {
2454 self.apply_op(op, cx);
2455 } else {
2456 deferred_ops.push(op);
2457 }
2458 }
2459 self.deferred_ops.insert(deferred_ops);
2460 }
2461
2462 pub fn has_deferred_ops(&self) -> bool {
2463 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2464 }
2465
2466 fn can_apply_op(&self, operation: &Operation) -> bool {
2467 match operation {
2468 Operation::Buffer(_) => {
2469 unreachable!("buffer operations should never be applied at this layer")
2470 }
2471 Operation::UpdateDiagnostics {
2472 diagnostics: diagnostic_set,
2473 ..
2474 } => diagnostic_set.iter().all(|diagnostic| {
2475 self.text.can_resolve(&diagnostic.range.start)
2476 && self.text.can_resolve(&diagnostic.range.end)
2477 }),
2478 Operation::UpdateSelections { selections, .. } => selections
2479 .iter()
2480 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2481 Operation::UpdateCompletionTriggers { .. } => true,
2482 }
2483 }
2484
2485 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2486 match operation {
2487 Operation::Buffer(_) => {
2488 unreachable!("buffer operations should never be applied at this layer")
2489 }
2490 Operation::UpdateDiagnostics {
2491 server_id,
2492 diagnostics: diagnostic_set,
2493 lamport_timestamp,
2494 } => {
2495 let snapshot = self.snapshot();
2496 self.apply_diagnostic_update(
2497 server_id,
2498 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2499 lamport_timestamp,
2500 cx,
2501 );
2502 }
2503 Operation::UpdateSelections {
2504 selections,
2505 lamport_timestamp,
2506 line_mode,
2507 cursor_shape,
2508 } => {
2509 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2510 if set.lamport_timestamp > lamport_timestamp {
2511 return;
2512 }
2513 }
2514
2515 self.remote_selections.insert(
2516 lamport_timestamp.replica_id,
2517 SelectionSet {
2518 selections,
2519 lamport_timestamp,
2520 line_mode,
2521 cursor_shape,
2522 },
2523 );
2524 self.text.lamport_clock.observe(lamport_timestamp);
2525 self.non_text_state_update_count += 1;
2526 }
2527 Operation::UpdateCompletionTriggers {
2528 triggers,
2529 lamport_timestamp,
2530 server_id,
2531 } => {
2532 if triggers.is_empty() {
2533 self.completion_triggers_per_language_server
2534 .remove(&server_id);
2535 self.completion_triggers = self
2536 .completion_triggers_per_language_server
2537 .values()
2538 .flat_map(|triggers| triggers.into_iter().cloned())
2539 .collect();
2540 } else {
2541 self.completion_triggers_per_language_server
2542 .insert(server_id, triggers.iter().cloned().collect());
2543 self.completion_triggers.extend(triggers);
2544 }
2545 self.text.lamport_clock.observe(lamport_timestamp);
2546 }
2547 }
2548 }
2549
2550 fn apply_diagnostic_update(
2551 &mut self,
2552 server_id: LanguageServerId,
2553 diagnostics: DiagnosticSet,
2554 lamport_timestamp: clock::Lamport,
2555 cx: &mut Context<Self>,
2556 ) {
2557 if lamport_timestamp > self.diagnostics_timestamp {
2558 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2559 if diagnostics.is_empty() {
2560 if let Ok(ix) = ix {
2561 self.diagnostics.remove(ix);
2562 }
2563 } else {
2564 match ix {
2565 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2566 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2567 };
2568 }
2569 self.diagnostics_timestamp = lamport_timestamp;
2570 self.non_text_state_update_count += 1;
2571 self.text.lamport_clock.observe(lamport_timestamp);
2572 cx.notify();
2573 cx.emit(BufferEvent::DiagnosticsUpdated);
2574 }
2575 }
2576
2577 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2578 self.was_changed();
2579 cx.emit(BufferEvent::Operation {
2580 operation,
2581 is_local,
2582 });
2583 }
2584
2585 /// Removes the selections for a given peer.
2586 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2587 self.remote_selections.remove(&replica_id);
2588 cx.notify();
2589 }
2590
2591 /// Undoes the most recent transaction.
2592 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2593 let was_dirty = self.is_dirty();
2594 let old_version = self.version.clone();
2595
2596 if let Some((transaction_id, operation)) = self.text.undo() {
2597 self.send_operation(Operation::Buffer(operation), true, cx);
2598 self.did_edit(&old_version, was_dirty, cx);
2599 Some(transaction_id)
2600 } else {
2601 None
2602 }
2603 }
2604
2605 /// Manually undoes a specific transaction in the buffer's undo history.
2606 pub fn undo_transaction(
2607 &mut self,
2608 transaction_id: TransactionId,
2609 cx: &mut Context<Self>,
2610 ) -> bool {
2611 let was_dirty = self.is_dirty();
2612 let old_version = self.version.clone();
2613 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2614 self.send_operation(Operation::Buffer(operation), true, cx);
2615 self.did_edit(&old_version, was_dirty, cx);
2616 true
2617 } else {
2618 false
2619 }
2620 }
2621
2622 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2623 pub fn undo_to_transaction(
2624 &mut self,
2625 transaction_id: TransactionId,
2626 cx: &mut Context<Self>,
2627 ) -> bool {
2628 let was_dirty = self.is_dirty();
2629 let old_version = self.version.clone();
2630
2631 let operations = self.text.undo_to_transaction(transaction_id);
2632 let undone = !operations.is_empty();
2633 for operation in operations {
2634 self.send_operation(Operation::Buffer(operation), true, cx);
2635 }
2636 if undone {
2637 self.did_edit(&old_version, was_dirty, cx)
2638 }
2639 undone
2640 }
2641
2642 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2643 let was_dirty = self.is_dirty();
2644 let operation = self.text.undo_operations(counts);
2645 let old_version = self.version.clone();
2646 self.send_operation(Operation::Buffer(operation), true, cx);
2647 self.did_edit(&old_version, was_dirty, cx);
2648 }
2649
    /// Redoes the most recently undone transaction.
2651 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2652 let was_dirty = self.is_dirty();
2653 let old_version = self.version.clone();
2654
2655 if let Some((transaction_id, operation)) = self.text.redo() {
2656 self.send_operation(Operation::Buffer(operation), true, cx);
2657 self.did_edit(&old_version, was_dirty, cx);
2658 Some(transaction_id)
2659 } else {
2660 None
2661 }
2662 }
2663
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2665 pub fn redo_to_transaction(
2666 &mut self,
2667 transaction_id: TransactionId,
2668 cx: &mut Context<Self>,
2669 ) -> bool {
2670 let was_dirty = self.is_dirty();
2671 let old_version = self.version.clone();
2672
2673 let operations = self.text.redo_to_transaction(transaction_id);
2674 let redone = !operations.is_empty();
2675 for operation in operations {
2676 self.send_operation(Operation::Buffer(operation), true, cx);
2677 }
2678 if redone {
2679 self.did_edit(&old_version, was_dirty, cx)
2680 }
2681 redone
2682 }
2683
2684 /// Override current completion triggers with the user-provided completion triggers.
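    ///
    /// Sketch (the server id and trigger strings are illustrative):
    ///
    /// ```ignore
    /// buffer.set_completion_triggers(
    ///     server_id,
    ///     BTreeSet::from([".".to_string(), "::".to_string()]),
    ///     cx,
    /// );
    /// ```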
2685 pub fn set_completion_triggers(
2686 &mut self,
2687 server_id: LanguageServerId,
2688 triggers: BTreeSet<String>,
2689 cx: &mut Context<Self>,
2690 ) {
2691 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2692 if triggers.is_empty() {
2693 self.completion_triggers_per_language_server
2694 .remove(&server_id);
2695 self.completion_triggers = self
2696 .completion_triggers_per_language_server
2697 .values()
2698 .flat_map(|triggers| triggers.into_iter().cloned())
2699 .collect();
2700 } else {
2701 self.completion_triggers_per_language_server
2702 .insert(server_id, triggers.clone());
2703 self.completion_triggers.extend(triggers.iter().cloned());
2704 }
2705 self.send_operation(
2706 Operation::UpdateCompletionTriggers {
2707 triggers: triggers.iter().cloned().collect(),
2708 lamport_timestamp: self.completion_triggers_timestamp,
2709 server_id,
2710 },
2711 true,
2712 cx,
2713 );
2714 cx.notify();
2715 }
2716
2717 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2719 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2720 &self.completion_triggers
2721 }
2722
2723 /// Call this directly after performing edits to prevent the preview tab
2724 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2725 /// to return false until there are additional edits.
2726 pub fn refresh_preview(&mut self) {
2727 self.preview_version = self.version.clone();
2728 }
2729
2730 /// Whether we should preserve the preview status of a tab containing this buffer.
2731 pub fn preserve_preview(&self) -> bool {
2732 !self.has_edits_since(&self.preview_version)
2733 }
2734}
2735
2736#[doc(hidden)]
2737#[cfg(any(test, feature = "test-support"))]
2738impl Buffer {
2739 pub fn edit_via_marked_text(
2740 &mut self,
2741 marked_string: &str,
2742 autoindent_mode: Option<AutoindentMode>,
2743 cx: &mut Context<Self>,
2744 ) {
2745 let edits = self.edits_for_marked_text(marked_string);
2746 self.edit(edits, autoindent_mode, cx);
2747 }
2748
2749 pub fn set_group_interval(&mut self, group_interval: Duration) {
2750 self.text.set_group_interval(group_interval);
2751 }
2752
2753 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2754 where
2755 T: rand::Rng,
2756 {
2757 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2758 let mut last_end = None;
2759 for _ in 0..old_range_count {
2760 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2761 break;
2762 }
2763
2764 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2765 let mut range = self.random_byte_range(new_start, rng);
2766 if rng.gen_bool(0.2) {
2767 mem::swap(&mut range.start, &mut range.end);
2768 }
2769 last_end = Some(range.end);
2770
2771 let new_text_len = rng.gen_range(0..10);
2772 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2773 new_text = new_text.to_uppercase();
2774
2775 edits.push((range, new_text));
2776 }
2777 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2778 self.edit(edits, None, cx);
2779 }
2780
2781 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2782 let was_dirty = self.is_dirty();
2783 let old_version = self.version.clone();
2784
2785 let ops = self.text.randomly_undo_redo(rng);
2786 if !ops.is_empty() {
2787 for op in ops {
2788 self.send_operation(Operation::Buffer(op), true, cx);
2789 self.did_edit(&old_version, was_dirty, cx);
2790 }
2791 }
2792 }
2793}
2794
2795impl EventEmitter<BufferEvent> for Buffer {}
2796
2797impl Deref for Buffer {
2798 type Target = TextBuffer;
2799
2800 fn deref(&self) -> &Self::Target {
2801 &self.text
2802 }
2803}
2804
2805impl BufferSnapshot {
2806 /// Returns [`IndentSize`] for a given line that respects user settings and
2807 /// language preferences.
2808 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2809 indent_size_for_line(self, row)
2810 }
2811
2812 /// Returns [`IndentSize`] for a given position that respects user settings
2813 /// and language preferences.
2814 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2815 let settings = language_settings(
2816 self.language_at(position).map(|l| l.name()),
2817 self.file(),
2818 cx,
2819 );
2820 if settings.hard_tabs {
2821 IndentSize::tab()
2822 } else {
2823 IndentSize::spaces(settings.tab_size.get())
2824 }
2825 }
2826
2827 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2828 /// is passed in as `single_indent_size`.
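    ///
    /// Sketch (the row range and the 4-space unit are illustrative):
    ///
    /// ```ignore
    /// let indents = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row}: indent to {} columns", indent.len);
    /// }
    /// ```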
2829 pub fn suggested_indents(
2830 &self,
2831 rows: impl Iterator<Item = u32>,
2832 single_indent_size: IndentSize,
2833 ) -> BTreeMap<u32, IndentSize> {
2834 let mut result = BTreeMap::new();
2835
2836 for row_range in contiguous_ranges(rows, 10) {
2837 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2838 Some(suggestions) => suggestions,
2839 _ => break,
2840 };
2841
2842 for (row, suggestion) in row_range.zip(suggestions) {
2843 let indent_size = if let Some(suggestion) = suggestion {
2844 result
2845 .get(&suggestion.basis_row)
2846 .copied()
2847 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2848 .with_delta(suggestion.delta, single_indent_size)
2849 } else {
2850 self.indent_size_for_line(row)
2851 };
2852
2853 result.insert(row, indent_size);
2854 }
2855 }
2856
2857 result
2858 }
2859
2860 fn suggest_autoindents(
2861 &self,
2862 row_range: Range<u32>,
2863 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2864 let config = &self.language.as_ref()?.config;
2865 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2866
2867 // Find the suggested indentation ranges based on the syntax tree.
2868 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2869 let end = Point::new(row_range.end, 0);
2870 let range = (start..end).to_offset(&self.text);
2871 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2872 Some(&grammar.indents_config.as_ref()?.query)
2873 });
2874 let indent_configs = matches
2875 .grammars()
2876 .iter()
2877 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2878 .collect::<Vec<_>>();
2879
2880 let mut indent_ranges = Vec::<Range<Point>>::new();
2881 let mut outdent_positions = Vec::<Point>::new();
2882 while let Some(mat) = matches.peek() {
2883 let mut start: Option<Point> = None;
2884 let mut end: Option<Point> = None;
2885
2886 let config = &indent_configs[mat.grammar_index];
2887 for capture in mat.captures {
2888 if capture.index == config.indent_capture_ix {
2889 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2890 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2891 } else if Some(capture.index) == config.start_capture_ix {
2892 start = Some(Point::from_ts_point(capture.node.end_position()));
2893 } else if Some(capture.index) == config.end_capture_ix {
2894 end = Some(Point::from_ts_point(capture.node.start_position()));
2895 } else if Some(capture.index) == config.outdent_capture_ix {
2896 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2897 }
2898 }
2899
2900 matches.advance();
2901 if let Some((start, end)) = start.zip(end) {
2902 if start.row == end.row {
2903 continue;
2904 }
2905
2906 let range = start..end;
2907 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2908 Err(ix) => indent_ranges.insert(ix, range),
2909 Ok(ix) => {
2910 let prev_range = &mut indent_ranges[ix];
2911 prev_range.end = prev_range.end.max(range.end);
2912 }
2913 }
2914 }
2915 }
2916
2917 let mut error_ranges = Vec::<Range<Point>>::new();
2918 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2919 grammar.error_query.as_ref()
2920 });
2921 while let Some(mat) = matches.peek() {
2922 let node = mat.captures[0].node;
2923 let start = Point::from_ts_point(node.start_position());
2924 let end = Point::from_ts_point(node.end_position());
2925 let range = start..end;
2926 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2927 Ok(ix) | Err(ix) => ix,
2928 };
2929 let mut end_ix = ix;
2930 while let Some(existing_range) = error_ranges.get(end_ix) {
2931 if existing_range.end < end {
2932 end_ix += 1;
2933 } else {
2934 break;
2935 }
2936 }
2937 error_ranges.splice(ix..end_ix, [range]);
2938 matches.advance();
2939 }
2940
2941 outdent_positions.sort();
2942 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and set its end to the outdent position.
2945 if let Some(range_to_truncate) = indent_ranges
2946 .iter_mut()
2947 .filter(|indent_range| indent_range.contains(&outdent_position))
2948 .next_back()
2949 {
2950 range_to_truncate.end = outdent_position;
2951 }
2952 }
2953
        // Find the suggested indentation increases and decreases based on regexes.
2955 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2956 self.for_each_line(
2957 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2958 ..Point::new(row_range.end, 0),
2959 |row, line| {
2960 if config
2961 .decrease_indent_pattern
2962 .as_ref()
2963 .map_or(false, |regex| regex.is_match(line))
2964 {
2965 indent_change_rows.push((row, Ordering::Less));
2966 }
2967 if config
2968 .increase_indent_pattern
2969 .as_ref()
2970 .map_or(false, |regex| regex.is_match(line))
2971 {
2972 indent_change_rows.push((row + 1, Ordering::Greater));
2973 }
2974 },
2975 );
2976
2977 let mut indent_changes = indent_change_rows.into_iter().peekable();
2978 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2979 prev_non_blank_row.unwrap_or(0)
2980 } else {
2981 row_range.start.saturating_sub(1)
2982 };
2983 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2984 Some(row_range.map(move |row| {
2985 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2986
2987 let mut indent_from_prev_row = false;
2988 let mut outdent_from_prev_row = false;
2989 let mut outdent_to_row = u32::MAX;
2990 let mut from_regex = false;
2991
2992 while let Some((indent_row, delta)) = indent_changes.peek() {
2993 match indent_row.cmp(&row) {
2994 Ordering::Equal => match delta {
2995 Ordering::Less => {
2996 from_regex = true;
2997 outdent_from_prev_row = true
2998 }
2999 Ordering::Greater => {
3000 indent_from_prev_row = true;
3001 from_regex = true
3002 }
3003 _ => {}
3004 },
3005
3006 Ordering::Greater => break,
3007 Ordering::Less => {}
3008 }
3009
3010 indent_changes.next();
3011 }
3012
3013 for range in &indent_ranges {
3014 if range.start.row >= row {
3015 break;
3016 }
3017 if range.start.row == prev_row && range.end > row_start {
3018 indent_from_prev_row = true;
3019 }
3020 if range.end > prev_row_start && range.end <= row_start {
3021 outdent_to_row = outdent_to_row.min(range.start.row);
3022 }
3023 }
3024
3025 let within_error = error_ranges
3026 .iter()
3027 .any(|e| e.start.row < row && e.end > row_start);
3028
3029 let suggestion = if outdent_to_row == prev_row
3030 || (outdent_from_prev_row && indent_from_prev_row)
3031 {
3032 Some(IndentSuggestion {
3033 basis_row: prev_row,
3034 delta: Ordering::Equal,
3035 within_error: within_error && !from_regex,
3036 })
3037 } else if indent_from_prev_row {
3038 Some(IndentSuggestion {
3039 basis_row: prev_row,
3040 delta: Ordering::Greater,
3041 within_error: within_error && !from_regex,
3042 })
3043 } else if outdent_to_row < prev_row {
3044 Some(IndentSuggestion {
3045 basis_row: outdent_to_row,
3046 delta: Ordering::Equal,
3047 within_error: within_error && !from_regex,
3048 })
3049 } else if outdent_from_prev_row {
3050 Some(IndentSuggestion {
3051 basis_row: prev_row,
3052 delta: Ordering::Less,
3053 within_error: within_error && !from_regex,
3054 })
3055 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3056 {
3057 Some(IndentSuggestion {
3058 basis_row: prev_row,
3059 delta: Ordering::Equal,
3060 within_error: within_error && !from_regex,
3061 })
3062 } else {
3063 None
3064 };
3065
3066 prev_row = row;
3067 prev_row_start = row_start;
3068 suggestion
3069 }))
3070 }
3071
3072 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3073 while row > 0 {
3074 row -= 1;
3075 if !self.is_line_blank(row) {
3076 return Some(row);
3077 }
3078 }
3079 None
3080 }
3081
3082 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3083 let captures = self.syntax.captures(range, &self.text, |grammar| {
3084 grammar.highlights_query.as_ref()
3085 });
3086 let highlight_maps = captures
3087 .grammars()
3088 .iter()
3089 .map(|grammar| grammar.highlight_map())
3090 .collect();
3091 (captures, highlight_maps)
3092 }
3093
3094 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3095 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3096 /// returned in chunks where each chunk has a single syntax highlighting style and
3097 /// diagnostic status.
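    ///
    /// Sketch of reassembling the text from the chunks (this assumes the yielded chunk
    /// items expose their text as a `text` field):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     text.push_str(chunk.text);
    /// }
    /// ```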
3098 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3099 let range = range.start.to_offset(self)..range.end.to_offset(self);
3100
3101 let mut syntax = None;
3102 if language_aware {
3103 syntax = Some(self.get_highlights(range.clone()));
3104 }
3105 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3106 let diagnostics = language_aware;
3107 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3108 }
3109
3110 pub fn highlighted_text_for_range<T: ToOffset>(
3111 &self,
3112 range: Range<T>,
3113 override_style: Option<HighlightStyle>,
3114 syntax_theme: &SyntaxTheme,
3115 ) -> HighlightedText {
3116 HighlightedText::from_buffer_range(
3117 range,
3118 &self.text,
3119 &self.syntax,
3120 override_style,
3121 syntax_theme,
3122 )
3123 }
3124
3125 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3127 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3128 let mut line = String::new();
3129 let mut row = range.start.row;
3130 for chunk in self
3131 .as_rope()
3132 .chunks_in_range(range.to_offset(self))
3133 .chain(["\n"])
3134 {
3135 for (newline_ix, text) in chunk.split('\n').enumerate() {
3136 if newline_ix > 0 {
3137 callback(row, &line);
3138 row += 1;
3139 line.clear();
3140 }
3141 line.push_str(text);
3142 }
3143 }
3144 }
3145
3146 /// Iterates over every [`SyntaxLayer`] in the buffer.
3147 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3148 self.syntax
3149 .layers_for_range(0..self.len(), &self.text, true)
3150 }
3151
3152 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3153 let offset = position.to_offset(self);
3154 self.syntax
3155 .layers_for_range(offset..offset, &self.text, false)
3156 .filter(|l| l.node().end_byte() > offset)
3157 .last()
3158 }
3159
3160 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3161 &self,
3162 range: Range<D>,
3163 ) -> Option<SyntaxLayer> {
3164 let range = range.to_offset(self);
3165 return self
3166 .syntax
3167 .layers_for_range(range, &self.text, false)
3168 .max_by(|a, b| {
3169 if a.depth != b.depth {
3170 a.depth.cmp(&b.depth)
3171 } else if a.offset.0 != b.offset.0 {
3172 a.offset.0.cmp(&b.offset.0)
3173 } else {
3174 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3175 }
3176 });
3177 }
3178
3179 /// Returns the main [`Language`].
3180 pub fn language(&self) -> Option<&Arc<Language>> {
3181 self.language.as_ref()
3182 }
3183
3184 /// Returns the [`Language`] at the given location.
3185 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3186 self.syntax_layer_at(position)
3187 .map(|info| info.language)
3188 .or(self.language.as_ref())
3189 }
3190
3191 /// Returns the settings for the language at the given location.
3192 pub fn settings_at<'a, D: ToOffset>(
3193 &'a self,
3194 position: D,
3195 cx: &'a App,
3196 ) -> Cow<'a, LanguageSettings> {
3197 language_settings(
3198 self.language_at(position).map(|l| l.name()),
3199 self.file.as_ref(),
3200 cx,
3201 )
3202 }
3203
3204 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3205 CharClassifier::new(self.language_scope_at(point))
3206 }
3207
3208 /// Returns the [`LanguageScope`] at the given location.
3209 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3210 let offset = position.to_offset(self);
3211 let mut scope = None;
3212 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3213
3214 // Use the layer that has the smallest node intersecting the given point.
3215 for layer in self
3216 .syntax
3217 .layers_for_range(offset..offset, &self.text, false)
3218 {
3219 let mut cursor = layer.node().walk();
3220
3221 let mut range = None;
3222 loop {
3223 let child_range = cursor.node().byte_range();
3224 if !child_range.contains(&offset) {
3225 break;
3226 }
3227
3228 range = Some(child_range);
3229 if cursor.goto_first_child_for_byte(offset).is_none() {
3230 break;
3231 }
3232 }
3233
3234 if let Some(range) = range {
3235 if smallest_range_and_depth.as_ref().map_or(
3236 true,
3237 |(smallest_range, smallest_range_depth)| {
3238 if layer.depth > *smallest_range_depth {
3239 true
3240 } else if layer.depth == *smallest_range_depth {
3241 range.len() < smallest_range.len()
3242 } else {
3243 false
3244 }
3245 },
3246 ) {
3247 smallest_range_and_depth = Some((range, layer.depth));
3248 scope = Some(LanguageScope {
3249 language: layer.language.clone(),
3250 override_id: layer.override_id(offset, &self.text),
3251 });
3252 }
3253 }
3254 }
3255
3256 scope.or_else(|| {
3257 self.language.clone().map(|language| LanguageScope {
3258 language,
3259 override_id: None,
3260 })
3261 })
3262 }
3263
3264 /// Returns a tuple of the range and character kind of the word
3265 /// surrounding the given position.
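    ///
    /// Sketch (the text and offset are illustrative):
    ///
    /// ```ignore
    /// // With the text "fn main()" and an offset inside "main", this yields the byte
    /// // range of "main" along with its character kind.
    /// let (word_range, kind) = snapshot.surrounding_word(4);
    /// ```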
3266 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3267 let mut start = start.to_offset(self);
3268 let mut end = start;
3269 let mut next_chars = self.chars_at(start).peekable();
3270 let mut prev_chars = self.reversed_chars_at(start).peekable();
3271
3272 let classifier = self.char_classifier_at(start);
3273 let word_kind = cmp::max(
3274 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3275 next_chars.peek().copied().map(|c| classifier.kind(c)),
3276 );
3277
3278 for ch in prev_chars {
3279 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3280 start -= ch.len_utf8();
3281 } else {
3282 break;
3283 }
3284 }
3285
3286 for ch in next_chars {
3287 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3288 end += ch.len_utf8();
3289 } else {
3290 break;
3291 }
3292 }
3293
3294 (start..end, word_kind)
3295 }
3296
3297 /// Returns the closest syntax node enclosing the given range.
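    ///
    /// Sketch (the range is illustrative):
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_ancestor(selection_start..selection_end) {
    ///     let enclosing_byte_range = node.byte_range();
    /// }
    /// ```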
3298 pub fn syntax_ancestor<'a, T: ToOffset>(
3299 &'a self,
3300 range: Range<T>,
3301 ) -> Option<tree_sitter::Node<'a>> {
3302 let range = range.start.to_offset(self)..range.end.to_offset(self);
3303 let mut result: Option<tree_sitter::Node<'a>> = None;
3304 'outer: for layer in self
3305 .syntax
3306 .layers_for_range(range.clone(), &self.text, true)
3307 {
3308 let mut cursor = layer.node().walk();
3309
            // Descend to the first leaf that touches the start of the range and,
            // if the range is non-empty, extends beyond the start.
3312 while cursor.goto_first_child_for_byte(range.start).is_some() {
3313 if !range.is_empty() && cursor.node().end_byte() == range.start {
3314 cursor.goto_next_sibling();
3315 }
3316 }
3317
3318 // Ascend to the smallest ancestor that strictly contains the range.
3319 loop {
3320 let node_range = cursor.node().byte_range();
3321 if node_range.start <= range.start
3322 && node_range.end >= range.end
3323 && node_range.len() > range.len()
3324 {
3325 break;
3326 }
3327 if !cursor.goto_parent() {
3328 continue 'outer;
3329 }
3330 }
3331
3332 let left_node = cursor.node();
3333 let mut layer_result = left_node;
3334
3335 // For an empty range, try to find another node immediately to the right of the range.
3336 if left_node.end_byte() == range.start {
3337 let mut right_node = None;
3338 while !cursor.goto_next_sibling() {
3339 if !cursor.goto_parent() {
3340 break;
3341 }
3342 }
3343
3344 while cursor.node().start_byte() == range.start {
3345 right_node = Some(cursor.node());
3346 if !cursor.goto_first_child() {
3347 break;
3348 }
3349 }
3350
3351 // If there is a candidate node on both sides of the (empty) range, then
3352 // decide between the two by favoring a named node over an anonymous token.
3353 // If both nodes are the same in that regard, favor the right one.
3354 if let Some(right_node) = right_node {
3355 if right_node.is_named() || !left_node.is_named() {
3356 layer_result = right_node;
3357 }
3358 }
3359 }
3360
3361 if let Some(previous_result) = &result {
3362 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3363 continue;
3364 }
3365 }
3366 result = Some(layer_result);
3367 }
3368
3369 result
3370 }
3371
3372 /// Returns the outline for the buffer.
3373 ///
3374 /// This method allows passing an optional [`SyntaxTheme`] to
3375 /// syntax-highlight the returned symbols.
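    ///
    /// Sketch (this assumes the returned [`Outline`] exposes its items as `items`):
    ///
    /// ```ignore
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in &outline.items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```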
3376 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3377 self.outline_items_containing(0..self.len(), true, theme)
3378 .map(Outline::new)
3379 }
3380
3381 /// Returns all the symbols that contain the given position.
3382 ///
3383 /// This method allows passing an optional [`SyntaxTheme`] to
3384 /// syntax-highlight the returned symbols.
3385 pub fn symbols_containing<T: ToOffset>(
3386 &self,
3387 position: T,
3388 theme: Option<&SyntaxTheme>,
3389 ) -> Option<Vec<OutlineItem<Anchor>>> {
3390 let position = position.to_offset(self);
3391 let mut items = self.outline_items_containing(
3392 position.saturating_sub(1)..self.len().min(position + 1),
3393 false,
3394 theme,
3395 )?;
3396 let mut prev_depth = None;
3397 items.retain(|item| {
3398 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3399 prev_depth = Some(item.depth);
3400 result
3401 });
3402 Some(items)
3403 }
3404
3405 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3406 let range = range.to_offset(self);
3407 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3408 grammar.outline_config.as_ref().map(|c| &c.query)
3409 });
3410 let configs = matches
3411 .grammars()
3412 .iter()
3413 .map(|g| g.outline_config.as_ref().unwrap())
3414 .collect::<Vec<_>>();
3415
3416 while let Some(mat) = matches.peek() {
3417 let config = &configs[mat.grammar_index];
3418 let containing_item_node = maybe!({
3419 let item_node = mat.captures.iter().find_map(|cap| {
3420 if cap.index == config.item_capture_ix {
3421 Some(cap.node)
3422 } else {
3423 None
3424 }
3425 })?;
3426
3427 let item_byte_range = item_node.byte_range();
3428 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3429 None
3430 } else {
3431 Some(item_node)
3432 }
3433 });
3434
3435 if let Some(item_node) = containing_item_node {
3436 return Some(
3437 Point::from_ts_point(item_node.start_position())
3438 ..Point::from_ts_point(item_node.end_position()),
3439 );
3440 }
3441
3442 matches.advance();
3443 }
3444 None
3445 }
3446
3447 pub fn outline_items_containing<T: ToOffset>(
3448 &self,
3449 range: Range<T>,
3450 include_extra_context: bool,
3451 theme: Option<&SyntaxTheme>,
3452 ) -> Option<Vec<OutlineItem<Anchor>>> {
3453 let range = range.to_offset(self);
3454 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3455 grammar.outline_config.as_ref().map(|c| &c.query)
3456 });
3457 let configs = matches
3458 .grammars()
3459 .iter()
3460 .map(|g| g.outline_config.as_ref().unwrap())
3461 .collect::<Vec<_>>();
3462
3463 let mut items = Vec::new();
3464 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3465 while let Some(mat) = matches.peek() {
3466 let config = &configs[mat.grammar_index];
3467 if let Some(item) =
3468 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3469 {
3470 items.push(item);
3471 } else if let Some(capture) = mat
3472 .captures
3473 .iter()
3474 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3475 {
3476 let capture_range = capture.node.start_position()..capture.node.end_position();
3477 let mut capture_row_range =
3478 capture_range.start.row as u32..capture_range.end.row as u32;
3479 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3480 {
3481 capture_row_range.end -= 1;
3482 }
3483 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3484 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3485 last_row_range.end = capture_row_range.end;
3486 } else {
3487 annotation_row_ranges.push(capture_row_range);
3488 }
3489 } else {
3490 annotation_row_ranges.push(capture_row_range);
3491 }
3492 }
3493 matches.advance();
3494 }
3495
3496 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3497
3498 // Assign depths based on containment relationships and convert to anchors.
3499 let mut item_ends_stack = Vec::<Point>::new();
3500 let mut anchor_items = Vec::new();
3501 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3502 for item in items {
3503 while let Some(last_end) = item_ends_stack.last().copied() {
3504 if last_end < item.range.end {
3505 item_ends_stack.pop();
3506 } else {
3507 break;
3508 }
3509 }
3510
3511 let mut annotation_row_range = None;
3512 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3513 let row_preceding_item = item.range.start.row.saturating_sub(1);
3514 if next_annotation_row_range.end < row_preceding_item {
3515 annotation_row_ranges.next();
3516 } else {
3517 if next_annotation_row_range.end == row_preceding_item {
3518 annotation_row_range = Some(next_annotation_row_range.clone());
3519 annotation_row_ranges.next();
3520 }
3521 break;
3522 }
3523 }
3524
3525 anchor_items.push(OutlineItem {
3526 depth: item_ends_stack.len(),
3527 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3528 text: item.text,
3529 highlight_ranges: item.highlight_ranges,
3530 name_ranges: item.name_ranges,
3531 body_range: item.body_range.map(|body_range| {
3532 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3533 }),
3534 annotation_range: annotation_row_range.map(|annotation_range| {
3535 self.anchor_after(Point::new(annotation_range.start, 0))
3536 ..self.anchor_before(Point::new(
3537 annotation_range.end,
3538 self.line_len(annotation_range.end),
3539 ))
3540 }),
3541 });
3542 item_ends_stack.push(item.range.end);
3543 }
3544
3545 Some(anchor_items)
3546 }
3547
3548 fn next_outline_item(
3549 &self,
3550 config: &OutlineConfig,
3551 mat: &SyntaxMapMatch,
3552 range: &Range<usize>,
3553 include_extra_context: bool,
3554 theme: Option<&SyntaxTheme>,
3555 ) -> Option<OutlineItem<Point>> {
3556 let item_node = mat.captures.iter().find_map(|cap| {
3557 if cap.index == config.item_capture_ix {
3558 Some(cap.node)
3559 } else {
3560 None
3561 }
3562 })?;
3563
3564 let item_byte_range = item_node.byte_range();
3565 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3566 return None;
3567 }
3568 let item_point_range = Point::from_ts_point(item_node.start_position())
3569 ..Point::from_ts_point(item_node.end_position());
3570
3571 let mut open_point = None;
3572 let mut close_point = None;
3573 let mut buffer_ranges = Vec::new();
3574 for capture in mat.captures {
3575 let node_is_name;
3576 if capture.index == config.name_capture_ix {
3577 node_is_name = true;
3578 } else if Some(capture.index) == config.context_capture_ix
3579 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3580 {
3581 node_is_name = false;
3582 } else {
3583 if Some(capture.index) == config.open_capture_ix {
3584 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3585 } else if Some(capture.index) == config.close_capture_ix {
3586 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3587 }
3588
3589 continue;
3590 }
3591
3592 let mut range = capture.node.start_byte()..capture.node.end_byte();
3593 let start = capture.node.start_position();
3594 if capture.node.end_position().row > start.row {
3595 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3596 }
3597
3598 if !range.is_empty() {
3599 buffer_ranges.push((range, node_is_name));
3600 }
3601 }
3602 if buffer_ranges.is_empty() {
3603 return None;
3604 }
3605 let mut text = String::new();
3606 let mut highlight_ranges = Vec::new();
3607 let mut name_ranges = Vec::new();
3608 let mut chunks = self.chunks(
3609 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3610 true,
3611 );
3612 let mut last_buffer_range_end = 0;
3613
3614 for (buffer_range, is_name) in buffer_ranges {
3615 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3616 if space_added {
3617 text.push(' ');
3618 }
3619 let before_append_len = text.len();
3620 let mut offset = buffer_range.start;
3621 chunks.seek(buffer_range.clone());
3622 for mut chunk in chunks.by_ref() {
3623 if chunk.text.len() > buffer_range.end - offset {
3624 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3625 offset = buffer_range.end;
3626 } else {
3627 offset += chunk.text.len();
3628 }
3629 let style = chunk
3630 .syntax_highlight_id
3631 .zip(theme)
3632 .and_then(|(highlight, theme)| highlight.style(theme));
3633 if let Some(style) = style {
3634 let start = text.len();
3635 let end = start + chunk.text.len();
3636 highlight_ranges.push((start..end, style));
3637 }
3638 text.push_str(chunk.text);
3639 if offset >= buffer_range.end {
3640 break;
3641 }
3642 }
3643 if is_name {
3644 let after_append_len = text.len();
3645 let start = if space_added && !name_ranges.is_empty() {
3646 before_append_len - 1
3647 } else {
3648 before_append_len
3649 };
3650 name_ranges.push(start..after_append_len);
3651 }
3652 last_buffer_range_end = buffer_range.end;
3653 }
3654
3655 Some(OutlineItem {
3656 depth: 0, // We'll calculate the depth later
3657 range: item_point_range,
3658 text,
3659 highlight_ranges,
3660 name_ranges,
3661 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3662 annotation_range: None,
3663 })
3664 }
3665
3666 pub fn function_body_fold_ranges<T: ToOffset>(
3667 &self,
3668 within: Range<T>,
3669 ) -> impl Iterator<Item = Range<usize>> + '_ {
3670 self.text_object_ranges(within, TreeSitterOptions::default())
3671 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3672 }
3673
    /// For each grammar in the buffer's syntax layers, runs the
    /// [`tree_sitter::Query`] returned by the provided callback against the given range.
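    ///
    /// A usage sketch (not compiled as a doctest), mirroring the pattern used
    /// elsewhere in this file; it assumes a `BufferSnapshot` named `snapshot`
    /// and a grammar with an outline query:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures`, `mat.grammar_index`, and `mat.pattern_index` here.
    ///     matches.advance();
    /// }
    /// ```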
3676 pub fn matches(
3677 &self,
3678 range: Range<usize>,
3679 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3680 ) -> SyntaxMapMatches {
3681 self.syntax.matches(range, self, query)
3682 }
3683
3684 pub fn all_bracket_ranges(
3685 &self,
3686 range: Range<usize>,
3687 ) -> impl Iterator<Item = BracketMatch> + '_ {
3688 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3689 grammar.brackets_config.as_ref().map(|c| &c.query)
3690 });
3691 let configs = matches
3692 .grammars()
3693 .iter()
3694 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3695 .collect::<Vec<_>>();
3696
3697 iter::from_fn(move || {
3698 while let Some(mat) = matches.peek() {
3699 let mut open = None;
3700 let mut close = None;
3701 let config = &configs[mat.grammar_index];
3702 let pattern = &config.patterns[mat.pattern_index];
3703 for capture in mat.captures {
3704 if capture.index == config.open_capture_ix {
3705 open = Some(capture.node.byte_range());
3706 } else if capture.index == config.close_capture_ix {
3707 close = Some(capture.node.byte_range());
3708 }
3709 }
3710
3711 matches.advance();
3712
3713 let Some((open_range, close_range)) = open.zip(close) else {
3714 continue;
3715 };
3716
3717 let bracket_range = open_range.start..=close_range.end;
3718 if !bracket_range.overlaps(&range) {
3719 continue;
3720 }
3721
3722 return Some(BracketMatch {
3723 open_range,
3724 close_range,
3725 newline_only: pattern.newline_only,
3726 });
3727 }
3728 None
3729 })
3730 }
3731
3732 /// Returns bracket range pairs overlapping or adjacent to `range`
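    ///
    /// A minimal sketch (not compiled as a doctest); it assumes a `BufferSnapshot`
    /// named `snapshot`:
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(0..snapshot.len()) {
    ///     // `pair.open_range` and `pair.close_range` are the byte ranges of the
    ///     // opening and closing brackets.
    /// }
    /// ```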
3733 pub fn bracket_ranges<T: ToOffset>(
3734 &self,
3735 range: Range<T>,
3736 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Expand the range by one character on each side so that bracket pairs
        // adjacent to the range are also matched.
3738 let range = range.start.to_offset(self).saturating_sub(1)
3739 ..self.len().min(range.end.to_offset(self) + 1);
3740 self.all_bracket_ranges(range)
3741 .filter(|pair| !pair.newline_only)
3742 }
3743
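    /// Returns the text object ranges captured by the language's text-object query
    /// that overlap the given range, together with their [`TextObject`] kinds.
    ///
    /// A minimal sketch (not compiled as a doctest); it assumes a `BufferSnapshot`
    /// named `snapshot`:
    ///
    /// ```ignore
    /// for (byte_range, object) in
    ///     snapshot.text_object_ranges(0..snapshot.len(), TreeSitterOptions::default())
    /// {
    ///     if object == TextObject::InsideFunction {
    ///         // `byte_range` covers the inside of a function body.
    ///     }
    /// }
    /// ```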
3744 pub fn text_object_ranges<T: ToOffset>(
3745 &self,
3746 range: Range<T>,
3747 options: TreeSitterOptions,
3748 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3749 let range = range.start.to_offset(self).saturating_sub(1)
3750 ..self.len().min(range.end.to_offset(self) + 1);
3751
3752 let mut matches =
3753 self.syntax
3754 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3755 grammar.text_object_config.as_ref().map(|c| &c.query)
3756 });
3757
3758 let configs = matches
3759 .grammars()
3760 .iter()
3761 .map(|grammar| grammar.text_object_config.as_ref())
3762 .collect::<Vec<_>>();
3763
3764 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3765
3766 iter::from_fn(move || {
3767 loop {
3768 while let Some(capture) = captures.pop() {
3769 if capture.0.overlaps(&range) {
3770 return Some(capture);
3771 }
3772 }
3773
3774 let mat = matches.peek()?;
3775
3776 let Some(config) = configs[mat.grammar_index].as_ref() else {
3777 matches.advance();
3778 continue;
3779 };
3780
3781 for capture in mat.captures {
3782 let Some(ix) = config
3783 .text_objects_by_capture_ix
3784 .binary_search_by_key(&capture.index, |e| e.0)
3785 .ok()
3786 else {
3787 continue;
3788 };
3789 let text_object = config.text_objects_by_capture_ix[ix].1;
3790 let byte_range = capture.node.byte_range();
3791
3792 let mut found = false;
3793 for (range, existing) in captures.iter_mut() {
3794 if existing == &text_object {
3795 range.start = range.start.min(byte_range.start);
3796 range.end = range.end.max(byte_range.end);
3797 found = true;
3798 break;
3799 }
3800 }
3801
3802 if !found {
3803 captures.push((byte_range, text_object));
3804 }
3805 }
3806
3807 matches.advance();
3808 }
3809 })
3810 }
3811
3812 /// Returns enclosing bracket ranges containing the given range
3813 pub fn enclosing_bracket_ranges<T: ToOffset>(
3814 &self,
3815 range: Range<T>,
3816 ) -> impl Iterator<Item = BracketMatch> + '_ {
3817 let range = range.start.to_offset(self)..range.end.to_offset(self);
3818
3819 self.bracket_ranges(range.clone()).filter(move |pair| {
3820 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3821 })
3822 }
3823
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
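    ///
    /// A minimal sketch (not compiled as a doctest); it assumes a `BufferSnapshot`
    /// named `snapshot` and a cursor offset named `offset`:
    ///
    /// ```ignore
    /// // Find the nearest pair of brackets around the cursor, with no extra filtering.
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(offset..offset, None) {
    ///     // `open` and `close` are the byte ranges of the open and close brackets.
    /// }
    /// ```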
3827 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3828 &self,
3829 range: Range<T>,
3830 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3831 ) -> Option<(Range<usize>, Range<usize>)> {
3832 let range = range.start.to_offset(self)..range.end.to_offset(self);
3833
3834 // Get the ranges of the innermost pair of brackets.
3835 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3836
3837 for pair in self.enclosing_bracket_ranges(range.clone()) {
3838 if let Some(range_filter) = range_filter {
3839 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3840 continue;
3841 }
3842 }
3843
3844 let len = pair.close_range.end - pair.open_range.start;
3845
3846 if let Some((existing_open, existing_close)) = &result {
3847 let existing_len = existing_close.end - existing_open.start;
3848 if len > existing_len {
3849 continue;
3850 }
3851 }
3852
3853 result = Some((pair.open_range, pair.close_range));
3854 }
3855
3856 result
3857 }
3858
3859 /// Returns anchor ranges for any matches of the redaction query.
3860 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3861 /// will be run on the relevant section of the buffer.
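    ///
    /// A minimal sketch (not compiled as a doctest); it assumes a `BufferSnapshot`
    /// named `snapshot`:
    ///
    /// ```ignore
    /// // Collect every byte range that the language's redaction query marks as
    /// // sensitive, e.g. values in an environment file.
    /// let redactions: Vec<_> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```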
3862 pub fn redacted_ranges<T: ToOffset>(
3863 &self,
3864 range: Range<T>,
3865 ) -> impl Iterator<Item = Range<usize>> + '_ {
3866 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3867 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3868 grammar
3869 .redactions_config
3870 .as_ref()
3871 .map(|config| &config.query)
3872 });
3873
3874 let configs = syntax_matches
3875 .grammars()
3876 .iter()
3877 .map(|grammar| grammar.redactions_config.as_ref())
3878 .collect::<Vec<_>>();
3879
3880 iter::from_fn(move || {
3881 let redacted_range = syntax_matches
3882 .peek()
3883 .and_then(|mat| {
3884 configs[mat.grammar_index].and_then(|config| {
3885 mat.captures
3886 .iter()
3887 .find(|capture| capture.index == config.redaction_capture_ix)
3888 })
3889 })
3890 .map(|mat| mat.node.byte_range());
3891 syntax_matches.advance();
3892 redacted_range
3893 })
3894 }
3895
3896 pub fn injections_intersecting_range<T: ToOffset>(
3897 &self,
3898 range: Range<T>,
3899 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3900 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3901
3902 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3903 grammar
3904 .injection_config
3905 .as_ref()
3906 .map(|config| &config.query)
3907 });
3908
3909 let configs = syntax_matches
3910 .grammars()
3911 .iter()
3912 .map(|grammar| grammar.injection_config.as_ref())
3913 .collect::<Vec<_>>();
3914
3915 iter::from_fn(move || {
3916 let ranges = syntax_matches.peek().and_then(|mat| {
3917 let config = &configs[mat.grammar_index]?;
3918 let content_capture_range = mat.captures.iter().find_map(|capture| {
3919 if capture.index == config.content_capture_ix {
3920 Some(capture.node.byte_range())
3921 } else {
3922 None
3923 }
3924 })?;
3925 let language = self.language_at(content_capture_range.start)?;
3926 Some((content_capture_range, language))
3927 });
3928 syntax_matches.advance();
3929 ranges
3930 })
3931 }
3932
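    /// For each match of the language's debug-variables query within the given
    /// range, returns the byte range captured as a variable.
    ///
    /// A minimal sketch (not compiled as a doctest); it assumes a `BufferSnapshot`
    /// named `snapshot`:
    ///
    /// ```ignore
    /// for variable in snapshot.debug_variable_ranges(0..snapshot.len()) {
    ///     // `variable.range` is the byte range of the captured variable in the
    ///     // buffer identified by `variable.buffer_id`.
    /// }
    /// ```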
3933 pub fn debug_variable_ranges(
3934 &self,
3935 offset_range: Range<usize>,
3936 ) -> impl Iterator<Item = DebugVariableRanges> + '_ {
3937 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3938 grammar
3939 .debug_variables_config
3940 .as_ref()
3941 .map(|config| &config.query)
3942 });
3943
3944 let configs = syntax_matches
3945 .grammars()
3946 .iter()
3947 .map(|grammar| grammar.debug_variables_config.as_ref())
3948 .collect::<Vec<_>>();
3949
3950 iter::from_fn(move || {
3951 loop {
3952 let mat = syntax_matches.peek()?;
3953
3954 let variable_ranges = configs[mat.grammar_index].and_then(|config| {
3955 let full_range = mat.captures.iter().fold(
3956 Range {
3957 start: usize::MAX,
3958 end: 0,
3959 },
3960 |mut acc, next| {
3961 let byte_range = next.node.byte_range();
3962 if acc.start > byte_range.start {
3963 acc.start = byte_range.start;
3964 }
3965 if acc.end < byte_range.end {
3966 acc.end = byte_range.end;
3967 }
3968 acc
3969 },
3970 );
3971 if full_range.start > full_range.end {
                        // The match had no captures, so there is no spanning range.
3973 return None;
3974 }
3975
3976 let captures = mat.captures.iter().filter_map(|capture| {
3977 Some((
3978 capture,
3979 config.captures.get(capture.index as usize).cloned()?,
3980 ))
3981 });
3982
3983 let mut variable_range = None;
3984 for (query, capture) in captures {
3985 if let DebugVariableCapture::Variable = capture {
3986 let _ = variable_range.insert(query.node.byte_range());
3987 }
3988 }
3989
3990 Some(DebugVariableRanges {
3991 buffer_id: self.remote_id(),
3992 range: variable_range?,
3993 })
3994 });
3995
3996 syntax_matches.advance();
3997 if variable_ranges.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns None. We only return early
                    // for matches that captured a variable; otherwise we loop around to the next match.
4000 return variable_ranges;
4001 }
4002 }
4003 })
4004 }
4005
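    /// Returns the ranges identified as runnables (for example, tests) by the
    /// language's runnables query within the given range.
    ///
    /// A minimal sketch (not compiled as a doctest); it assumes a `BufferSnapshot`
    /// named `snapshot`:
    ///
    /// ```ignore
    /// for runnable in snapshot.runnable_ranges(0..snapshot.len()) {
    ///     // `runnable.run_range` is the byte range of the run capture, and
    ///     // `runnable.runnable.tags` holds the tags declared by the query.
    /// }
    /// ```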
4006 pub fn runnable_ranges(
4007 &self,
4008 offset_range: Range<usize>,
4009 ) -> impl Iterator<Item = RunnableRange> + '_ {
4010 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4011 grammar.runnable_config.as_ref().map(|config| &config.query)
4012 });
4013
4014 let test_configs = syntax_matches
4015 .grammars()
4016 .iter()
4017 .map(|grammar| grammar.runnable_config.as_ref())
4018 .collect::<Vec<_>>();
4019
4020 iter::from_fn(move || {
4021 loop {
4022 let mat = syntax_matches.peek()?;
4023
4024 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4025 let mut run_range = None;
4026 let full_range = mat.captures.iter().fold(
4027 Range {
4028 start: usize::MAX,
4029 end: 0,
4030 },
4031 |mut acc, next| {
4032 let byte_range = next.node.byte_range();
4033 if acc.start > byte_range.start {
4034 acc.start = byte_range.start;
4035 }
4036 if acc.end < byte_range.end {
4037 acc.end = byte_range.end;
4038 }
4039 acc
4040 },
4041 );
4042 if full_range.start > full_range.end {
                        // The match had no captures, so there is no spanning range.
4044 return None;
4045 }
4046 let extra_captures: SmallVec<[_; 1]> =
4047 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4048 test_configs
4049 .extra_captures
4050 .get(capture.index as usize)
4051 .cloned()
4052 .and_then(|tag_name| match tag_name {
4053 RunnableCapture::Named(name) => {
4054 Some((capture.node.byte_range(), name))
4055 }
4056 RunnableCapture::Run => {
4057 let _ = run_range.insert(capture.node.byte_range());
4058 None
4059 }
4060 })
4061 }));
4062 let run_range = run_range?;
4063 let tags = test_configs
4064 .query
4065 .property_settings(mat.pattern_index)
4066 .iter()
4067 .filter_map(|property| {
4068 if *property.key == *"tag" {
4069 property
4070 .value
4071 .as_ref()
4072 .map(|value| RunnableTag(value.to_string().into()))
4073 } else {
4074 None
4075 }
4076 })
4077 .collect();
4078 let extra_captures = extra_captures
4079 .into_iter()
4080 .map(|(range, name)| {
4081 (
4082 name.to_string(),
4083 self.text_for_range(range.clone()).collect::<String>(),
4084 )
4085 })
4086 .collect();
4087 // All tags should have the same range.
4088 Some(RunnableRange {
4089 run_range,
4090 full_range,
4091 runnable: Runnable {
4092 tags,
4093 language: mat.language,
4094 buffer: self.remote_id(),
4095 },
4096 extra_captures,
4097 buffer_id: self.remote_id(),
4098 })
4099 });
4100
4101 syntax_matches.advance();
4102 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns None. We only return early
                    // for matches that contained a run marker; otherwise we loop around to the next match.
4105 return test_range;
4106 }
4107 }
4108 })
4109 }
4110
    /// Returns the selections of remote peers that intersect the given range.
    /// If `include_local` is true, selections belonging to the local replica are included as well.
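    ///
    /// A minimal sketch (not compiled as a doctest); it assumes a `BufferSnapshot`
    /// named `snapshot` and covers the entire buffer:
    ///
    /// ```ignore
    /// let everything = snapshot.anchor_before(0)..snapshot.anchor_after(snapshot.len());
    /// for (replica_id, _line_mode, _cursor_shape, selections) in
    ///     snapshot.selections_in_range(everything, false)
    /// {
    ///     for selection in selections {
    ///         // `selection.start` and `selection.end` are anchors into this buffer.
    ///     }
    /// }
    /// ```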
4112 #[allow(clippy::type_complexity)]
4113 pub fn selections_in_range(
4114 &self,
4115 range: Range<Anchor>,
4116 include_local: bool,
4117 ) -> impl Iterator<
4118 Item = (
4119 ReplicaId,
4120 bool,
4121 CursorShape,
4122 impl Iterator<Item = &Selection<Anchor>> + '_,
4123 ),
4124 > + '_ {
4125 self.remote_selections
4126 .iter()
4127 .filter(move |(replica_id, set)| {
4128 (include_local || **replica_id != self.text.replica_id())
4129 && !set.selections.is_empty()
4130 })
4131 .map(move |(replica_id, set)| {
4132 let start_ix = match set.selections.binary_search_by(|probe| {
4133 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4134 }) {
4135 Ok(ix) | Err(ix) => ix,
4136 };
4137 let end_ix = match set.selections.binary_search_by(|probe| {
4138 probe.start.cmp(&range.end, self).then(Ordering::Less)
4139 }) {
4140 Ok(ix) | Err(ix) => ix,
4141 };
4142
4143 (
4144 *replica_id,
4145 set.line_mode,
4146 set.cursor_shape,
4147 set.selections[start_ix..end_ix].iter(),
4148 )
4149 })
4150 }
4151
    /// Returns whether the buffer contains any diagnostics.
4153 pub fn has_diagnostics(&self) -> bool {
4154 !self.diagnostics.is_empty()
4155 }
4156
4157 /// Returns all the diagnostics intersecting the given range.
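    ///
    /// A minimal sketch (not compiled as a doctest); it assumes a `BufferSnapshot`
    /// named `snapshot` and resolves the diagnostic ranges to byte offsets:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!("{:?} at {:?}: {}", entry.diagnostic.severity, entry.range, entry.diagnostic.message);
    /// }
    /// ```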
4158 pub fn diagnostics_in_range<'a, T, O>(
4159 &'a self,
4160 search_range: Range<T>,
4161 reversed: bool,
4162 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4163 where
4164 T: 'a + Clone + ToOffset,
4165 O: 'a + FromAnchor,
4166 {
4167 let mut iterators: Vec<_> = self
4168 .diagnostics
4169 .iter()
4170 .map(|(_, collection)| {
4171 collection
4172 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4173 .peekable()
4174 })
4175 .collect();
4176
4177 std::iter::from_fn(move || {
4178 let (next_ix, _) = iterators
4179 .iter_mut()
4180 .enumerate()
4181 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4182 .min_by(|(_, a), (_, b)| {
4183 let cmp = a
4184 .range
4185 .start
4186 .cmp(&b.range.start, self)
4187 // when range is equal, sort by diagnostic severity
4188 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4189 // and stabilize order with group_id
4190 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4191 if reversed { cmp.reverse() } else { cmp }
4192 })?;
4193 iterators[next_ix]
4194 .next()
4195 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4196 diagnostic,
4197 range: FromAnchor::from_anchor(&range.start, self)
4198 ..FromAnchor::from_anchor(&range.end, self),
4199 })
4200 })
4201 }
4202
4203 /// Returns all the diagnostic groups associated with the given
4204 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
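    ///
    /// A minimal sketch (not compiled as a doctest); it assumes a `BufferSnapshot`
    /// named `snapshot`:
    ///
    /// ```ignore
    /// // Walk the diagnostic groups reported by every language server, sorted by
    /// // the start of each group's primary diagnostic.
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     // `primary.range` is an anchor range; `server_id` identifies the source server.
    /// }
    /// ```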
4206 pub fn diagnostic_groups(
4207 &self,
4208 language_server_id: Option<LanguageServerId>,
4209 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4210 let mut groups = Vec::new();
4211
4212 if let Some(language_server_id) = language_server_id {
4213 if let Ok(ix) = self
4214 .diagnostics
4215 .binary_search_by_key(&language_server_id, |e| e.0)
4216 {
4217 self.diagnostics[ix]
4218 .1
4219 .groups(language_server_id, &mut groups, self);
4220 }
4221 } else {
4222 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4223 diagnostics.groups(*language_server_id, &mut groups, self);
4224 }
4225 }
4226
4227 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4228 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4229 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4230 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4231 });
4232
4233 groups
4234 }
4235
4236 /// Returns an iterator over the diagnostics for the given group.
4237 pub fn diagnostic_group<O>(
4238 &self,
4239 group_id: usize,
4240 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4241 where
4242 O: FromAnchor + 'static,
4243 {
4244 self.diagnostics
4245 .iter()
4246 .flat_map(move |(_, set)| set.group(group_id, self))
4247 }
4248
4249 /// An integer version number that accounts for all updates besides
4250 /// the buffer's text itself (which is versioned via a version vector).
4251 pub fn non_text_state_update_count(&self) -> usize {
4252 self.non_text_state_update_count
4253 }
4254
    /// Returns a snapshot of the underlying file.
4256 pub fn file(&self) -> Option<&Arc<dyn File>> {
4257 self.file.as_ref()
4258 }
4259
4260 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4261 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4262 if let Some(file) = self.file() {
4263 if file.path().file_name().is_none() || include_root {
4264 Some(file.full_path(cx))
4265 } else {
4266 Some(file.path().to_path_buf())
4267 }
4268 } else {
4269 None
4270 }
4271 }
4272
4273 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4274 let query_str = query.fuzzy_contents;
4275 if query_str.map_or(false, |query| query.is_empty()) {
4276 return BTreeMap::default();
4277 }
4278
4279 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4280 language,
4281 override_id: None,
4282 }));
4283
4284 let mut query_ix = 0;
4285 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4286 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4287
4288 let mut words = BTreeMap::default();
4289 let mut current_word_start_ix = None;
4290 let mut chunk_ix = query.range.start;
4291 for chunk in self.chunks(query.range, false) {
4292 for (i, c) in chunk.text.char_indices() {
4293 let ix = chunk_ix + i;
4294 if classifier.is_word(c) {
4295 if current_word_start_ix.is_none() {
4296 current_word_start_ix = Some(ix);
4297 }
4298
4299 if let Some(query_chars) = &query_chars {
4300 if query_ix < query_len {
4301 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4302 query_ix += 1;
4303 }
4304 }
4305 }
4306 continue;
4307 } else if let Some(word_start) = current_word_start_ix.take() {
4308 if query_ix == query_len {
4309 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4310 let mut word_text = self.text_for_range(word_start..ix).peekable();
4311 let first_char = word_text
4312 .peek()
4313 .and_then(|first_chunk| first_chunk.chars().next());
                        // As a heuristic to reduce useless completions, optionally skip "words" that start with a digit.
4315 if !query.skip_digits
4316 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4317 {
4318 words.insert(word_text.collect(), word_range);
4319 }
4320 }
4321 }
4322 query_ix = 0;
4323 }
4324 chunk_ix += chunk.text.len();
4325 }
4326
4327 words
4328 }
4329}
4330
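/// Parameters for [`BufferSnapshot::words_in_range`].
///
/// A minimal usage sketch (not compiled as a doctest); it assumes a
/// `BufferSnapshot` named `snapshot`:
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// // `words` maps each matching word to its anchor range in the buffer.
/// ```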
4331pub struct WordsQuery<'a> {
    /// Only return words that fuzzily match this string: its characters must appear in the word, in order (case-insensitive).
4333 pub fuzzy_contents: Option<&'a str>,
4334 /// Skips words that start with a digit.
4335 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4337 pub range: Range<usize>,
4338}
4339
4340fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4341 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4342}
4343
4344fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4345 let mut result = IndentSize::spaces(0);
4346 for c in text {
4347 let kind = match c {
4348 ' ' => IndentKind::Space,
4349 '\t' => IndentKind::Tab,
4350 _ => break,
4351 };
4352 if result.len == 0 {
4353 result.kind = kind;
4354 }
4355 result.len += 1;
4356 }
4357 result
4358}
4359
4360impl Clone for BufferSnapshot {
4361 fn clone(&self) -> Self {
4362 Self {
4363 text: self.text.clone(),
4364 syntax: self.syntax.clone(),
4365 file: self.file.clone(),
4366 remote_selections: self.remote_selections.clone(),
4367 diagnostics: self.diagnostics.clone(),
4368 language: self.language.clone(),
4369 non_text_state_update_count: self.non_text_state_update_count,
4370 }
4371 }
4372}
4373
4374impl Deref for BufferSnapshot {
4375 type Target = text::BufferSnapshot;
4376
4377 fn deref(&self) -> &Self::Target {
4378 &self.text
4379 }
4380}
4381
4382unsafe impl Send for BufferChunks<'_> {}
4383
4384impl<'a> BufferChunks<'a> {
4385 pub(crate) fn new(
4386 text: &'a Rope,
4387 range: Range<usize>,
4388 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4389 diagnostics: bool,
4390 buffer_snapshot: Option<&'a BufferSnapshot>,
4391 ) -> Self {
4392 let mut highlights = None;
4393 if let Some((captures, highlight_maps)) = syntax {
4394 highlights = Some(BufferChunkHighlights {
4395 captures,
4396 next_capture: None,
4397 stack: Default::default(),
4398 highlight_maps,
4399 })
4400 }
4401
4402 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4403 let chunks = text.chunks_in_range(range.clone());
4404
4405 let mut this = BufferChunks {
4406 range,
4407 buffer_snapshot,
4408 chunks,
4409 diagnostic_endpoints,
4410 error_depth: 0,
4411 warning_depth: 0,
4412 information_depth: 0,
4413 hint_depth: 0,
4414 unnecessary_depth: 0,
4415 highlights,
4416 };
4417 this.initialize_diagnostic_endpoints();
4418 this
4419 }
4420
    /// Seeks to the given byte range in the buffer.
4422 pub fn seek(&mut self, range: Range<usize>) {
4423 let old_range = std::mem::replace(&mut self.range, range.clone());
4424 self.chunks.set_range(self.range.clone());
4425 if let Some(highlights) = self.highlights.as_mut() {
4426 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4427 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4428 highlights
4429 .stack
4430 .retain(|(end_offset, _)| *end_offset > range.start);
4431 if let Some(capture) = &highlights.next_capture {
4432 if range.start >= capture.node.start_byte() {
4433 let next_capture_end = capture.node.end_byte();
4434 if range.start < next_capture_end {
4435 highlights.stack.push((
4436 next_capture_end,
4437 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4438 ));
4439 }
4440 highlights.next_capture.take();
4441 }
4442 }
4443 } else if let Some(snapshot) = self.buffer_snapshot {
4444 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4445 *highlights = BufferChunkHighlights {
4446 captures,
4447 next_capture: None,
4448 stack: Default::default(),
4449 highlight_maps,
4450 };
4451 } else {
4452 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4453 // Seeking such BufferChunks is not supported.
4454 debug_assert!(
4455 false,
4456 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4457 );
4458 }
4459
4460 highlights.captures.set_byte_range(self.range.clone());
4461 self.initialize_diagnostic_endpoints();
4462 }
4463 }
4464
4465 fn initialize_diagnostic_endpoints(&mut self) {
4466 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4467 if let Some(buffer) = self.buffer_snapshot {
4468 let mut diagnostic_endpoints = Vec::new();
4469 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4470 diagnostic_endpoints.push(DiagnosticEndpoint {
4471 offset: entry.range.start,
4472 is_start: true,
4473 severity: entry.diagnostic.severity,
4474 is_unnecessary: entry.diagnostic.is_unnecessary,
4475 });
4476 diagnostic_endpoints.push(DiagnosticEndpoint {
4477 offset: entry.range.end,
4478 is_start: false,
4479 severity: entry.diagnostic.severity,
4480 is_unnecessary: entry.diagnostic.is_unnecessary,
4481 });
4482 }
4483 diagnostic_endpoints
4484 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4485 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4486 self.hint_depth = 0;
4487 self.error_depth = 0;
4488 self.warning_depth = 0;
4489 self.information_depth = 0;
4490 }
4491 }
4492 }
4493
4494 /// The current byte offset in the buffer.
4495 pub fn offset(&self) -> usize {
4496 self.range.start
4497 }
4498
4499 pub fn range(&self) -> Range<usize> {
4500 self.range.clone()
4501 }
4502
4503 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4504 let depth = match endpoint.severity {
4505 DiagnosticSeverity::ERROR => &mut self.error_depth,
4506 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4507 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4508 DiagnosticSeverity::HINT => &mut self.hint_depth,
4509 _ => return,
4510 };
4511 if endpoint.is_start {
4512 *depth += 1;
4513 } else {
4514 *depth -= 1;
4515 }
4516
4517 if endpoint.is_unnecessary {
4518 if endpoint.is_start {
4519 self.unnecessary_depth += 1;
4520 } else {
4521 self.unnecessary_depth -= 1;
4522 }
4523 }
4524 }
4525
4526 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4527 if self.error_depth > 0 {
4528 Some(DiagnosticSeverity::ERROR)
4529 } else if self.warning_depth > 0 {
4530 Some(DiagnosticSeverity::WARNING)
4531 } else if self.information_depth > 0 {
4532 Some(DiagnosticSeverity::INFORMATION)
4533 } else if self.hint_depth > 0 {
4534 Some(DiagnosticSeverity::HINT)
4535 } else {
4536 None
4537 }
4538 }
4539
4540 fn current_code_is_unnecessary(&self) -> bool {
4541 self.unnecessary_depth > 0
4542 }
4543}
4544
4545impl<'a> Iterator for BufferChunks<'a> {
4546 type Item = Chunk<'a>;
4547
4548 fn next(&mut self) -> Option<Self::Item> {
4549 let mut next_capture_start = usize::MAX;
4550 let mut next_diagnostic_endpoint = usize::MAX;
4551
4552 if let Some(highlights) = self.highlights.as_mut() {
4553 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4554 if *parent_capture_end <= self.range.start {
4555 highlights.stack.pop();
4556 } else {
4557 break;
4558 }
4559 }
4560
4561 if highlights.next_capture.is_none() {
4562 highlights.next_capture = highlights.captures.next();
4563 }
4564
4565 while let Some(capture) = highlights.next_capture.as_ref() {
4566 if self.range.start < capture.node.start_byte() {
4567 next_capture_start = capture.node.start_byte();
4568 break;
4569 } else {
4570 let highlight_id =
4571 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4572 highlights
4573 .stack
4574 .push((capture.node.end_byte(), highlight_id));
4575 highlights.next_capture = highlights.captures.next();
4576 }
4577 }
4578 }
4579
4580 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4581 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4582 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4583 if endpoint.offset <= self.range.start {
4584 self.update_diagnostic_depths(endpoint);
4585 diagnostic_endpoints.next();
4586 } else {
4587 next_diagnostic_endpoint = endpoint.offset;
4588 break;
4589 }
4590 }
4591 }
4592 self.diagnostic_endpoints = diagnostic_endpoints;
4593
4594 if let Some(chunk) = self.chunks.peek() {
4595 let chunk_start = self.range.start;
4596 let mut chunk_end = (self.chunks.offset() + chunk.len())
4597 .min(next_capture_start)
4598 .min(next_diagnostic_endpoint);
4599 let mut highlight_id = None;
4600 if let Some(highlights) = self.highlights.as_ref() {
4601 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4602 chunk_end = chunk_end.min(*parent_capture_end);
4603 highlight_id = Some(*parent_highlight_id);
4604 }
4605 }
4606
4607 let slice =
4608 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4609 self.range.start = chunk_end;
4610 if self.range.start == self.chunks.offset() + chunk.len() {
4611 self.chunks.next().unwrap();
4612 }
4613
4614 Some(Chunk {
4615 text: slice,
4616 syntax_highlight_id: highlight_id,
4617 diagnostic_severity: self.current_diagnostic_severity(),
4618 is_unnecessary: self.current_code_is_unnecessary(),
4619 ..Default::default()
4620 })
4621 } else {
4622 None
4623 }
4624 }
4625}
4626
4627impl operation_queue::Operation for Operation {
4628 fn lamport_timestamp(&self) -> clock::Lamport {
4629 match self {
4630 Operation::Buffer(_) => {
4631 unreachable!("buffer operations should never be deferred at this layer")
4632 }
4633 Operation::UpdateDiagnostics {
4634 lamport_timestamp, ..
4635 }
4636 | Operation::UpdateSelections {
4637 lamport_timestamp, ..
4638 }
4639 | Operation::UpdateCompletionTriggers {
4640 lamport_timestamp, ..
4641 } => *lamport_timestamp,
4642 }
4643 }
4644}
4645
4646impl Default for Diagnostic {
4647 fn default() -> Self {
4648 Self {
4649 source: Default::default(),
4650 code: None,
4651 code_description: None,
4652 severity: DiagnosticSeverity::ERROR,
4653 message: Default::default(),
4654 markdown: None,
4655 group_id: 0,
4656 is_primary: false,
4657 is_disk_based: false,
4658 is_unnecessary: false,
4659 data: None,
4660 }
4661 }
4662}
4663
4664impl IndentSize {
4665 /// Returns an [`IndentSize`] representing the given spaces.
4666 pub fn spaces(len: u32) -> Self {
4667 Self {
4668 len,
4669 kind: IndentKind::Space,
4670 }
4671 }
4672
4673 /// Returns an [`IndentSize`] representing a tab.
4674 pub fn tab() -> Self {
4675 Self {
4676 len: 1,
4677 kind: IndentKind::Tab,
4678 }
4679 }
4680
4681 /// An iterator over the characters represented by this [`IndentSize`].
4682 pub fn chars(&self) -> impl Iterator<Item = char> {
4683 iter::repeat(self.char()).take(self.len as usize)
4684 }
4685
4686 /// The character representation of this [`IndentSize`].
4687 pub fn char(&self) -> char {
4688 match self.kind {
4689 IndentKind::Space => ' ',
4690 IndentKind::Tab => '\t',
4691 }
4692 }
4693
4694 /// Consumes the current [`IndentSize`] and returns a new one that has
4695 /// been shrunk or enlarged by the given size along the given direction.
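    ///
    /// A small sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    ///
    /// // Growing an indent of four spaces by another four spaces yields eight spaces.
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(grown.chars().count(), 8);
    ///
    /// // Shrinking only applies when the indent kinds match, so a tab is left unchanged here.
    /// let unchanged = IndentSize::tab().with_delta(Ordering::Less, IndentSize::spaces(4));
    /// assert_eq!(unchanged.char(), '\t');
    /// ```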
4696 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4697 match direction {
4698 Ordering::Less => {
4699 if self.kind == size.kind && self.len >= size.len {
4700 self.len -= size.len;
4701 }
4702 }
4703 Ordering::Equal => {}
4704 Ordering::Greater => {
4705 if self.len == 0 {
4706 self = size;
4707 } else if self.kind == size.kind {
4708 self.len += size.len;
4709 }
4710 }
4711 }
4712 self
4713 }
4714
4715 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4716 match self.kind {
4717 IndentKind::Space => self.len as usize,
4718 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4719 }
4720 }
4721}
4722
4723#[cfg(any(test, feature = "test-support"))]
4724pub struct TestFile {
4725 pub path: Arc<Path>,
4726 pub root_name: String,
4727 pub local_root: Option<PathBuf>,
4728}
4729
4730#[cfg(any(test, feature = "test-support"))]
4731impl File for TestFile {
4732 fn path(&self) -> &Arc<Path> {
4733 &self.path
4734 }
4735
4736 fn full_path(&self, _: &gpui::App) -> PathBuf {
4737 PathBuf::from(&self.root_name).join(self.path.as_ref())
4738 }
4739
4740 fn as_local(&self) -> Option<&dyn LocalFile> {
4741 if self.local_root.is_some() {
4742 Some(self)
4743 } else {
4744 None
4745 }
4746 }
4747
4748 fn disk_state(&self) -> DiskState {
4749 unimplemented!()
4750 }
4751
4752 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4753 self.path().file_name().unwrap_or(self.root_name.as_ref())
4754 }
4755
4756 fn worktree_id(&self, _: &App) -> WorktreeId {
4757 WorktreeId::from_usize(0)
4758 }
4759
4760 fn to_proto(&self, _: &App) -> rpc::proto::File {
4761 unimplemented!()
4762 }
4763
4764 fn is_private(&self) -> bool {
4765 false
4766 }
4767}
4768
4769#[cfg(any(test, feature = "test-support"))]
4770impl LocalFile for TestFile {
4771 fn abs_path(&self, _cx: &App) -> PathBuf {
4772 PathBuf::from(self.local_root.as_ref().unwrap())
4773 .join(&self.root_name)
4774 .join(self.path.as_ref())
4775 }
4776
4777 fn load(&self, _cx: &App) -> Task<Result<String>> {
4778 unimplemented!()
4779 }
4780
4781 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4782 unimplemented!()
4783 }
4784}
4785
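/// Groups an ascending sequence of `u32` values (typically row numbers) into
/// contiguous ranges, starting a new range whenever a gap appears or a range
/// reaches `max_len`.
///
/// A small illustration (not compiled as a doctest, since this function is crate-private):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..6]);
/// ```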
4786pub(crate) fn contiguous_ranges(
4787 values: impl Iterator<Item = u32>,
4788 max_len: usize,
4789) -> impl Iterator<Item = Range<u32>> {
4790 let mut values = values;
4791 let mut current_range: Option<Range<u32>> = None;
4792 std::iter::from_fn(move || {
4793 loop {
4794 if let Some(value) = values.next() {
4795 if let Some(range) = &mut current_range {
4796 if value == range.end && range.len() < max_len {
4797 range.end += 1;
4798 continue;
4799 }
4800 }
4801
4802 let prev_range = current_range.clone();
4803 current_range = Some(value..(value + 1));
4804 if prev_range.is_some() {
4805 return prev_range;
4806 }
4807 } else {
4808 return current_range.take();
4809 }
4810 }
4811 })
4812}
4813
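/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally taking a language scope's configured word characters into account.
///
/// A small sketch (not compiled as a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert!(classifier.is_word('a'));
/// assert!(classifier.is_word('_'));
/// assert!(classifier.is_whitespace(' '));
/// assert!(classifier.is_punctuation('-'));
/// ```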
4814#[derive(Default, Debug)]
4815pub struct CharClassifier {
4816 scope: Option<LanguageScope>,
4817 for_completion: bool,
4818 ignore_punctuation: bool,
4819}
4820
4821impl CharClassifier {
4822 pub fn new(scope: Option<LanguageScope>) -> Self {
4823 Self {
4824 scope,
4825 for_completion: false,
4826 ignore_punctuation: false,
4827 }
4828 }
4829
4830 pub fn for_completion(self, for_completion: bool) -> Self {
4831 Self {
4832 for_completion,
4833 ..self
4834 }
4835 }
4836
4837 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4838 Self {
4839 ignore_punctuation,
4840 ..self
4841 }
4842 }
4843
4844 pub fn is_whitespace(&self, c: char) -> bool {
4845 self.kind(c) == CharKind::Whitespace
4846 }
4847
4848 pub fn is_word(&self, c: char) -> bool {
4849 self.kind(c) == CharKind::Word
4850 }
4851
4852 pub fn is_punctuation(&self, c: char) -> bool {
4853 self.kind(c) == CharKind::Punctuation
4854 }
4855
4856 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4857 if c.is_alphanumeric() || c == '_' {
4858 return CharKind::Word;
4859 }
4860
4861 if let Some(scope) = &self.scope {
4862 let characters = if self.for_completion {
4863 scope.completion_query_characters()
4864 } else {
4865 scope.word_characters()
4866 };
4867 if let Some(characters) = characters {
4868 if characters.contains(&c) {
4869 return CharKind::Word;
4870 }
4871 }
4872 }
4873
4874 if c.is_whitespace() {
4875 return CharKind::Whitespace;
4876 }
4877
4878 if ignore_punctuation {
4879 CharKind::Word
4880 } else {
4881 CharKind::Punctuation
4882 }
4883 }
4884
4885 pub fn kind(&self, c: char) -> CharKind {
4886 self.kind_with(c, self.ignore_punctuation)
4887 }
4888}
4889
4890/// Find all of the ranges of whitespace that occur at the ends of lines
4891/// in the given rope.
4892///
4893/// This could also be done with a regex search, but this implementation
4894/// avoids copying text.
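///
/// A small sketch (not compiled as a doctest); it assumes `Rope` can be built from a `&str`:
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n}\t\t\n");
/// // One range per line that ends in spaces or tabs.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..14, 16..18]);
/// ```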
4895pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4896 let mut ranges = Vec::new();
4897
4898 let mut offset = 0;
4899 let mut prev_chunk_trailing_whitespace_range = 0..0;
4900 for chunk in rope.chunks() {
4901 let mut prev_line_trailing_whitespace_range = 0..0;
4902 for (i, line) in chunk.split('\n').enumerate() {
4903 let line_end_offset = offset + line.len();
4904 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4905 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4906
4907 if i == 0 && trimmed_line_len == 0 {
4908 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4909 }
4910 if !prev_line_trailing_whitespace_range.is_empty() {
4911 ranges.push(prev_line_trailing_whitespace_range);
4912 }
4913
4914 offset = line_end_offset + 1;
4915 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4916 }
4917
4918 offset -= 1;
4919 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4920 }
4921
4922 if !prev_chunk_trailing_whitespace_range.is_empty() {
4923 ranges.push(prev_chunk_trailing_whitespace_range);
4924 }
4925
4926 ranges
4927}