1pub use crate::{
2 Grammar, Language, LanguageRegistry,
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto,
6};
7use crate::{
8 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
9 TreeSitterOptions,
10 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
11 language_settings::{LanguageSettings, language_settings},
12 outline::OutlineItem,
13 syntax_map::{
14 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
15 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
16 },
17 task_context::RunnableRange,
18 text_diff::text_diff,
19};
20use anyhow::{Context as _, Result};
21use async_watch as watch;
22pub use clock::ReplicaId;
23use clock::{AGENT_REPLICA_ID, Lamport};
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
29 Task, TaskLabel, TextStyle,
30};
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 ffi::OsStr,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, Range},
51 path::{Path, PathBuf},
52 rc,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
63 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
64 ToPointUtf16, Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{RangeExt, debug_panic, maybe};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76/// A label for the background task spawned by the buffer to compute
77/// a diff against the contents of its file.
78pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
79
80/// Indicates whether a [`Buffer`] has permission to edit.
81#[derive(PartialEq, Clone, Copy, Debug)]
82pub enum Capability {
83 /// The buffer is a mutable replica.
84 ReadWrite,
85 /// The buffer is a read-only replica.
86 ReadOnly,
87}
88
89pub type BufferRow = u32;
90
91/// An in-memory representation of a source code file, including its text,
92/// syntax trees, and diagnostics.
93pub struct Buffer {
94 text: TextBuffer,
95 branch_state: Option<BufferBranchState>,
96 /// Filesystem state, `None` when there is no path.
97 file: Option<Arc<dyn File>>,
98 /// The mtime of the file when this buffer was last loaded from
99 /// or saved to disk.
100 saved_mtime: Option<MTime>,
101 /// The version vector when this buffer was last loaded from
102 /// or saved to disk.
103 saved_version: clock::Global,
104 preview_version: clock::Global,
105 transaction_depth: usize,
106 was_dirty_before_starting_transaction: Option<bool>,
107 reload_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 autoindent_requests: Vec<Arc<AutoindentRequest>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
125 /// Memoizes calls to `has_changes_since(saved_version)`.
126 /// The cell contains `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
177#[serde(rename_all = "snake_case")]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190#[derive(Clone, Debug)]
191struct SelectionSet {
192 line_mode: bool,
193 cursor_shape: CursorShape,
194 selections: Arc<[Selection<Anchor>]>,
195 lamport_timestamp: clock::Lamport,
196}
197
198/// A diagnostic associated with a certain range of a buffer.
199#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
200pub struct Diagnostic {
201 /// The name of the service that produced this diagnostic.
202 pub source: Option<String>,
203 /// A machine-readable code that identifies this diagnostic.
204 pub code: Option<NumberOrString>,
205 pub code_description: Option<lsp::Url>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
210 /// The human-readable message in Markdown format, if available.
211 pub markdown: Option<String>,
212 /// An id that identifies the group to which this diagnostic belongs.
213 ///
214 /// When a language server produces a diagnostic with
215 /// one or more associated diagnostics, those diagnostics are all
216 /// assigned a single group ID.
217 pub group_id: usize,
218 /// Whether this diagnostic is the primary diagnostic for its group.
219 ///
220 /// In a given group, the primary diagnostic is the top-level diagnostic
221 /// returned by the language server. The non-primary diagnostics are the
222 /// associated diagnostics.
223 pub is_primary: bool,
224 /// Whether this diagnostic is considered to originate from an analysis of
225 /// files on disk, as opposed to any unsaved buffer contents. This is a
226 /// property of a given diagnostic source, and is configured for a given
227 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
228 /// for the language server.
229 pub is_disk_based: bool,
230 /// Whether this diagnostic marks unnecessary code.
231 pub is_unnecessary: bool,
232 /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for this diagnostic.
233 pub data: Option<Value>,
234 /// Whether to underline the corresponding text range in the editor.
235 pub underline: bool,
236}
237
238/// An operation used to synchronize this buffer with its other replicas.
239#[derive(Clone, Debug, PartialEq)]
240pub enum Operation {
241 /// A text operation.
242 Buffer(text::Operation),
243
244 /// An update to the buffer's diagnostics.
245 UpdateDiagnostics {
246 /// The id of the language server that produced the new diagnostics.
247 server_id: LanguageServerId,
248 /// The diagnostics.
249 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
250 /// The buffer's lamport timestamp.
251 lamport_timestamp: clock::Lamport,
252 },
253
254 /// An update to the most recent selections in this buffer.
255 UpdateSelections {
256 /// The selections.
257 selections: Arc<[Selection<Anchor>]>,
258 /// The buffer's lamport timestamp.
259 lamport_timestamp: clock::Lamport,
260 /// Whether the selections are in 'line mode'.
261 line_mode: bool,
262 /// The [`CursorShape`] associated with these selections.
263 cursor_shape: CursorShape,
264 },
265
266 /// An update to the characters that should trigger autocompletion
267 /// for this buffer.
268 UpdateCompletionTriggers {
269 /// The characters that trigger autocompletion.
270 triggers: Vec<String>,
271 /// The buffer's lamport timestamp.
272 lamport_timestamp: clock::Lamport,
273 /// The language server ID.
274 server_id: LanguageServerId,
275 },
276}
277
278/// An event that occurs in a buffer.
279#[derive(Clone, Debug, PartialEq)]
280pub enum BufferEvent {
281 /// The buffer was changed in a way that must be
282 /// propagated to its other replicas.
283 Operation {
284 operation: Operation,
285 is_local: bool,
286 },
287 /// The buffer was edited.
288 Edited,
289 /// The buffer's `dirty` bit changed.
290 DirtyChanged,
291 /// The buffer was saved.
292 Saved,
293 /// The buffer's file was changed on disk.
294 FileHandleChanged,
295 /// The buffer was reloaded.
296 Reloaded,
297 /// The buffer needs to be reloaded.
298 ReloadNeeded,
299 /// The buffer's language was changed.
300 LanguageChanged,
301 /// The buffer's syntax trees were updated.
302 Reparsed,
303 /// The buffer's diagnostics were updated.
304 DiagnosticsUpdated,
305 /// The buffer gained or lost editing capabilities.
306 CapabilityChanged,
307 /// The buffer was explicitly requested to close.
308 Closed,
309 /// The buffer was discarded when closing.
310 Discarded,
311}
312
313/// The file associated with a buffer.
314pub trait File: Send + Sync + Any {
315 /// Returns the [`LocalFile`] associated with this file, if the
316 /// file is local.
317 fn as_local(&self) -> Option<&dyn LocalFile>;
318
319 /// Returns whether this file is local.
320 fn is_local(&self) -> bool {
321 self.as_local().is_some()
322 }
323
324 /// Returns the file's storage state: new, present on disk, or deleted. The returned state includes
325 /// metadata that is only available in some states, such as the modification time.
326 fn disk_state(&self) -> DiskState;
327
328 /// Returns the path of this file relative to the worktree's root directory.
329 fn path(&self) -> &Arc<Path>;
330
331 /// Returns the path of this file relative to the worktree's parent directory (this means it
332 /// includes the name of the worktree's root folder).
333 fn full_path(&self, cx: &App) -> PathBuf;
334
335 /// Returns the last component of this handle's absolute path. If this handle refers to the root
336 /// of its worktree, then this method will return the name of the worktree itself.
337 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
338
339 /// Returns the id of the worktree to which this file belongs.
340 ///
341 /// This is needed for looking up project-specific settings.
342 fn worktree_id(&self, cx: &App) -> WorktreeId;
343
344 /// Converts this file into a protobuf message.
345 fn to_proto(&self, cx: &App) -> rpc::proto::File;
346
347 /// Returns whether Zed considers this to be a private file.
348 fn is_private(&self) -> bool;
349}
350
351/// The file's storage status - whether it's stored (`Present`), and if so when it was last
352/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
353/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
354/// indicator for new files.
355#[derive(Copy, Clone, Debug, PartialEq)]
356pub enum DiskState {
357 /// File created in Zed that has not been saved.
358 New,
359 /// File present on the filesystem.
360 Present { mtime: MTime },
361 /// Deleted file that was previously present.
362 Deleted,
363}
364
365impl DiskState {
366 /// Returns the file's last known modification time on disk.
367 pub fn mtime(self) -> Option<MTime> {
368 match self {
369 DiskState::New => None,
370 DiskState::Present { mtime } => Some(mtime),
371 DiskState::Deleted => None,
372 }
373 }
374
375 pub fn exists(&self) -> bool {
376 match self {
377 DiskState::New => false,
378 DiskState::Present { .. } => true,
379 DiskState::Deleted => false,
380 }
381 }
382}
383
384/// The file associated with a buffer, in the case where the file is on the local disk.
385pub trait LocalFile: File {
386 /// Returns the absolute path of this file.
387 fn abs_path(&self, cx: &App) -> PathBuf;
388
389 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
390 fn load(&self, cx: &App) -> Task<Result<String>>;
391
392 /// Loads the file's contents from disk.
393 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
394}
395
396/// The auto-indent behavior associated with an editing operation.
397/// For some editing operations, each affected line of text has its
398/// indentation recomputed. For other operations, the entire block
399/// of edited text is adjusted uniformly.
400#[derive(Clone, Debug)]
401pub enum AutoindentMode {
402 /// Indent each line of inserted text.
403 EachLine,
404 /// Apply the same indentation adjustment to all of the lines
405 /// in a given insertion.
406 Block {
407 /// The original indentation column of the first line of each
408 /// insertion, if it has been copied.
409 ///
410 /// Knowing this makes it possible to preserve the relative indentation
411 /// of every line in the insertion from when it was copied.
412 ///
413 /// If the original indent column is `a`, and the first line of the insertion
414 /// is then auto-indented to column `b`, then every other line of
415 /// the insertion is shifted by `b - a` columns relative to its original indentation.
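 ///
 /// For example (illustrative numbers only): if a copied block's first line
 /// originally sat at column 4 (`a = 4`) and auto-indent places it at column 8
 /// (`b = 8`), then every following line of that insertion is shifted right by
 /// `b - a = 4` columns from its original indentation.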
416 original_indent_columns: Vec<Option<u32>>,
417 },
418}
419
420#[derive(Clone)]
421struct AutoindentRequest {
422 before_edit: BufferSnapshot,
423 entries: Vec<AutoindentRequestEntry>,
424 is_block_mode: bool,
425 ignore_empty_lines: bool,
426}
427
428#[derive(Debug, Clone)]
429struct AutoindentRequestEntry {
430 /// A range of the buffer whose indentation should be adjusted.
431 range: Range<Anchor>,
432 /// Whether or not these lines should be considered brand new, for the
433 /// purpose of auto-indent. When text is not new, its indentation will
434 /// only be adjusted if the suggested indentation level has *changed*
435 /// since the edit was made.
436 first_line_is_new: bool,
437 indent_size: IndentSize,
438 original_indent_column: Option<u32>,
439}
440
441#[derive(Debug)]
442struct IndentSuggestion {
443 basis_row: u32,
444 delta: Ordering,
445 within_error: bool,
446}
447
448struct BufferChunkHighlights<'a> {
449 captures: SyntaxMapCaptures<'a>,
450 next_capture: Option<SyntaxMapCapture<'a>>,
451 stack: Vec<(usize, HighlightId)>,
452 highlight_maps: Vec<HighlightMap>,
453}
454
455/// An iterator that yields chunks of a buffer's text, along with their
456/// syntax highlights and diagnostic status.
457pub struct BufferChunks<'a> {
458 buffer_snapshot: Option<&'a BufferSnapshot>,
459 range: Range<usize>,
460 chunks: text::Chunks<'a>,
461 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
462 error_depth: usize,
463 warning_depth: usize,
464 information_depth: usize,
465 hint_depth: usize,
466 unnecessary_depth: usize,
467 underline: bool,
468 highlights: Option<BufferChunkHighlights<'a>>,
469}
470
471/// A chunk of a buffer's text, along with its syntax highlight and
472/// diagnostic status.
473#[derive(Clone, Debug, Default)]
474pub struct Chunk<'a> {
475 /// The text of the chunk.
476 pub text: &'a str,
477 /// The syntax highlighting style of the chunk.
478 pub syntax_highlight_id: Option<HighlightId>,
479 /// The highlight style that has been applied to this chunk in
480 /// the editor.
481 pub highlight_style: Option<HighlightStyle>,
482 /// The severity of diagnostic associated with this chunk, if any.
483 pub diagnostic_severity: Option<DiagnosticSeverity>,
484 /// Whether this chunk of text is marked as unnecessary.
485 pub is_unnecessary: bool,
486 /// Whether this chunk of text was originally a tab character.
487 pub is_tab: bool,
488 /// Whether to underline the corresponding text range in the editor.
489 pub underline: bool,
490}
491
492/// A set of edits to a given version of a buffer, computed asynchronously.
493#[derive(Debug)]
494pub struct Diff {
495 pub base_version: clock::Global,
496 pub line_ending: LineEnding,
497 pub edits: Vec<(Range<usize>, Arc<str>)>,
498}
499
500#[derive(Debug, Clone, Copy)]
501pub(crate) struct DiagnosticEndpoint {
502 offset: usize,
503 is_start: bool,
504 underline: bool,
505 severity: DiagnosticSeverity,
506 is_unnecessary: bool,
507}
508
509/// A class of characters, used for characterizing a run of text.
510#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
511pub enum CharKind {
512 /// Whitespace.
513 Whitespace,
514 /// Punctuation.
515 Punctuation,
516 /// Word.
517 Word,
518}
519
520/// A runnable is a set of data about a buffer region that can be resolved into a task.
521pub struct Runnable {
522 pub tags: SmallVec<[RunnableTag; 1]>,
523 pub language: Arc<Language>,
524 pub buffer: BufferId,
525}
526
527#[derive(Default, Clone, Debug)]
528pub struct HighlightedText {
529 pub text: SharedString,
530 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
531}
532
533#[derive(Default, Debug)]
534struct HighlightedTextBuilder {
535 pub text: String,
536 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
537}
538
539impl HighlightedText {
540 pub fn from_buffer_range<T: ToOffset>(
541 range: Range<T>,
542 snapshot: &text::BufferSnapshot,
543 syntax_snapshot: &SyntaxSnapshot,
544 override_style: Option<HighlightStyle>,
545 syntax_theme: &SyntaxTheme,
546 ) -> Self {
547 let mut highlighted_text = HighlightedTextBuilder::default();
548 highlighted_text.add_text_from_buffer_range(
549 range,
550 snapshot,
551 syntax_snapshot,
552 override_style,
553 syntax_theme,
554 );
555 highlighted_text.build()
556 }
557
558 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
559 gpui::StyledText::new(self.text.clone())
560 .with_default_highlights(default_style, self.highlights.iter().cloned())
561 }
562
563 /// Returns the first line, with leading whitespace trimmed unless a highlight starts within it,
564 /// and a boolean indicating whether more lines follow.
565 pub fn first_line_preview(self) -> (Self, bool) {
566 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
567 let first_line = &self.text[..newline_ix];
568
569 // Trim leading whitespace, unless an edit starts prior to it.
570 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
571 if let Some((first_highlight_range, _)) = self.highlights.first() {
572 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
573 }
574
575 let preview_text = &first_line[preview_start_ix..];
576 let preview_highlights = self
577 .highlights
578 .into_iter()
579 .take_while(|(range, _)| range.start < newline_ix)
580 .filter_map(|(mut range, highlight)| {
581 range.start = range.start.saturating_sub(preview_start_ix);
582 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
583 if range.is_empty() {
584 None
585 } else {
586 Some((range, highlight))
587 }
588 });
589
590 let preview = Self {
591 text: SharedString::new(preview_text),
592 highlights: preview_highlights.collect(),
593 };
594
595 (preview, self.text.len() > newline_ix)
596 }
597}
598
599impl HighlightedTextBuilder {
600 pub fn build(self) -> HighlightedText {
601 HighlightedText {
602 text: self.text.into(),
603 highlights: self.highlights,
604 }
605 }
606
607 pub fn add_text_from_buffer_range<T: ToOffset>(
608 &mut self,
609 range: Range<T>,
610 snapshot: &text::BufferSnapshot,
611 syntax_snapshot: &SyntaxSnapshot,
612 override_style: Option<HighlightStyle>,
613 syntax_theme: &SyntaxTheme,
614 ) {
615 let range = range.to_offset(snapshot);
616 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
617 let start = self.text.len();
618 self.text.push_str(chunk.text);
619 let end = self.text.len();
620
621 if let Some(mut highlight_style) = chunk
622 .syntax_highlight_id
623 .and_then(|id| id.style(syntax_theme))
624 {
625 if let Some(override_style) = override_style {
626 highlight_style.highlight(override_style);
627 }
628 self.highlights.push((start..end, highlight_style));
629 } else if let Some(override_style) = override_style {
630 self.highlights.push((start..end, override_style));
631 }
632 }
633 }
634
635 fn highlighted_chunks<'a>(
636 range: Range<usize>,
637 snapshot: &'a text::BufferSnapshot,
638 syntax_snapshot: &'a SyntaxSnapshot,
639 ) -> BufferChunks<'a> {
640 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
641 grammar.highlights_query.as_ref()
642 });
643
644 let highlight_maps = captures
645 .grammars()
646 .iter()
647 .map(|grammar| grammar.highlight_map())
648 .collect();
649
650 BufferChunks::new(
651 snapshot.as_rope(),
652 range,
653 Some((captures, highlight_maps)),
654 false,
655 None,
656 )
657 }
658}
659
660#[derive(Clone)]
661pub struct EditPreview {
662 old_snapshot: text::BufferSnapshot,
663 applied_edits_snapshot: text::BufferSnapshot,
664 syntax_snapshot: SyntaxSnapshot,
665}
666
667impl EditPreview {
668 pub fn highlight_edits(
669 &self,
670 current_snapshot: &BufferSnapshot,
671 edits: &[(Range<Anchor>, String)],
672 include_deletions: bool,
673 cx: &App,
674 ) -> HighlightedText {
675 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
676 return HighlightedText::default();
677 };
678
679 let mut highlighted_text = HighlightedTextBuilder::default();
680
681 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
682
683 let insertion_highlight_style = HighlightStyle {
684 background_color: Some(cx.theme().status().created_background),
685 ..Default::default()
686 };
687 let deletion_highlight_style = HighlightStyle {
688 background_color: Some(cx.theme().status().deleted_background),
689 ..Default::default()
690 };
691 let syntax_theme = cx.theme().syntax();
692
693 for (range, edit_text) in edits {
694 let edit_new_end_in_preview_snapshot = range
695 .end
696 .bias_right(&self.old_snapshot)
697 .to_offset(&self.applied_edits_snapshot);
698 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
699
700 let unchanged_range_in_preview_snapshot =
701 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
702 if !unchanged_range_in_preview_snapshot.is_empty() {
703 highlighted_text.add_text_from_buffer_range(
704 unchanged_range_in_preview_snapshot,
705 &self.applied_edits_snapshot,
706 &self.syntax_snapshot,
707 None,
708 &syntax_theme,
709 );
710 }
711
712 let range_in_current_snapshot = range.to_offset(current_snapshot);
713 if include_deletions && !range_in_current_snapshot.is_empty() {
714 highlighted_text.add_text_from_buffer_range(
715 range_in_current_snapshot,
716 &current_snapshot.text,
717 &current_snapshot.syntax,
718 Some(deletion_highlight_style),
719 &syntax_theme,
720 );
721 }
722
723 if !edit_text.is_empty() {
724 highlighted_text.add_text_from_buffer_range(
725 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
726 &self.applied_edits_snapshot,
727 &self.syntax_snapshot,
728 Some(insertion_highlight_style),
729 &syntax_theme,
730 );
731 }
732
733 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
734 }
735
736 highlighted_text.add_text_from_buffer_range(
737 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
738 &self.applied_edits_snapshot,
739 &self.syntax_snapshot,
740 None,
741 &syntax_theme,
742 );
743
744 highlighted_text.build()
745 }
746
747 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
748 let (first, _) = edits.first()?;
749 let (last, _) = edits.last()?;
750
751 let start = first
752 .start
753 .bias_left(&self.old_snapshot)
754 .to_point(&self.applied_edits_snapshot);
755 let end = last
756 .end
757 .bias_right(&self.old_snapshot)
758 .to_point(&self.applied_edits_snapshot);
759
760 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
761 let range = Point::new(start.row, 0)
762 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
763
764 Some(range.to_offset(&self.applied_edits_snapshot))
765 }
766}
767
768#[derive(Clone, Debug, PartialEq, Eq)]
769pub struct BracketMatch {
770 pub open_range: Range<usize>,
771 pub close_range: Range<usize>,
772 pub newline_only: bool,
773}
774
775impl Buffer {
776 /// Create a new buffer with the given base text.
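 ///
 /// A minimal usage sketch (assuming an entity context `cx` is available; the
 /// literal text is just an example):
 ///
 /// ```ignore
 /// let buffer = cx.new(|cx| Buffer::local("Hello, world!", cx));
 /// ```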
777 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
778 Self::build(
779 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
780 None,
781 Capability::ReadWrite,
782 )
783 }
784
785 /// Create a new buffer from base text that has already had line endings and other normalization applied.
786 pub fn local_normalized(
787 base_text_normalized: Rope,
788 line_ending: LineEnding,
789 cx: &Context<Self>,
790 ) -> Self {
791 Self::build(
792 TextBuffer::new_normalized(
793 0,
794 cx.entity_id().as_non_zero_u64().into(),
795 line_ending,
796 base_text_normalized,
797 ),
798 None,
799 Capability::ReadWrite,
800 )
801 }
802
803 /// Create a new buffer that is a replica of a remote buffer.
804 pub fn remote(
805 remote_id: BufferId,
806 replica_id: ReplicaId,
807 capability: Capability,
808 base_text: impl Into<String>,
809 ) -> Self {
810 Self::build(
811 TextBuffer::new(replica_id, remote_id, base_text.into()),
812 None,
813 capability,
814 )
815 }
816
817 /// Create a new buffer that is a replica of a remote buffer, populating its
818 /// state from the given protobuf message.
819 pub fn from_proto(
820 replica_id: ReplicaId,
821 capability: Capability,
822 message: proto::BufferState,
823 file: Option<Arc<dyn File>>,
824 ) -> Result<Self> {
825 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
826 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
827 let mut this = Self::build(buffer, file, capability);
828 this.text.set_line_ending(proto::deserialize_line_ending(
829 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
830 ));
831 this.saved_version = proto::deserialize_version(&message.saved_version);
832 this.saved_mtime = message.saved_mtime.map(|time| time.into());
833 Ok(this)
834 }
835
836 /// Serialize the buffer's state to a protobuf message.
837 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
838 proto::BufferState {
839 id: self.remote_id().into(),
840 file: self.file.as_ref().map(|f| f.to_proto(cx)),
841 base_text: self.base_text().to_string(),
842 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
843 saved_version: proto::serialize_version(&self.saved_version),
844 saved_mtime: self.saved_mtime.map(|time| time.into()),
845 }
846 }
847
848 /// Serialize as protobufs all of the changes to the buffer since the given version.
849 pub fn serialize_ops(
850 &self,
851 since: Option<clock::Global>,
852 cx: &App,
853 ) -> Task<Vec<proto::Operation>> {
854 let mut operations = Vec::new();
855 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
856
857 operations.extend(self.remote_selections.iter().map(|(_, set)| {
858 proto::serialize_operation(&Operation::UpdateSelections {
859 selections: set.selections.clone(),
860 lamport_timestamp: set.lamport_timestamp,
861 line_mode: set.line_mode,
862 cursor_shape: set.cursor_shape,
863 })
864 }));
865
866 for (server_id, diagnostics) in &self.diagnostics {
867 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
868 lamport_timestamp: self.diagnostics_timestamp,
869 server_id: *server_id,
870 diagnostics: diagnostics.iter().cloned().collect(),
871 }));
872 }
873
874 for (server_id, completions) in &self.completion_triggers_per_language_server {
875 operations.push(proto::serialize_operation(
876 &Operation::UpdateCompletionTriggers {
877 triggers: completions.iter().cloned().collect(),
878 lamport_timestamp: self.completion_triggers_timestamp,
879 server_id: *server_id,
880 },
881 ));
882 }
883
884 let text_operations = self.text.operations().clone();
885 cx.background_spawn(async move {
886 let since = since.unwrap_or_default();
887 operations.extend(
888 text_operations
889 .iter()
890 .filter(|(_, op)| !since.observed(op.timestamp()))
891 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
892 );
893 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
894 operations
895 })
896 }
897
898 /// Assign a language to the buffer, returning the buffer.
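 ///
 /// Builder-style sketch (assumes a `rust_language: Arc<Language>` obtained
 /// elsewhere, for example from a language registry):
 ///
 /// ```ignore
 /// let buffer = cx.new(|cx| {
 ///     Buffer::local("fn main() {}", cx).with_language(rust_language, cx)
 /// });
 /// ```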
899 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
900 self.set_language(Some(language), cx);
901 self
902 }
903
904 /// Returns the [`Capability`] of this buffer.
905 pub fn capability(&self) -> Capability {
906 self.capability
907 }
908
909 /// Whether this buffer can only be read.
910 pub fn read_only(&self) -> bool {
911 self.capability == Capability::ReadOnly
912 }
913
914 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
915 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
916 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
917 let snapshot = buffer.snapshot();
918 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
919 Self {
920 saved_mtime,
921 saved_version: buffer.version(),
922 preview_version: buffer.version(),
923 reload_task: None,
924 transaction_depth: 0,
925 was_dirty_before_starting_transaction: None,
926 has_unsaved_edits: Cell::new((buffer.version(), false)),
927 text: buffer,
928 branch_state: None,
929 file,
930 capability,
931 syntax_map,
932 reparse: None,
933 non_text_state_update_count: 0,
934 sync_parse_timeout: Duration::from_millis(1),
935 parse_status: async_watch::channel(ParseStatus::Idle),
936 autoindent_requests: Default::default(),
937 pending_autoindent: Default::default(),
938 language: None,
939 remote_selections: Default::default(),
940 diagnostics: Default::default(),
941 diagnostics_timestamp: Default::default(),
942 completion_triggers: Default::default(),
943 completion_triggers_per_language_server: Default::default(),
944 completion_triggers_timestamp: Default::default(),
945 deferred_ops: OperationQueue::new(),
946 has_conflict: false,
947 change_bits: Default::default(),
948 _subscriptions: Vec::new(),
949 }
950 }
951
952 pub fn build_snapshot(
953 text: Rope,
954 language: Option<Arc<Language>>,
955 language_registry: Option<Arc<LanguageRegistry>>,
956 cx: &mut App,
957 ) -> impl Future<Output = BufferSnapshot> + use<> {
958 let entity_id = cx.reserve_entity::<Self>().entity_id();
959 let buffer_id = entity_id.as_non_zero_u64().into();
960 async move {
961 let text =
962 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
963 let mut syntax = SyntaxMap::new(&text).snapshot();
964 if let Some(language) = language.clone() {
965 let text = text.clone();
966 let language = language.clone();
967 let language_registry = language_registry.clone();
968 syntax.reparse(&text, language_registry, language);
969 }
970 BufferSnapshot {
971 text,
972 syntax,
973 file: None,
974 diagnostics: Default::default(),
975 remote_selections: Default::default(),
976 language,
977 non_text_state_update_count: 0,
978 }
979 }
980 }
981
982 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
983 let entity_id = cx.reserve_entity::<Self>().entity_id();
984 let buffer_id = entity_id.as_non_zero_u64().into();
985 let text =
986 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
987 let syntax = SyntaxMap::new(&text).snapshot();
988 BufferSnapshot {
989 text,
990 syntax,
991 file: None,
992 diagnostics: Default::default(),
993 remote_selections: Default::default(),
994 language: None,
995 non_text_state_update_count: 0,
996 }
997 }
998
999 #[cfg(any(test, feature = "test-support"))]
1000 pub fn build_snapshot_sync(
1001 text: Rope,
1002 language: Option<Arc<Language>>,
1003 language_registry: Option<Arc<LanguageRegistry>>,
1004 cx: &mut App,
1005 ) -> BufferSnapshot {
1006 let entity_id = cx.reserve_entity::<Self>().entity_id();
1007 let buffer_id = entity_id.as_non_zero_u64().into();
1008 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1009 let mut syntax = SyntaxMap::new(&text).snapshot();
1010 if let Some(language) = language.clone() {
1011 let text = text.clone();
1012 let language = language.clone();
1013 let language_registry = language_registry.clone();
1014 syntax.reparse(&text, language_registry, language);
1015 }
1016 BufferSnapshot {
1017 text,
1018 syntax,
1019 file: None,
1020 diagnostics: Default::default(),
1021 remote_selections: Default::default(),
1022 language,
1023 non_text_state_update_count: 0,
1024 }
1025 }
1026
1027 /// Retrieve a snapshot of the buffer's current state. This is computationally
1028 /// cheap, and allows reading from the buffer on a background thread.
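 ///
 /// A sketch of the intended pattern (hypothetical surrounding code):
 ///
 /// ```ignore
 /// let snapshot = buffer.read(cx).snapshot();
 /// cx.background_spawn(async move {
 ///     // Safe to read on a background thread; the snapshot is immutable.
 ///     let line_count = snapshot.max_point().row + 1;
 ///     println!("buffer has {line_count} lines");
 /// })
 /// .detach();
 /// ```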
1029 pub fn snapshot(&self) -> BufferSnapshot {
1030 let text = self.text.snapshot();
1031 let mut syntax_map = self.syntax_map.lock();
1032 syntax_map.interpolate(&text);
1033 let syntax = syntax_map.snapshot();
1034
1035 BufferSnapshot {
1036 text,
1037 syntax,
1038 file: self.file.clone(),
1039 remote_selections: self.remote_selections.clone(),
1040 diagnostics: self.diagnostics.clone(),
1041 language: self.language.clone(),
1042 non_text_state_update_count: self.non_text_state_update_count,
1043 }
1044 }
1045
1046 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1047 let this = cx.entity();
1048 cx.new(|cx| {
1049 let mut branch = Self {
1050 branch_state: Some(BufferBranchState {
1051 base_buffer: this.clone(),
1052 merged_operations: Default::default(),
1053 }),
1054 language: self.language.clone(),
1055 has_conflict: self.has_conflict,
1056 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1057 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1058 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1059 };
1060 if let Some(language_registry) = self.language_registry() {
1061 branch.set_language_registry(language_registry);
1062 }
1063
1064 // Reparse the branch buffer so that we get syntax highlighting immediately.
1065 branch.reparse(cx);
1066
1067 branch
1068 })
1069 }
1070
1071 pub fn preview_edits(
1072 &self,
1073 edits: Arc<[(Range<Anchor>, String)]>,
1074 cx: &App,
1075 ) -> Task<EditPreview> {
1076 let registry = self.language_registry();
1077 let language = self.language().cloned();
1078 let old_snapshot = self.text.snapshot();
1079 let mut branch_buffer = self.text.branch();
1080 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1081 cx.background_spawn(async move {
1082 if !edits.is_empty() {
1083 if let Some(language) = language.clone() {
1084 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1085 }
1086
1087 branch_buffer.edit(edits.iter().cloned());
1088 let snapshot = branch_buffer.snapshot();
1089 syntax_snapshot.interpolate(&snapshot);
1090
1091 if let Some(language) = language {
1092 syntax_snapshot.reparse(&snapshot, registry, language);
1093 }
1094 }
1095 EditPreview {
1096 old_snapshot,
1097 applied_edits_snapshot: branch_buffer.snapshot(),
1098 syntax_snapshot,
1099 }
1100 })
1101 }
1102
1103 /// Applies all of the changes in this buffer that intersect any of the
1104 /// given `ranges` to its base buffer.
1105 ///
1106 /// If `ranges` is empty, then all changes will be applied. This buffer must
1107 /// be a branch buffer to call this method.
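 ///
 /// A usage sketch (assuming `branch` was created with [`Buffer::branch`]):
 ///
 /// ```ignore
 /// branch.update(cx, |branch, cx| {
 ///     // An empty `ranges` vec applies every change back to the base buffer.
 ///     branch.merge_into_base(Vec::new(), cx);
 /// });
 /// ```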
1108 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1109 let Some(base_buffer) = self.base_buffer() else {
1110 debug_panic!("not a branch buffer");
1111 return;
1112 };
1113
1114 let mut ranges = if ranges.is_empty() {
1115 &[0..usize::MAX]
1116 } else {
1117 ranges.as_slice()
1118 }
1119 .into_iter()
1120 .peekable();
1121
1122 let mut edits = Vec::new();
1123 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1124 let mut is_included = false;
1125 while let Some(range) = ranges.peek() {
1126 if range.end < edit.new.start {
1127 ranges.next().unwrap();
1128 } else {
1129 if range.start <= edit.new.end {
1130 is_included = true;
1131 }
1132 break;
1133 }
1134 }
1135
1136 if is_included {
1137 edits.push((
1138 edit.old.clone(),
1139 self.text_for_range(edit.new.clone()).collect::<String>(),
1140 ));
1141 }
1142 }
1143
1144 let operation = base_buffer.update(cx, |base_buffer, cx| {
1145 // cx.emit(BufferEvent::DiffBaseChanged);
1146 base_buffer.edit(edits, None, cx)
1147 });
1148
1149 if let Some(operation) = operation {
1150 if let Some(BufferBranchState {
1151 merged_operations, ..
1152 }) = &mut self.branch_state
1153 {
1154 merged_operations.push(operation);
1155 }
1156 }
1157 }
1158
1159 fn on_base_buffer_event(
1160 &mut self,
1161 _: Entity<Buffer>,
1162 event: &BufferEvent,
1163 cx: &mut Context<Self>,
1164 ) {
1165 let BufferEvent::Operation { operation, .. } = event else {
1166 return;
1167 };
1168 let Some(BufferBranchState {
1169 merged_operations, ..
1170 }) = &mut self.branch_state
1171 else {
1172 return;
1173 };
1174
1175 let mut operation_to_undo = None;
1176 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1177 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1178 merged_operations.remove(ix);
1179 operation_to_undo = Some(operation.timestamp);
1180 }
1181 }
1182
1183 self.apply_ops([operation.clone()], cx);
1184
1185 if let Some(timestamp) = operation_to_undo {
1186 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1187 self.undo_operations(counts, cx);
1188 }
1189 }
1190
1191 #[cfg(test)]
1192 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1193 &self.text
1194 }
1195
1196 /// Retrieve a snapshot of the buffer's raw text, without any
1197 /// language-related state like the syntax tree or diagnostics.
1198 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1199 self.text.snapshot()
1200 }
1201
1202 /// The file associated with the buffer, if any.
1203 pub fn file(&self) -> Option<&Arc<dyn File>> {
1204 self.file.as_ref()
1205 }
1206
1207 /// The version of the buffer that was last saved or reloaded from disk.
1208 pub fn saved_version(&self) -> &clock::Global {
1209 &self.saved_version
1210 }
1211
1212 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1213 pub fn saved_mtime(&self) -> Option<MTime> {
1214 self.saved_mtime
1215 }
1216
1217 /// Assign a language to the buffer.
1218 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1219 self.non_text_state_update_count += 1;
1220 self.syntax_map.lock().clear(&self.text);
1221 self.language = language;
1222 self.was_changed();
1223 self.reparse(cx);
1224 cx.emit(BufferEvent::LanguageChanged);
1225 }
1226
1227 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1228 /// other languages if parts of the buffer are written in different languages.
1229 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1230 self.syntax_map
1231 .lock()
1232 .set_language_registry(language_registry);
1233 }
1234
1235 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1236 self.syntax_map.lock().language_registry()
1237 }
1238
1239 /// Assign the buffer a new [`Capability`].
1240 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1241 self.capability = capability;
1242 cx.emit(BufferEvent::CapabilityChanged)
1243 }
1244
1245 /// This method is called to signal that the buffer has been saved.
1246 pub fn did_save(
1247 &mut self,
1248 version: clock::Global,
1249 mtime: Option<MTime>,
1250 cx: &mut Context<Self>,
1251 ) {
1252 self.saved_version = version;
1253 self.has_unsaved_edits
1254 .set((self.saved_version().clone(), false));
1255 self.has_conflict = false;
1256 self.saved_mtime = mtime;
1257 self.was_changed();
1258 cx.emit(BufferEvent::Saved);
1259 cx.notify();
1260 }
1261
1262 /// This method is called to signal that the buffer has been discarded.
1263 pub fn discarded(&self, cx: &mut Context<Self>) {
1264 cx.emit(BufferEvent::Discarded);
1265 cx.notify();
1266 }
1267
1268 /// Reloads the contents of the buffer from disk.
1269 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1270 let (tx, rx) = futures::channel::oneshot::channel();
1271 let prev_version = self.text.version();
1272 self.reload_task = Some(cx.spawn(async move |this, cx| {
1273 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1274 let file = this.file.as_ref()?.as_local()?;
1275
1276 Some((file.disk_state().mtime(), file.load(cx)))
1277 })?
1278 else {
1279 return Ok(());
1280 };
1281
1282 let new_text = new_text.await?;
1283 let diff = this
1284 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1285 .await;
1286 this.update(cx, |this, cx| {
1287 if this.version() == diff.base_version {
1288 this.finalize_last_transaction();
1289 this.apply_diff(diff, cx);
1290 tx.send(this.finalize_last_transaction().cloned()).ok();
1291 this.has_conflict = false;
1292 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1293 } else {
1294 if !diff.edits.is_empty()
1295 || this
1296 .edits_since::<usize>(&diff.base_version)
1297 .next()
1298 .is_some()
1299 {
1300 this.has_conflict = true;
1301 }
1302
1303 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1304 }
1305
1306 this.reload_task.take();
1307 })
1308 }));
1309 rx
1310 }
1311
1312 /// This method is called to signal that the buffer has been reloaded.
1313 pub fn did_reload(
1314 &mut self,
1315 version: clock::Global,
1316 line_ending: LineEnding,
1317 mtime: Option<MTime>,
1318 cx: &mut Context<Self>,
1319 ) {
1320 self.saved_version = version;
1321 self.has_unsaved_edits
1322 .set((self.saved_version.clone(), false));
1323 self.text.set_line_ending(line_ending);
1324 self.saved_mtime = mtime;
1325 cx.emit(BufferEvent::Reloaded);
1326 cx.notify();
1327 }
1328
1329 /// Updates the [`File`] backing this buffer. This should be called when
1330 /// the file has changed or has been deleted.
1331 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1332 let was_dirty = self.is_dirty();
1333 let mut file_changed = false;
1334
1335 if let Some(old_file) = self.file.as_ref() {
1336 if new_file.path() != old_file.path() {
1337 file_changed = true;
1338 }
1339
1340 let old_state = old_file.disk_state();
1341 let new_state = new_file.disk_state();
1342 if old_state != new_state {
1343 file_changed = true;
1344 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1345 cx.emit(BufferEvent::ReloadNeeded)
1346 }
1347 }
1348 } else {
1349 file_changed = true;
1350 };
1351
1352 self.file = Some(new_file);
1353 if file_changed {
1354 self.was_changed();
1355 self.non_text_state_update_count += 1;
1356 if was_dirty != self.is_dirty() {
1357 cx.emit(BufferEvent::DirtyChanged);
1358 }
1359 cx.emit(BufferEvent::FileHandleChanged);
1360 cx.notify();
1361 }
1362 }
1363
1364 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1365 Some(self.branch_state.as_ref()?.base_buffer.clone())
1366 }
1367
1368 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1369 pub fn language(&self) -> Option<&Arc<Language>> {
1370 self.language.as_ref()
1371 }
1372
1373 /// Returns the [`Language`] at the given location.
1374 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1375 let offset = position.to_offset(self);
1376 self.syntax_map
1377 .lock()
1378 .layers_for_range(offset..offset, &self.text, false)
1379 .last()
1380 .map(|info| info.language.clone())
1381 .or_else(|| self.language.clone())
1382 }
1383
1384 /// Returns each [`Language`] for the active syntax layers at the given location.
1385 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1386 let offset = position.to_offset(self);
1387 let mut languages: Vec<Arc<Language>> = self
1388 .syntax_map
1389 .lock()
1390 .layers_for_range(offset..offset, &self.text, false)
1391 .map(|info| info.language.clone())
1392 .collect();
1393
1394 if languages.is_empty() {
1395 if let Some(buffer_language) = self.language() {
1396 languages.push(buffer_language.clone());
1397 }
1398 }
1399
1400 languages
1401 }
1402
1403 /// An integer version number that accounts for all updates besides
1404 /// the buffer's text itself (which is versioned via a version vector).
1405 pub fn non_text_state_update_count(&self) -> usize {
1406 self.non_text_state_update_count
1407 }
1408
1409 /// Whether the buffer is being parsed in the background.
1410 #[cfg(any(test, feature = "test-support"))]
1411 pub fn is_parsing(&self) -> bool {
1412 self.reparse.is_some()
1413 }
1414
1415 /// Indicates whether the buffer contains any regions that may be
1416 /// written in a language that hasn't been loaded yet.
1417 pub fn contains_unknown_injections(&self) -> bool {
1418 self.syntax_map.lock().contains_unknown_injections()
1419 }
1420
1421 #[cfg(test)]
1422 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1423 self.sync_parse_timeout = timeout;
1424 }
1425
1426 /// Called after an edit to synchronize the buffer's main parse tree with
1427 /// the buffer's new underlying state.
1428 ///
1429 /// Locks the syntax map and interpolates the edits since the last reparse
1430 /// into the foreground syntax tree.
1431 ///
1432 /// Then takes a stable snapshot of the syntax map before unlocking it.
1433 /// The snapshot with the interpolated edits is sent to a background thread,
1434 /// where we ask Tree-sitter to perform an incremental parse.
1435 ///
1436 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1437 /// (the sync parse timeout) waiting for the parse to complete. If it finishes
1438 /// in time, we proceed synchronously with the fully parsed tree.
1439 ///
1440 /// If we time out waiting on the parse, we spawn a second task that waits
1441 /// for the parse to complete, and return with the interpolated tree still
1442 /// in the foreground. When the background parse completes, it calls back into
1443 /// the main thread and assigns the result to the foreground parse state.
1444 ///
1445 /// If the buffer or grammar changed since the start of the background parse,
1446 /// initiate an additional reparse recursively. To avoid concurrent parses
1447 /// for the same buffer, we only initiate a new parse if we are not already
1448 /// parsing in the background.
1449 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1450 if self.reparse.is_some() {
1451 return;
1452 }
1453 let language = if let Some(language) = self.language.clone() {
1454 language
1455 } else {
1456 return;
1457 };
1458
1459 let text = self.text_snapshot();
1460 let parsed_version = self.version();
1461
1462 let mut syntax_map = self.syntax_map.lock();
1463 syntax_map.interpolate(&text);
1464 let language_registry = syntax_map.language_registry();
1465 let mut syntax_snapshot = syntax_map.snapshot();
1466 drop(syntax_map);
1467
1468 let parse_task = cx.background_spawn({
1469 let language = language.clone();
1470 let language_registry = language_registry.clone();
1471 async move {
1472 syntax_snapshot.reparse(&text, language_registry, language);
1473 syntax_snapshot
1474 }
1475 });
1476
1477 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1478 match cx
1479 .background_executor()
1480 .block_with_timeout(self.sync_parse_timeout, parse_task)
1481 {
1482 Ok(new_syntax_snapshot) => {
1483 self.did_finish_parsing(new_syntax_snapshot, cx);
1484 self.reparse = None;
1485 }
1486 Err(parse_task) => {
1487 self.reparse = Some(cx.spawn(async move |this, cx| {
1488 let new_syntax_map = parse_task.await;
1489 this.update(cx, move |this, cx| {
1490 let grammar_changed =
1491 this.language.as_ref().map_or(true, |current_language| {
1492 !Arc::ptr_eq(&language, current_language)
1493 });
1494 let language_registry_changed = new_syntax_map
1495 .contains_unknown_injections()
1496 && language_registry.map_or(false, |registry| {
1497 registry.version() != new_syntax_map.language_registry_version()
1498 });
1499 let parse_again = language_registry_changed
1500 || grammar_changed
1501 || this.version.changed_since(&parsed_version);
1502 this.did_finish_parsing(new_syntax_map, cx);
1503 this.reparse = None;
1504 if parse_again {
1505 this.reparse(cx);
1506 }
1507 })
1508 .ok();
1509 }));
1510 }
1511 }
1512 }
1513
1514 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1515 self.was_changed();
1516 self.non_text_state_update_count += 1;
1517 self.syntax_map.lock().did_parse(syntax_snapshot);
1518 self.request_autoindent(cx);
1519 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1520 cx.emit(BufferEvent::Reparsed);
1521 cx.notify();
1522 }
1523
1524 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1525 self.parse_status.1.clone()
1526 }
1527
1528 /// Assign to the buffer a set of diagnostics created by a given language server.
1529 pub fn update_diagnostics(
1530 &mut self,
1531 server_id: LanguageServerId,
1532 diagnostics: DiagnosticSet,
1533 cx: &mut Context<Self>,
1534 ) {
1535 let lamport_timestamp = self.text.lamport_clock.tick();
1536 let op = Operation::UpdateDiagnostics {
1537 server_id,
1538 diagnostics: diagnostics.iter().cloned().collect(),
1539 lamport_timestamp,
1540 };
1541 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1542 self.send_operation(op, true, cx);
1543 }
1544
1545 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1546 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1547 return None;
1548 };
1549 Some(&self.diagnostics[idx].1)
1550 }
1551
1552 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1553 if let Some(indent_sizes) = self.compute_autoindents() {
1554 let indent_sizes = cx.background_spawn(indent_sizes);
1555 match cx
1556 .background_executor()
1557 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1558 {
1559 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1560 Err(indent_sizes) => {
1561 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1562 let indent_sizes = indent_sizes.await;
1563 this.update(cx, |this, cx| {
1564 this.apply_autoindents(indent_sizes, cx);
1565 })
1566 .ok();
1567 }));
1568 }
1569 }
1570 } else {
1571 self.autoindent_requests.clear();
1572 }
1573 }
1574
1575 fn compute_autoindents(
1576 &self,
1577 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1578 let max_rows_between_yields = 100;
1579 let snapshot = self.snapshot();
1580 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1581 return None;
1582 }
1583
1584 let autoindent_requests = self.autoindent_requests.clone();
1585 Some(async move {
1586 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1587 for request in autoindent_requests {
1588 // Resolve each edited range to its row in the current buffer and in the
1589 // buffer before this batch of edits.
1590 let mut row_ranges = Vec::new();
1591 let mut old_to_new_rows = BTreeMap::new();
1592 let mut language_indent_sizes_by_new_row = Vec::new();
1593 for entry in &request.entries {
1594 let position = entry.range.start;
1595 let new_row = position.to_point(&snapshot).row;
1596 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1597 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1598
1599 if !entry.first_line_is_new {
1600 let old_row = position.to_point(&request.before_edit).row;
1601 old_to_new_rows.insert(old_row, new_row);
1602 }
1603 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1604 }
1605
1606 // Build a map containing the suggested indentation for each of the edited lines
1607 // with respect to the state of the buffer before these edits. This map is keyed
1608 // by the rows for these lines in the current state of the buffer.
1609 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1610 let old_edited_ranges =
1611 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1612 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1613 let mut language_indent_size = IndentSize::default();
1614 for old_edited_range in old_edited_ranges {
1615 let suggestions = request
1616 .before_edit
1617 .suggest_autoindents(old_edited_range.clone())
1618 .into_iter()
1619 .flatten();
1620 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1621 if let Some(suggestion) = suggestion {
1622 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1623
1624 // Find the indent size based on the language for this row.
1625 while let Some((row, size)) = language_indent_sizes.peek() {
1626 if *row > new_row {
1627 break;
1628 }
1629 language_indent_size = *size;
1630 language_indent_sizes.next();
1631 }
1632
1633 let suggested_indent = old_to_new_rows
1634 .get(&suggestion.basis_row)
1635 .and_then(|from_row| {
1636 Some(old_suggestions.get(from_row).copied()?.0)
1637 })
1638 .unwrap_or_else(|| {
1639 request
1640 .before_edit
1641 .indent_size_for_line(suggestion.basis_row)
1642 })
1643 .with_delta(suggestion.delta, language_indent_size);
1644 old_suggestions
1645 .insert(new_row, (suggested_indent, suggestion.within_error));
1646 }
1647 }
1648 yield_now().await;
1649 }
1650
1651 // Compute new suggestions for each line, but only include them in the result
1652 // if they differ from the old suggestion for that line.
1653 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1654 let mut language_indent_size = IndentSize::default();
1655 for (row_range, original_indent_column) in row_ranges {
1656 let new_edited_row_range = if request.is_block_mode {
1657 row_range.start..row_range.start + 1
1658 } else {
1659 row_range.clone()
1660 };
1661
1662 let suggestions = snapshot
1663 .suggest_autoindents(new_edited_row_range.clone())
1664 .into_iter()
1665 .flatten();
1666 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1667 if let Some(suggestion) = suggestion {
1668 // Find the indent size based on the language for this row.
1669 while let Some((row, size)) = language_indent_sizes.peek() {
1670 if *row > new_row {
1671 break;
1672 }
1673 language_indent_size = *size;
1674 language_indent_sizes.next();
1675 }
1676
1677 let suggested_indent = indent_sizes
1678 .get(&suggestion.basis_row)
1679 .copied()
1680 .map(|e| e.0)
1681 .unwrap_or_else(|| {
1682 snapshot.indent_size_for_line(suggestion.basis_row)
1683 })
1684 .with_delta(suggestion.delta, language_indent_size);
1685
1686 if old_suggestions.get(&new_row).map_or(
1687 true,
1688 |(old_indentation, was_within_error)| {
1689 suggested_indent != *old_indentation
1690 && (!suggestion.within_error || *was_within_error)
1691 },
1692 ) {
1693 indent_sizes.insert(
1694 new_row,
1695 (suggested_indent, request.ignore_empty_lines),
1696 );
1697 }
1698 }
1699 }
1700
1701 if let (true, Some(original_indent_column)) =
1702 (request.is_block_mode, original_indent_column)
1703 {
1704 let new_indent =
1705 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1706 *indent
1707 } else {
1708 snapshot.indent_size_for_line(row_range.start)
1709 };
1710 let delta = new_indent.len as i64 - original_indent_column as i64;
1711 if delta != 0 {
1712 for row in row_range.skip(1) {
1713 indent_sizes.entry(row).or_insert_with(|| {
1714 let mut size = snapshot.indent_size_for_line(row);
1715 if size.kind == new_indent.kind {
1716 match delta.cmp(&0) {
1717 Ordering::Greater => size.len += delta as u32,
1718 Ordering::Less => {
1719 size.len = size.len.saturating_sub(-delta as u32)
1720 }
1721 Ordering::Equal => {}
1722 }
1723 }
1724 (size, request.ignore_empty_lines)
1725 });
1726 }
1727 }
1728 }
1729
1730 yield_now().await;
1731 }
1732 }
1733
1734 indent_sizes
1735 .into_iter()
1736 .filter_map(|(row, (indent, ignore_empty_lines))| {
1737 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1738 None
1739 } else {
1740 Some((row, indent))
1741 }
1742 })
1743 .collect()
1744 })
1745 }
1746
1747 fn apply_autoindents(
1748 &mut self,
1749 indent_sizes: BTreeMap<u32, IndentSize>,
1750 cx: &mut Context<Self>,
1751 ) {
1752 self.autoindent_requests.clear();
1753
1754 let edits: Vec<_> = indent_sizes
1755 .into_iter()
1756 .filter_map(|(row, indent_size)| {
1757 let current_size = indent_size_for_line(self, row);
1758 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1759 })
1760 .collect();
1761
1762 let preserve_preview = self.preserve_preview();
1763 self.edit(edits, None, cx);
1764 if preserve_preview {
1765 self.refresh_preview();
1766 }
1767 }
1768
1769 /// Create a minimal edit that will cause the given row to be indented
1770 /// with the given size. After applying this edit, the length of the line
1771 /// will always be at least `new_size.len`.
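    ///
    /// A minimal sketch (not a compiled doctest) of the edit this produces, assuming
    /// space-based indentation via `IndentSize::spaces` as used elsewhere in this module:
    ///
    /// ```ignore
    /// // Growing a 2-space indent to a 4-space indent inserts two spaces at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```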
1772 pub fn edit_for_indent_size_adjustment(
1773 row: u32,
1774 current_size: IndentSize,
1775 new_size: IndentSize,
1776 ) -> Option<(Range<Point>, String)> {
1777 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1779 Ordering::Greater => {
1780 let point = Point::new(row, 0);
1781 Some((
1782 point..point,
1783 iter::repeat(new_size.char())
1784 .take((new_size.len - current_size.len) as usize)
1785 .collect::<String>(),
1786 ))
1787 }
1788
1789 Ordering::Less => Some((
1790 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1791 String::new(),
1792 )),
1793
1794 Ordering::Equal => None,
1795 }
1796 } else {
1797 Some((
1798 Point::new(row, 0)..Point::new(row, current_size.len),
1799 iter::repeat(new_size.char())
1800 .take(new_size.len as usize)
1801 .collect::<String>(),
1802 ))
1803 }
1804 }
1805
1806 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1807 /// and the given new text.
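    ///
    /// A hedged sketch of the intended round trip, assuming an async test-style context
    /// where `buffer` is an `Entity<Buffer>` and `new_text` is a `String`:
    ///
    /// ```ignore
    /// // Compute the edits in the background, then apply them back to the buffer.
    /// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx)).await;
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// });
    /// ```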
1808 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1809 let old_text = self.as_rope().clone();
1810 let base_version = self.version();
1811 cx.background_executor()
1812 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1813 let old_text = old_text.to_string();
1814 let line_ending = LineEnding::detect(&new_text);
1815 LineEnding::normalize(&mut new_text);
1816 let edits = text_diff(&old_text, &new_text);
1817 Diff {
1818 base_version,
1819 line_ending,
1820 edits,
1821 }
1822 })
1823 }
1824
1825 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1827 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1828 let old_text = self.as_rope().clone();
1829 let line_ending = self.line_ending();
1830 let base_version = self.version();
1831 cx.background_spawn(async move {
1832 let ranges = trailing_whitespace_ranges(&old_text);
1833 let empty = Arc::<str>::from("");
1834 Diff {
1835 base_version,
1836 line_ending,
1837 edits: ranges
1838 .into_iter()
1839 .map(|range| (range, empty.clone()))
1840 .collect(),
1841 }
1842 })
1843 }
1844
1845 /// Ensures that the buffer ends with a single newline character, and
1846 /// no other whitespace.
1847 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1848 let len = self.len();
1849 let mut offset = len;
1850 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1851 let non_whitespace_len = chunk
1852 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1853 .len();
1854 offset -= chunk.len();
1855 offset += non_whitespace_len;
1856 if non_whitespace_len != 0 {
1857 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1858 return;
1859 }
1860 break;
1861 }
1862 }
1863 self.edit([(offset..len, "\n")], None, cx);
1864 }
1865
1866 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1867 /// calculated, then adjust the diff to account for those changes, and discard any
1868 /// parts of the diff that conflict with those changes.
1869 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1870 let snapshot = self.snapshot();
1871 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1872 let mut delta = 0;
1873 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1874 while let Some(edit_since) = edits_since.peek() {
1875 // If the edit occurs after a diff hunk, then it does not
1876 // affect that hunk.
1877 if edit_since.old.start > range.end {
1878 break;
1879 }
1880 // If the edit precedes the diff hunk, then adjust the hunk
1881 // to reflect the edit.
1882 else if edit_since.old.end < range.start {
1883 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1884 edits_since.next();
1885 }
1886 // If the edit intersects a diff hunk, then discard that hunk.
1887 else {
1888 return None;
1889 }
1890 }
1891
1892 let start = (range.start as i64 + delta) as usize;
1893 let end = (range.end as i64 + delta) as usize;
1894 Some((start..end, new_text))
1895 });
1896
1897 self.start_transaction();
1898 self.text.set_line_ending(diff.line_ending);
1899 self.edit(adjusted_edits, None, cx);
1900 self.end_transaction(cx)
1901 }
1902
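    // Returns whether the buffer's current version differs from its last saved version,
    // caching the answer per buffer version to avoid rescanning edit history on every call.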
1903 fn has_unsaved_edits(&self) -> bool {
1904 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1905
1906 if last_version == self.version {
1907 self.has_unsaved_edits
1908 .set((last_version, has_unsaved_edits));
1909 return has_unsaved_edits;
1910 }
1911
1912 let has_edits = self.has_edits_since(&self.saved_version);
1913 self.has_unsaved_edits
1914 .set((self.version.clone(), has_edits));
1915 has_edits
1916 }
1917
1918 /// Checks if the buffer has unsaved changes.
1919 pub fn is_dirty(&self) -> bool {
1920 if self.capability == Capability::ReadOnly {
1921 return false;
1922 }
1923 if self.has_conflict {
1924 return true;
1925 }
1926 match self.file.as_ref().map(|f| f.disk_state()) {
1927 Some(DiskState::New) | Some(DiskState::Deleted) => {
1928 !self.is_empty() && self.has_unsaved_edits()
1929 }
1930 _ => self.has_unsaved_edits(),
1931 }
1932 }
1933
1934 /// Checks if the buffer and its file have both changed since the buffer
1935 /// was last saved or reloaded.
1936 pub fn has_conflict(&self) -> bool {
1937 if self.has_conflict {
1938 return true;
1939 }
1940 let Some(file) = self.file.as_ref() else {
1941 return false;
1942 };
1943 match file.disk_state() {
1944 DiskState::New => false,
1945 DiskState::Present { mtime } => match self.saved_mtime {
1946 Some(saved_mtime) => {
1947 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1948 }
1949 None => true,
1950 },
1951 DiskState::Deleted => false,
1952 }
1953 }
1954
1955 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1956 pub fn subscribe(&mut self) -> Subscription {
1957 self.text.subscribe()
1958 }
1959
1960 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1961 ///
1962 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
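    ///
    /// A small sketch of the expected usage, assuming the caller holds the strong
    /// `Rc` side of the bit:
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...later, after edits may have happened:
    /// if changed.take() {
    ///     // The buffer's text changed since the bit was registered.
    /// }
    /// ```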
1964 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
1965 if let Err(ix) = self
1966 .change_bits
1967 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
1968 {
1969 self.change_bits.insert(ix, bit);
1970 }
1971 }
1972
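    // Sets every registered change bit, dropping bits whose owners have been freed.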
1973 fn was_changed(&mut self) {
1974 self.change_bits.retain(|change_bit| {
1975 change_bit.upgrade().map_or(false, |bit| {
1976 bit.replace(true);
1977 true
1978 })
1979 });
1980 }
1981
1982 /// Starts a transaction, if one is not already in-progress. When undoing or
1983 /// redoing edits, all of the edits performed within a transaction are undone
1984 /// or redone together.
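    ///
    /// A minimal sketch of grouping two edits into a single undoable transaction
    /// (the buffer contents and offsets are illustrative):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(12..12, "}\n")], None, cx);
    /// // A single undo now reverts both edits.
    /// let transaction_id = buffer.end_transaction(cx);
    /// ```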
1985 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1986 self.start_transaction_at(Instant::now())
1987 }
1988
1989 /// Starts a transaction, providing the current time. Subsequent transactions
1990 /// that occur within a short period of time will be grouped together. This
1991 /// is controlled by the buffer's undo grouping duration.
1992 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1993 self.transaction_depth += 1;
1994 if self.was_dirty_before_starting_transaction.is_none() {
1995 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1996 }
1997 self.text.start_transaction_at(now)
1998 }
1999
2000 /// Terminates the current transaction, if this is the outermost transaction.
2001 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2002 self.end_transaction_at(Instant::now(), cx)
2003 }
2004
2005 /// Terminates the current transaction, providing the current time. Subsequent transactions
2006 /// that occur within a short period of time will be grouped together. This
2007 /// is controlled by the buffer's undo grouping duration.
2008 pub fn end_transaction_at(
2009 &mut self,
2010 now: Instant,
2011 cx: &mut Context<Self>,
2012 ) -> Option<TransactionId> {
2013 assert!(self.transaction_depth > 0);
2014 self.transaction_depth -= 1;
2015 let was_dirty = if self.transaction_depth == 0 {
2016 self.was_dirty_before_starting_transaction.take().unwrap()
2017 } else {
2018 false
2019 };
2020 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2021 self.did_edit(&start_version, was_dirty, cx);
2022 Some(transaction_id)
2023 } else {
2024 None
2025 }
2026 }
2027
2028 /// Manually add a transaction to the buffer's undo history.
2029 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2030 self.text.push_transaction(transaction, now);
2031 }
2032
2033 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2035 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2036 self.text.finalize_last_transaction()
2037 }
2038
2039 /// Manually group all changes since a given transaction.
2040 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2041 self.text.group_until_transaction(transaction_id);
2042 }
2043
2044 /// Manually remove a transaction from the buffer's undo history
2045 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2046 self.text.forget_transaction(transaction_id)
2047 }
2048
2049 /// Retrieve a transaction from the buffer's undo history
2050 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2051 self.text.get_transaction(transaction_id)
2052 }
2053
2054 /// Manually merge two transactions in the buffer's undo history.
2055 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2056 self.text.merge_transactions(transaction, destination);
2057 }
2058
2059 /// Waits for the buffer to receive operations with the given timestamps.
2060 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2061 &mut self,
2062 edit_ids: It,
2063 ) -> impl Future<Output = Result<()>> + use<It> {
2064 self.text.wait_for_edits(edit_ids)
2065 }
2066
2067 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2068 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2069 &mut self,
2070 anchors: It,
2071 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2072 self.text.wait_for_anchors(anchors)
2073 }
2074
2075 /// Waits for the buffer to receive operations up to the given version.
2076 pub fn wait_for_version(
2077 &mut self,
2078 version: clock::Global,
2079 ) -> impl Future<Output = Result<()>> + use<> {
2080 self.text.wait_for_version(version)
2081 }
2082
2083 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2085 pub fn give_up_waiting(&mut self) {
2086 self.text.give_up_waiting();
2087 }
2088
2089 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2090 pub fn set_active_selections(
2091 &mut self,
2092 selections: Arc<[Selection<Anchor>]>,
2093 line_mode: bool,
2094 cursor_shape: CursorShape,
2095 cx: &mut Context<Self>,
2096 ) {
2097 let lamport_timestamp = self.text.lamport_clock.tick();
2098 self.remote_selections.insert(
2099 self.text.replica_id(),
2100 SelectionSet {
2101 selections: selections.clone(),
2102 lamport_timestamp,
2103 line_mode,
2104 cursor_shape,
2105 },
2106 );
2107 self.send_operation(
2108 Operation::UpdateSelections {
2109 selections,
2110 line_mode,
2111 lamport_timestamp,
2112 cursor_shape,
2113 },
2114 true,
2115 cx,
2116 );
2117 self.non_text_state_update_count += 1;
2118 cx.notify();
2119 }
2120
2121 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2122 /// this replica.
2123 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2124 if self
2125 .remote_selections
2126 .get(&self.text.replica_id())
2127 .map_or(true, |set| !set.selections.is_empty())
2128 {
2129 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2130 }
2131 }
2132
2133 pub fn set_agent_selections(
2134 &mut self,
2135 selections: Arc<[Selection<Anchor>]>,
2136 line_mode: bool,
2137 cursor_shape: CursorShape,
2138 cx: &mut Context<Self>,
2139 ) {
2140 let lamport_timestamp = self.text.lamport_clock.tick();
2141 self.remote_selections.insert(
2142 AGENT_REPLICA_ID,
2143 SelectionSet {
2144 selections: selections.clone(),
2145 lamport_timestamp,
2146 line_mode,
2147 cursor_shape,
2148 },
2149 );
2150 self.non_text_state_update_count += 1;
2151 cx.notify();
2152 }
2153
2154 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2155 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2156 }
2157
2158 /// Replaces the buffer's entire text.
2159 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2160 where
2161 T: Into<Arc<str>>,
2162 {
2163 self.autoindent_requests.clear();
2164 self.edit([(0..self.len(), text)], None, cx)
2165 }
2166
2167 /// Appends the given text to the end of the buffer.
2168 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2169 where
2170 T: Into<Arc<str>>,
2171 {
2172 self.edit([(self.len()..self.len(), text)], None, cx)
2173 }
2174
2175 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2176 /// delete, and a string of text to insert at that location.
2177 ///
2178 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2179 /// request for the edited ranges, which will be processed when the buffer finishes
2180 /// parsing.
2181 ///
    /// Parsing takes place at the end of a transaction, and may be performed synchronously
2183 /// or asynchronously, depending on the changes.
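    ///
    /// A hedged sketch of a typical call, assuming the buffer is at least 7 bytes long
    /// (the ranges and replacement text are illustrative):
    ///
    /// ```ignore
    /// // Replace bytes 4..7 and append a trailing newline, re-indenting each edited
    /// // line according to the language's indentation rules.
    /// buffer.edit(
    ///     [(4..7, "bar"), (buffer.len()..buffer.len(), "\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```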
2184 pub fn edit<I, S, T>(
2185 &mut self,
2186 edits_iter: I,
2187 autoindent_mode: Option<AutoindentMode>,
2188 cx: &mut Context<Self>,
2189 ) -> Option<clock::Lamport>
2190 where
2191 I: IntoIterator<Item = (Range<S>, T)>,
2192 S: ToOffset,
2193 T: Into<Arc<str>>,
2194 {
2195 // Skip invalid edits and coalesce contiguous ones.
2196 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2197
2198 for (range, new_text) in edits_iter {
2199 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2200
2201 if range.start > range.end {
2202 mem::swap(&mut range.start, &mut range.end);
2203 }
2204 let new_text = new_text.into();
2205 if !new_text.is_empty() || !range.is_empty() {
2206 if let Some((prev_range, prev_text)) = edits.last_mut() {
2207 if prev_range.end >= range.start {
2208 prev_range.end = cmp::max(prev_range.end, range.end);
2209 *prev_text = format!("{prev_text}{new_text}").into();
2210 } else {
2211 edits.push((range, new_text));
2212 }
2213 } else {
2214 edits.push((range, new_text));
2215 }
2216 }
2217 }
2218 if edits.is_empty() {
2219 return None;
2220 }
2221
2222 self.start_transaction();
2223 self.pending_autoindent.take();
2224 let autoindent_request = autoindent_mode
2225 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2226
2227 let edit_operation = self.text.edit(edits.iter().cloned());
2228 let edit_id = edit_operation.timestamp();
2229
2230 if let Some((before_edit, mode)) = autoindent_request {
2231 let mut delta = 0isize;
2232 let entries = edits
2233 .into_iter()
2234 .enumerate()
2235 .zip(&edit_operation.as_edit().unwrap().new_text)
2236 .map(|((ix, (range, _)), new_text)| {
2237 let new_text_length = new_text.len();
2238 let old_start = range.start.to_point(&before_edit);
2239 let new_start = (delta + range.start as isize) as usize;
2240 let range_len = range.end - range.start;
2241 delta += new_text_length as isize - range_len as isize;
2242
2243 // Decide what range of the insertion to auto-indent, and whether
2244 // the first line of the insertion should be considered a newly-inserted line
2245 // or an edit to an existing line.
2246 let mut range_of_insertion_to_indent = 0..new_text_length;
2247 let mut first_line_is_new = true;
2248
2249 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2250 let old_line_end = before_edit.line_len(old_start.row);
2251
2252 if old_start.column > old_line_start {
2253 first_line_is_new = false;
2254 }
2255
2256 if !new_text.contains('\n')
2257 && (old_start.column + (range_len as u32) < old_line_end
2258 || old_line_end == old_line_start)
2259 {
2260 first_line_is_new = false;
2261 }
2262
2263 // When inserting text starting with a newline, avoid auto-indenting the
2264 // previous line.
2265 if new_text.starts_with('\n') {
2266 range_of_insertion_to_indent.start += 1;
2267 first_line_is_new = true;
2268 }
2269
2270 let mut original_indent_column = None;
2271 if let AutoindentMode::Block {
2272 original_indent_columns,
2273 } = &mode
2274 {
2275 original_indent_column = Some(if new_text.starts_with('\n') {
2276 indent_size_for_text(
2277 new_text[range_of_insertion_to_indent.clone()].chars(),
2278 )
2279 .len
2280 } else {
2281 original_indent_columns
2282 .get(ix)
2283 .copied()
2284 .flatten()
2285 .unwrap_or_else(|| {
2286 indent_size_for_text(
2287 new_text[range_of_insertion_to_indent.clone()].chars(),
2288 )
2289 .len
2290 })
2291 });
2292
2293 // Avoid auto-indenting the line after the edit.
2294 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2295 range_of_insertion_to_indent.end -= 1;
2296 }
2297 }
2298
2299 AutoindentRequestEntry {
2300 first_line_is_new,
2301 original_indent_column,
2302 indent_size: before_edit.language_indent_size_at(range.start, cx),
2303 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2304 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2305 }
2306 })
2307 .collect();
2308
2309 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2310 before_edit,
2311 entries,
2312 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2313 ignore_empty_lines: false,
2314 }));
2315 }
2316
2317 self.end_transaction(cx);
2318 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2319 Some(edit_id)
2320 }
2321
2322 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2323 self.was_changed();
2324
2325 if self.edits_since::<usize>(old_version).next().is_none() {
2326 return;
2327 }
2328
2329 self.reparse(cx);
2330 cx.emit(BufferEvent::Edited);
2331 if was_dirty != self.is_dirty() {
2332 cx.emit(BufferEvent::DirtyChanged);
2333 }
2334 cx.notify();
2335 }
2336
2337 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2338 where
2339 I: IntoIterator<Item = Range<T>>,
2340 T: ToOffset + Copy,
2341 {
2342 let before_edit = self.snapshot();
2343 let entries = ranges
2344 .into_iter()
2345 .map(|range| AutoindentRequestEntry {
2346 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2347 first_line_is_new: true,
2348 indent_size: before_edit.language_indent_size_at(range.start, cx),
2349 original_indent_column: None,
2350 })
2351 .collect();
2352 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2353 before_edit,
2354 entries,
2355 is_block_mode: false,
2356 ignore_empty_lines: true,
2357 }));
2358 self.request_autoindent(cx);
2359 }
2360
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
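    ///
    /// A small sketch of a typical call (the position is illustrative):
    ///
    /// ```ignore
    /// // Carve out an empty, auto-indented line at the cursor, adding blank lines
    /// // above and below when the neighboring lines are not already blank.
    /// let start = buffer.insert_empty_line(Point::new(4, 2), true, true, cx);
    /// ```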
2363 pub fn insert_empty_line(
2364 &mut self,
2365 position: impl ToPoint,
2366 space_above: bool,
2367 space_below: bool,
2368 cx: &mut Context<Self>,
2369 ) -> Point {
2370 let mut position = position.to_point(self);
2371
2372 self.start_transaction();
2373
2374 self.edit(
2375 [(position..position, "\n")],
2376 Some(AutoindentMode::EachLine),
2377 cx,
2378 );
2379
2380 if position.column > 0 {
2381 position += Point::new(1, 0);
2382 }
2383
2384 if !self.is_line_blank(position.row) {
2385 self.edit(
2386 [(position..position, "\n")],
2387 Some(AutoindentMode::EachLine),
2388 cx,
2389 );
2390 }
2391
2392 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2393 self.edit(
2394 [(position..position, "\n")],
2395 Some(AutoindentMode::EachLine),
2396 cx,
2397 );
2398 position.row += 1;
2399 }
2400
2401 if space_below
2402 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2403 {
2404 self.edit(
2405 [(position..position, "\n")],
2406 Some(AutoindentMode::EachLine),
2407 cx,
2408 );
2409 }
2410
2411 self.end_transaction(cx);
2412
2413 position
2414 }
2415
2416 /// Applies the given remote operations to the buffer.
2417 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2418 self.pending_autoindent.take();
2419 let was_dirty = self.is_dirty();
2420 let old_version = self.version.clone();
2421 let mut deferred_ops = Vec::new();
2422 let buffer_ops = ops
2423 .into_iter()
2424 .filter_map(|op| match op {
2425 Operation::Buffer(op) => Some(op),
2426 _ => {
2427 if self.can_apply_op(&op) {
2428 self.apply_op(op, cx);
2429 } else {
2430 deferred_ops.push(op);
2431 }
2432 None
2433 }
2434 })
2435 .collect::<Vec<_>>();
2436 for operation in buffer_ops.iter() {
2437 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2438 }
2439 self.text.apply_ops(buffer_ops);
2440 self.deferred_ops.insert(deferred_ops);
2441 self.flush_deferred_ops(cx);
2442 self.did_edit(&old_version, was_dirty, cx);
2443 // Notify independently of whether the buffer was edited as the operations could include a
2444 // selection update.
2445 cx.notify();
2446 }
2447
2448 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2449 let mut deferred_ops = Vec::new();
2450 for op in self.deferred_ops.drain().iter().cloned() {
2451 if self.can_apply_op(&op) {
2452 self.apply_op(op, cx);
2453 } else {
2454 deferred_ops.push(op);
2455 }
2456 }
2457 self.deferred_ops.insert(deferred_ops);
2458 }
2459
2460 pub fn has_deferred_ops(&self) -> bool {
2461 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2462 }
2463
2464 fn can_apply_op(&self, operation: &Operation) -> bool {
2465 match operation {
2466 Operation::Buffer(_) => {
2467 unreachable!("buffer operations should never be applied at this layer")
2468 }
2469 Operation::UpdateDiagnostics {
2470 diagnostics: diagnostic_set,
2471 ..
2472 } => diagnostic_set.iter().all(|diagnostic| {
2473 self.text.can_resolve(&diagnostic.range.start)
2474 && self.text.can_resolve(&diagnostic.range.end)
2475 }),
2476 Operation::UpdateSelections { selections, .. } => selections
2477 .iter()
2478 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2479 Operation::UpdateCompletionTriggers { .. } => true,
2480 }
2481 }
2482
2483 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2484 match operation {
2485 Operation::Buffer(_) => {
2486 unreachable!("buffer operations should never be applied at this layer")
2487 }
2488 Operation::UpdateDiagnostics {
2489 server_id,
2490 diagnostics: diagnostic_set,
2491 lamport_timestamp,
2492 } => {
2493 let snapshot = self.snapshot();
2494 self.apply_diagnostic_update(
2495 server_id,
2496 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2497 lamport_timestamp,
2498 cx,
2499 );
2500 }
2501 Operation::UpdateSelections {
2502 selections,
2503 lamport_timestamp,
2504 line_mode,
2505 cursor_shape,
2506 } => {
2507 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2508 if set.lamport_timestamp > lamport_timestamp {
2509 return;
2510 }
2511 }
2512
2513 self.remote_selections.insert(
2514 lamport_timestamp.replica_id,
2515 SelectionSet {
2516 selections,
2517 lamport_timestamp,
2518 line_mode,
2519 cursor_shape,
2520 },
2521 );
2522 self.text.lamport_clock.observe(lamport_timestamp);
2523 self.non_text_state_update_count += 1;
2524 }
2525 Operation::UpdateCompletionTriggers {
2526 triggers,
2527 lamport_timestamp,
2528 server_id,
2529 } => {
2530 if triggers.is_empty() {
2531 self.completion_triggers_per_language_server
2532 .remove(&server_id);
2533 self.completion_triggers = self
2534 .completion_triggers_per_language_server
2535 .values()
2536 .flat_map(|triggers| triggers.into_iter().cloned())
2537 .collect();
2538 } else {
2539 self.completion_triggers_per_language_server
2540 .insert(server_id, triggers.iter().cloned().collect());
2541 self.completion_triggers.extend(triggers);
2542 }
2543 self.text.lamport_clock.observe(lamport_timestamp);
2544 }
2545 }
2546 }
2547
2548 fn apply_diagnostic_update(
2549 &mut self,
2550 server_id: LanguageServerId,
2551 diagnostics: DiagnosticSet,
2552 lamport_timestamp: clock::Lamport,
2553 cx: &mut Context<Self>,
2554 ) {
2555 if lamport_timestamp > self.diagnostics_timestamp {
2556 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2557 if diagnostics.is_empty() {
2558 if let Ok(ix) = ix {
2559 self.diagnostics.remove(ix);
2560 }
2561 } else {
2562 match ix {
2563 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2564 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2565 };
2566 }
2567 self.diagnostics_timestamp = lamport_timestamp;
2568 self.non_text_state_update_count += 1;
2569 self.text.lamport_clock.observe(lamport_timestamp);
2570 cx.notify();
2571 cx.emit(BufferEvent::DiagnosticsUpdated);
2572 }
2573 }
2574
2575 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2576 self.was_changed();
2577 cx.emit(BufferEvent::Operation {
2578 operation,
2579 is_local,
2580 });
2581 }
2582
2583 /// Removes the selections for a given peer.
2584 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2585 self.remote_selections.remove(&replica_id);
2586 cx.notify();
2587 }
2588
2589 /// Undoes the most recent transaction.
2590 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2591 let was_dirty = self.is_dirty();
2592 let old_version = self.version.clone();
2593
2594 if let Some((transaction_id, operation)) = self.text.undo() {
2595 self.send_operation(Operation::Buffer(operation), true, cx);
2596 self.did_edit(&old_version, was_dirty, cx);
2597 Some(transaction_id)
2598 } else {
2599 None
2600 }
2601 }
2602
2603 /// Manually undoes a specific transaction in the buffer's undo history.
2604 pub fn undo_transaction(
2605 &mut self,
2606 transaction_id: TransactionId,
2607 cx: &mut Context<Self>,
2608 ) -> bool {
2609 let was_dirty = self.is_dirty();
2610 let old_version = self.version.clone();
2611 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2612 self.send_operation(Operation::Buffer(operation), true, cx);
2613 self.did_edit(&old_version, was_dirty, cx);
2614 true
2615 } else {
2616 false
2617 }
2618 }
2619
2620 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2621 pub fn undo_to_transaction(
2622 &mut self,
2623 transaction_id: TransactionId,
2624 cx: &mut Context<Self>,
2625 ) -> bool {
2626 let was_dirty = self.is_dirty();
2627 let old_version = self.version.clone();
2628
2629 let operations = self.text.undo_to_transaction(transaction_id);
2630 let undone = !operations.is_empty();
2631 for operation in operations {
2632 self.send_operation(Operation::Buffer(operation), true, cx);
2633 }
2634 if undone {
2635 self.did_edit(&old_version, was_dirty, cx)
2636 }
2637 undone
2638 }
2639
2640 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2641 let was_dirty = self.is_dirty();
2642 let operation = self.text.undo_operations(counts);
2643 let old_version = self.version.clone();
2644 self.send_operation(Operation::Buffer(operation), true, cx);
2645 self.did_edit(&old_version, was_dirty, cx);
2646 }
2647
    /// Redoes the most recently undone transaction.
2649 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2650 let was_dirty = self.is_dirty();
2651 let old_version = self.version.clone();
2652
2653 if let Some((transaction_id, operation)) = self.text.redo() {
2654 self.send_operation(Operation::Buffer(operation), true, cx);
2655 self.did_edit(&old_version, was_dirty, cx);
2656 Some(transaction_id)
2657 } else {
2658 None
2659 }
2660 }
2661
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2663 pub fn redo_to_transaction(
2664 &mut self,
2665 transaction_id: TransactionId,
2666 cx: &mut Context<Self>,
2667 ) -> bool {
2668 let was_dirty = self.is_dirty();
2669 let old_version = self.version.clone();
2670
2671 let operations = self.text.redo_to_transaction(transaction_id);
2672 let redone = !operations.is_empty();
2673 for operation in operations {
2674 self.send_operation(Operation::Buffer(operation), true, cx);
2675 }
2676 if redone {
2677 self.did_edit(&old_version, was_dirty, cx)
2678 }
2679 redone
2680 }
2681
2682 /// Override current completion triggers with the user-provided completion triggers.
2683 pub fn set_completion_triggers(
2684 &mut self,
2685 server_id: LanguageServerId,
2686 triggers: BTreeSet<String>,
2687 cx: &mut Context<Self>,
2688 ) {
2689 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2690 if triggers.is_empty() {
2691 self.completion_triggers_per_language_server
2692 .remove(&server_id);
2693 self.completion_triggers = self
2694 .completion_triggers_per_language_server
2695 .values()
2696 .flat_map(|triggers| triggers.into_iter().cloned())
2697 .collect();
2698 } else {
2699 self.completion_triggers_per_language_server
2700 .insert(server_id, triggers.clone());
2701 self.completion_triggers.extend(triggers.iter().cloned());
2702 }
2703 self.send_operation(
2704 Operation::UpdateCompletionTriggers {
2705 triggers: triggers.into_iter().collect(),
2706 lamport_timestamp: self.completion_triggers_timestamp,
2707 server_id,
2708 },
2709 true,
2710 cx,
2711 );
2712 cx.notify();
2713 }
2714
2715 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2717 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2718 &self.completion_triggers
2719 }
2720
2721 /// Call this directly after performing edits to prevent the preview tab
2722 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2723 /// to return false until there are additional edits.
2724 pub fn refresh_preview(&mut self) {
2725 self.preview_version = self.version.clone();
2726 }
2727
2728 /// Whether we should preserve the preview status of a tab containing this buffer.
2729 pub fn preserve_preview(&self) -> bool {
2730 !self.has_edits_since(&self.preview_version)
2731 }
2732}
2733
2734#[doc(hidden)]
2735#[cfg(any(test, feature = "test-support"))]
2736impl Buffer {
2737 pub fn edit_via_marked_text(
2738 &mut self,
2739 marked_string: &str,
2740 autoindent_mode: Option<AutoindentMode>,
2741 cx: &mut Context<Self>,
2742 ) {
2743 let edits = self.edits_for_marked_text(marked_string);
2744 self.edit(edits, autoindent_mode, cx);
2745 }
2746
2747 pub fn set_group_interval(&mut self, group_interval: Duration) {
2748 self.text.set_group_interval(group_interval);
2749 }
2750
2751 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2752 where
2753 T: rand::Rng,
2754 {
2755 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2756 let mut last_end = None;
2757 for _ in 0..old_range_count {
2758 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2759 break;
2760 }
2761
2762 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2763 let mut range = self.random_byte_range(new_start, rng);
2764 if rng.gen_bool(0.2) {
2765 mem::swap(&mut range.start, &mut range.end);
2766 }
2767 last_end = Some(range.end);
2768
2769 let new_text_len = rng.gen_range(0..10);
2770 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2771 new_text = new_text.to_uppercase();
2772
2773 edits.push((range, new_text));
2774 }
2775 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2776 self.edit(edits, None, cx);
2777 }
2778
2779 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2780 let was_dirty = self.is_dirty();
2781 let old_version = self.version.clone();
2782
2783 let ops = self.text.randomly_undo_redo(rng);
2784 if !ops.is_empty() {
2785 for op in ops {
2786 self.send_operation(Operation::Buffer(op), true, cx);
2787 self.did_edit(&old_version, was_dirty, cx);
2788 }
2789 }
2790 }
2791}
2792
2793impl EventEmitter<BufferEvent> for Buffer {}
2794
2795impl Deref for Buffer {
2796 type Target = TextBuffer;
2797
2798 fn deref(&self) -> &Self::Target {
2799 &self.text
2800 }
2801}
2802
2803impl BufferSnapshot {
    /// Returns the [`IndentSize`] of the indentation that is already present on
    /// the given line.
2806 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2807 indent_size_for_line(self, row)
2808 }
2809
2810 /// Returns [`IndentSize`] for a given position that respects user settings
2811 /// and language preferences.
2812 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2813 let settings = language_settings(
2814 self.language_at(position).map(|l| l.name()),
2815 self.file(),
2816 cx,
2817 );
2818 if settings.hard_tabs {
2819 IndentSize::tab()
2820 } else {
2821 IndentSize::spaces(settings.tab_size.get())
2822 }
2823 }
2824
2825 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2826 /// is passed in as `single_indent_size`.
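    ///
    /// A minimal sketch, assuming 4-space indentation as the unit:
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// for (row, indent) in &suggestions {
    ///     println!("row {row}: {} column(s) of indentation", indent.len);
    /// }
    /// ```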
2827 pub fn suggested_indents(
2828 &self,
2829 rows: impl Iterator<Item = u32>,
2830 single_indent_size: IndentSize,
2831 ) -> BTreeMap<u32, IndentSize> {
2832 let mut result = BTreeMap::new();
2833
2834 for row_range in contiguous_ranges(rows, 10) {
2835 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2836 Some(suggestions) => suggestions,
2837 _ => break,
2838 };
2839
2840 for (row, suggestion) in row_range.zip(suggestions) {
2841 let indent_size = if let Some(suggestion) = suggestion {
2842 result
2843 .get(&suggestion.basis_row)
2844 .copied()
2845 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2846 .with_delta(suggestion.delta, single_indent_size)
2847 } else {
2848 self.indent_size_for_line(row)
2849 };
2850
2851 result.insert(row, indent_size);
2852 }
2853 }
2854
2855 result
2856 }
2857
2858 fn suggest_autoindents(
2859 &self,
2860 row_range: Range<u32>,
2861 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2862 let config = &self.language.as_ref()?.config;
2863 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2864 let significant_indentation = config.significant_indentation;
2865
2866 // Find the suggested indentation ranges based on the syntax tree.
2867 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2868 let end = Point::new(row_range.end, 0);
2869 let range = (start..end).to_offset(&self.text);
2870 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2871 Some(&grammar.indents_config.as_ref()?.query)
2872 });
2873 let indent_configs = matches
2874 .grammars()
2875 .iter()
2876 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2877 .collect::<Vec<_>>();
2878
2879 let mut indent_ranges = Vec::<Range<Point>>::new();
2880 let mut outdent_positions = Vec::<Point>::new();
2881 while let Some(mat) = matches.peek() {
2882 let mut start: Option<Point> = None;
2883 let mut end: Option<Point> = None;
2884 let mut outdent: Option<Point> = None;
2885
2886 let config = &indent_configs[mat.grammar_index];
2887 for capture in mat.captures {
2888 if capture.index == config.indent_capture_ix {
2889 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2890 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2891 } else if Some(capture.index) == config.start_capture_ix {
2892 start = Some(Point::from_ts_point(capture.node.end_position()));
2893 } else if Some(capture.index) == config.end_capture_ix {
2894 end = Some(Point::from_ts_point(capture.node.start_position()));
2895 } else if Some(capture.index) == config.outdent_capture_ix {
2896 let point = Point::from_ts_point(capture.node.start_position());
2897 outdent.get_or_insert(point);
2898 outdent_positions.push(point);
2899 }
2900 }
2901
2902 matches.advance();
            // In the case of significant indentation, expand the end to the outdent position.
2904 let end = if significant_indentation {
2905 outdent.or(end)
2906 } else {
2907 end
2908 };
2909 if let Some((start, end)) = start.zip(end) {
2910 if start.row == end.row && (!significant_indentation || start.column < end.column) {
2911 continue;
2912 }
2913 let range = start..end;
2914 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2915 Err(ix) => indent_ranges.insert(ix, range),
2916 Ok(ix) => {
2917 let prev_range = &mut indent_ranges[ix];
2918 prev_range.end = prev_range.end.max(range.end);
2919 }
2920 }
2921 }
2922 }
2923
2924 let mut error_ranges = Vec::<Range<Point>>::new();
2925 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2926 grammar.error_query.as_ref()
2927 });
2928 while let Some(mat) = matches.peek() {
2929 let node = mat.captures[0].node;
2930 let start = Point::from_ts_point(node.start_position());
2931 let end = Point::from_ts_point(node.end_position());
2932 let range = start..end;
2933 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2934 Ok(ix) | Err(ix) => ix,
2935 };
2936 let mut end_ix = ix;
2937 while let Some(existing_range) = error_ranges.get(end_ix) {
2938 if existing_range.end < end {
2939 end_ix += 1;
2940 } else {
2941 break;
2942 }
2943 }
2944 error_ranges.splice(ix..end_ix, [range]);
2945 matches.advance();
2946 }
2947
        // We don't use outdent positions to truncate in the case of significant indentation;
        // rather, we use them to expand the range's end (handled above).
2950 if !significant_indentation {
2951 outdent_positions.sort();
2952 for outdent_position in outdent_positions {
2953 // find the innermost indent range containing this outdent_position
2954 // set its end to the outdent position
2955 if let Some(range_to_truncate) = indent_ranges
2956 .iter_mut()
2957 .filter(|indent_range| indent_range.contains(&outdent_position))
2958 .next_back()
2959 {
2960 range_to_truncate.end = outdent_position;
2961 }
2962 }
2963 }
2964
        // Find the suggested indentation increases and decreases based on regexes.
2966 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2967 self.for_each_line(
2968 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2969 ..Point::new(row_range.end, 0),
2970 |row, line| {
2971 if config
2972 .decrease_indent_pattern
2973 .as_ref()
2974 .map_or(false, |regex| regex.is_match(line))
2975 {
2976 indent_change_rows.push((row, Ordering::Less));
2977 }
2978 if config
2979 .increase_indent_pattern
2980 .as_ref()
2981 .map_or(false, |regex| regex.is_match(line))
2982 {
2983 indent_change_rows.push((row + 1, Ordering::Greater));
2984 }
2985 },
2986 );
2987
2988 let mut indent_changes = indent_change_rows.into_iter().peekable();
2989 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2990 prev_non_blank_row.unwrap_or(0)
2991 } else {
2992 row_range.start.saturating_sub(1)
2993 };
2994 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2995 Some(row_range.map(move |row| {
2996 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2997
2998 let mut indent_from_prev_row = false;
2999 let mut outdent_from_prev_row = false;
3000 let mut outdent_to_row = u32::MAX;
3001 let mut from_regex = false;
3002
3003 while let Some((indent_row, delta)) = indent_changes.peek() {
3004 match indent_row.cmp(&row) {
3005 Ordering::Equal => match delta {
3006 Ordering::Less => {
3007 from_regex = true;
3008 outdent_from_prev_row = true
3009 }
3010 Ordering::Greater => {
3011 indent_from_prev_row = true;
3012 from_regex = true
3013 }
3014 _ => {}
3015 },
3016
3017 Ordering::Greater => break,
3018 Ordering::Less => {}
3019 }
3020
3021 indent_changes.next();
3022 }
3023
3024 for range in &indent_ranges {
3025 if range.start.row >= row {
3026 break;
3027 }
3028 if range.start.row == prev_row && range.end > row_start {
3029 indent_from_prev_row = true;
3030 }
3031 if significant_indentation && self.is_line_blank(row) && range.start.row == prev_row
3032 {
3033 indent_from_prev_row = true;
3034 }
3035 if !significant_indentation || !self.is_line_blank(row) {
3036 if range.end > prev_row_start && range.end <= row_start {
3037 outdent_to_row = outdent_to_row.min(range.start.row);
3038 }
3039 }
3040 }
3041
3042 let within_error = error_ranges
3043 .iter()
3044 .any(|e| e.start.row < row && e.end > row_start);
3045
3046 let suggestion = if outdent_to_row == prev_row
3047 || (outdent_from_prev_row && indent_from_prev_row)
3048 {
3049 Some(IndentSuggestion {
3050 basis_row: prev_row,
3051 delta: Ordering::Equal,
3052 within_error: within_error && !from_regex,
3053 })
3054 } else if indent_from_prev_row {
3055 Some(IndentSuggestion {
3056 basis_row: prev_row,
3057 delta: Ordering::Greater,
3058 within_error: within_error && !from_regex,
3059 })
3060 } else if outdent_to_row < prev_row {
3061 Some(IndentSuggestion {
3062 basis_row: outdent_to_row,
3063 delta: Ordering::Equal,
3064 within_error: within_error && !from_regex,
3065 })
3066 } else if outdent_from_prev_row {
3067 Some(IndentSuggestion {
3068 basis_row: prev_row,
3069 delta: Ordering::Less,
3070 within_error: within_error && !from_regex,
3071 })
3072 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3073 {
3074 Some(IndentSuggestion {
3075 basis_row: prev_row,
3076 delta: Ordering::Equal,
3077 within_error: within_error && !from_regex,
3078 })
3079 } else {
3080 None
3081 };
3082
3083 prev_row = row;
3084 prev_row_start = row_start;
3085 suggestion
3086 }))
3087 }
3088
3089 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3090 while row > 0 {
3091 row -= 1;
3092 if !self.is_line_blank(row) {
3093 return Some(row);
3094 }
3095 }
3096 None
3097 }
3098
3099 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3100 let captures = self.syntax.captures(range, &self.text, |grammar| {
3101 grammar.highlights_query.as_ref()
3102 });
3103 let highlight_maps = captures
3104 .grammars()
3105 .iter()
3106 .map(|grammar| grammar.highlight_map())
3107 .collect();
3108 (captures, highlight_maps)
3109 }
3110
3111 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3112 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3113 /// returned in chunks where each chunk has a single syntax highlighting style and
3114 /// diagnostic status.
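    ///
    /// A hedged sketch, assuming each yielded chunk exposes the `text` it covers:
    ///
    /// ```ignore
    /// // Reassemble the text of the first 100 bytes from its highlighted chunks.
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..100, true) {
    ///     text.push_str(chunk.text);
    /// }
    /// ```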
3115 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3116 let range = range.start.to_offset(self)..range.end.to_offset(self);
3117
3118 let mut syntax = None;
3119 if language_aware {
3120 syntax = Some(self.get_highlights(range.clone()));
3121 }
3122 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3123 let diagnostics = language_aware;
3124 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3125 }
3126
3127 pub fn highlighted_text_for_range<T: ToOffset>(
3128 &self,
3129 range: Range<T>,
3130 override_style: Option<HighlightStyle>,
3131 syntax_theme: &SyntaxTheme,
3132 ) -> HighlightedText {
3133 HighlightedText::from_buffer_range(
3134 range,
3135 &self.text,
3136 &self.syntax,
3137 override_style,
3138 syntax_theme,
3139 )
3140 }
3141
3142 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3144 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3145 let mut line = String::new();
3146 let mut row = range.start.row;
3147 for chunk in self
3148 .as_rope()
3149 .chunks_in_range(range.to_offset(self))
3150 .chain(["\n"])
3151 {
3152 for (newline_ix, text) in chunk.split('\n').enumerate() {
3153 if newline_ix > 0 {
3154 callback(row, &line);
3155 row += 1;
3156 line.clear();
3157 }
3158 line.push_str(text);
3159 }
3160 }
3161 }
3162
3163 /// Iterates over every [`SyntaxLayer`] in the buffer.
3164 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3165 self.syntax
3166 .layers_for_range(0..self.len(), &self.text, true)
3167 }
3168
3169 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3170 let offset = position.to_offset(self);
3171 self.syntax
3172 .layers_for_range(offset..offset, &self.text, false)
3173 .filter(|l| l.node().end_byte() > offset)
3174 .last()
3175 }
3176
3177 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3178 &self,
3179 range: Range<D>,
3180 ) -> Option<SyntaxLayer> {
3181 let range = range.to_offset(self);
3182 return self
3183 .syntax
3184 .layers_for_range(range, &self.text, false)
3185 .max_by(|a, b| {
3186 if a.depth != b.depth {
3187 a.depth.cmp(&b.depth)
3188 } else if a.offset.0 != b.offset.0 {
3189 a.offset.0.cmp(&b.offset.0)
3190 } else {
3191 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3192 }
3193 });
3194 }
3195
3196 /// Returns the main [`Language`].
3197 pub fn language(&self) -> Option<&Arc<Language>> {
3198 self.language.as_ref()
3199 }
3200
3201 /// Returns the [`Language`] at the given location.
3202 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3203 self.syntax_layer_at(position)
3204 .map(|info| info.language)
3205 .or(self.language.as_ref())
3206 }
3207
3208 /// Returns the settings for the language at the given location.
3209 pub fn settings_at<'a, D: ToOffset>(
3210 &'a self,
3211 position: D,
3212 cx: &'a App,
3213 ) -> Cow<'a, LanguageSettings> {
3214 language_settings(
3215 self.language_at(position).map(|l| l.name()),
3216 self.file.as_ref(),
3217 cx,
3218 )
3219 }
3220
3221 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3222 CharClassifier::new(self.language_scope_at(point))
3223 }
3224
3225 /// Returns the [`LanguageScope`] at the given location.
3226 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3227 let offset = position.to_offset(self);
3228 let mut scope = None;
3229 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3230
3231 // Use the layer that has the smallest node intersecting the given point.
3232 for layer in self
3233 .syntax
3234 .layers_for_range(offset..offset, &self.text, false)
3235 {
3236 let mut cursor = layer.node().walk();
3237
3238 let mut range = None;
3239 loop {
3240 let child_range = cursor.node().byte_range();
3241 if !child_range.contains(&offset) {
3242 break;
3243 }
3244
3245 range = Some(child_range);
3246 if cursor.goto_first_child_for_byte(offset).is_none() {
3247 break;
3248 }
3249 }
3250
3251 if let Some(range) = range {
3252 if smallest_range_and_depth.as_ref().map_or(
3253 true,
3254 |(smallest_range, smallest_range_depth)| {
3255 if layer.depth > *smallest_range_depth {
3256 true
3257 } else if layer.depth == *smallest_range_depth {
3258 range.len() < smallest_range.len()
3259 } else {
3260 false
3261 }
3262 },
3263 ) {
3264 smallest_range_and_depth = Some((range, layer.depth));
3265 scope = Some(LanguageScope {
3266 language: layer.language.clone(),
3267 override_id: layer.override_id(offset, &self.text),
3268 });
3269 }
3270 }
3271 }
3272
3273 scope.or_else(|| {
3274 self.language.clone().map(|language| LanguageScope {
3275 language,
3276 override_id: None,
3277 })
3278 })
3279 }
3280
3281 /// Returns a tuple of the range and character kind of the word
3282 /// surrounding the given position.
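    ///
    /// A minimal sketch, assuming the buffer contains `let foo = 1;` and that
    /// `CharKind::Word` is the classification for identifier characters:
    ///
    /// ```ignore
    /// // Offset 5 falls inside "foo", so the whole identifier is returned.
    /// let (range, kind) = snapshot.surrounding_word(5);
    /// assert_eq!(&snapshot.text()[range], "foo");
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```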
3283 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3284 let mut start = start.to_offset(self);
3285 let mut end = start;
3286 let mut next_chars = self.chars_at(start).peekable();
3287 let mut prev_chars = self.reversed_chars_at(start).peekable();
3288
3289 let classifier = self.char_classifier_at(start);
3290 let word_kind = cmp::max(
3291 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3292 next_chars.peek().copied().map(|c| classifier.kind(c)),
3293 );
3294
3295 for ch in prev_chars {
3296 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3297 start -= ch.len_utf8();
3298 } else {
3299 break;
3300 }
3301 }
3302
3303 for ch in next_chars {
3304 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3305 end += ch.len_utf8();
3306 } else {
3307 break;
3308 }
3309 }
3310
3311 (start..end, word_kind)
3312 }
3313
3314 /// Returns the closest syntax node enclosing the given range.
3315 pub fn syntax_ancestor<'a, T: ToOffset>(
3316 &'a self,
3317 range: Range<T>,
3318 ) -> Option<tree_sitter::Node<'a>> {
3319 let range = range.start.to_offset(self)..range.end.to_offset(self);
3320 let mut result: Option<tree_sitter::Node<'a>> = None;
3321 'outer: for layer in self
3322 .syntax
3323 .layers_for_range(range.clone(), &self.text, true)
3324 {
3325 let mut cursor = layer.node().walk();
3326
3327 // Descend to the first leaf that touches the start of the range.
3328 //
3329 // If the range is non-empty and the current node ends exactly at the start,
3330 // move to the next sibling to find a node that extends beyond the start.
3331 //
3332 // If the range is empty and the current node starts after the range position,
3333 // move to the previous sibling to find the node that contains the position.
3334 while cursor.goto_first_child_for_byte(range.start).is_some() {
3335 if !range.is_empty() && cursor.node().end_byte() == range.start {
3336 cursor.goto_next_sibling();
3337 }
3338 if range.is_empty() && cursor.node().start_byte() > range.start {
3339 cursor.goto_previous_sibling();
3340 }
3341 }
3342
3343 // Ascend to the smallest ancestor that strictly contains the range.
3344 loop {
3345 let node_range = cursor.node().byte_range();
3346 if node_range.start <= range.start
3347 && node_range.end >= range.end
3348 && node_range.len() > range.len()
3349 {
3350 break;
3351 }
3352 if !cursor.goto_parent() {
3353 continue 'outer;
3354 }
3355 }
3356
3357 let left_node = cursor.node();
3358 let mut layer_result = left_node;
3359
3360 // For an empty range, try to find another node immediately to the right of the range.
3361 if left_node.end_byte() == range.start {
3362 let mut right_node = None;
3363 while !cursor.goto_next_sibling() {
3364 if !cursor.goto_parent() {
3365 break;
3366 }
3367 }
3368
3369 while cursor.node().start_byte() == range.start {
3370 right_node = Some(cursor.node());
3371 if !cursor.goto_first_child() {
3372 break;
3373 }
3374 }
3375
3376 // If there is a candidate node on both sides of the (empty) range, then
3377 // decide between the two by favoring a named node over an anonymous token.
3378 // If both nodes are the same in that regard, favor the right one.
3379 if let Some(right_node) = right_node {
3380 if right_node.is_named() || !left_node.is_named() {
3381 layer_result = right_node;
3382 }
3383 }
3384 }
3385
3386 if let Some(previous_result) = &result {
3387 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3388 continue;
3389 }
3390 }
3391 result = Some(layer_result);
3392 }
3393
3394 result
3395 }
3396
3397 /// Returns the root syntax node within the given row
3398 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node> {
3399 let start_offset = position.to_offset(self);
3400
3401 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3402
3403 let layer = self
3404 .syntax
3405 .layers_for_range(start_offset..start_offset, &self.text, true)
3406 .next()?;
3407
3408 let mut cursor = layer.node().walk();
3409
3410 // Descend to the first leaf that touches the start of the range.
3411 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3412 if cursor.node().end_byte() == start_offset {
3413 cursor.goto_next_sibling();
3414 }
3415 }
3416
3417 // Ascend to the root node within the same row.
3418 while cursor.goto_parent() {
3419 if cursor.node().start_position().row != row {
3420 break;
3421 }
3422 }
3423
3424 return Some(cursor.node());
3425 }
3426
3427 /// Returns the outline for the buffer.
3428 ///
3429 /// This method allows passing an optional [`SyntaxTheme`] to
3430 /// syntax-highlight the returned symbols.
3431 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3432 self.outline_items_containing(0..self.len(), true, theme)
3433 .map(Outline::new)
3434 }
3435
3436 /// Returns all the symbols that contain the given position.
3437 ///
3438 /// This method allows passing an optional [`SyntaxTheme`] to
3439 /// syntax-highlight the returned symbols.
3440 pub fn symbols_containing<T: ToOffset>(
3441 &self,
3442 position: T,
3443 theme: Option<&SyntaxTheme>,
3444 ) -> Option<Vec<OutlineItem<Anchor>>> {
3445 let position = position.to_offset(self);
3446 let mut items = self.outline_items_containing(
3447 position.saturating_sub(1)..self.len().min(position + 1),
3448 false,
3449 theme,
3450 )?;
3451 let mut prev_depth = None;
3452 items.retain(|item| {
3453 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3454 prev_depth = Some(item.depth);
3455 result
3456 });
3457 Some(items)
3458 }
3459
3460 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3461 let range = range.to_offset(self);
3462 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3463 grammar.outline_config.as_ref().map(|c| &c.query)
3464 });
3465 let configs = matches
3466 .grammars()
3467 .iter()
3468 .map(|g| g.outline_config.as_ref().unwrap())
3469 .collect::<Vec<_>>();
3470
3471 while let Some(mat) = matches.peek() {
3472 let config = &configs[mat.grammar_index];
3473 let containing_item_node = maybe!({
3474 let item_node = mat.captures.iter().find_map(|cap| {
3475 if cap.index == config.item_capture_ix {
3476 Some(cap.node)
3477 } else {
3478 None
3479 }
3480 })?;
3481
3482 let item_byte_range = item_node.byte_range();
3483 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3484 None
3485 } else {
3486 Some(item_node)
3487 }
3488 });
3489
3490 if let Some(item_node) = containing_item_node {
3491 return Some(
3492 Point::from_ts_point(item_node.start_position())
3493 ..Point::from_ts_point(item_node.end_position()),
3494 );
3495 }
3496
3497 matches.advance();
3498 }
3499 None
3500 }
3501
3502 pub fn outline_items_containing<T: ToOffset>(
3503 &self,
3504 range: Range<T>,
3505 include_extra_context: bool,
3506 theme: Option<&SyntaxTheme>,
3507 ) -> Option<Vec<OutlineItem<Anchor>>> {
3508 let range = range.to_offset(self);
3509 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3510 grammar.outline_config.as_ref().map(|c| &c.query)
3511 });
3512 let configs = matches
3513 .grammars()
3514 .iter()
3515 .map(|g| g.outline_config.as_ref().unwrap())
3516 .collect::<Vec<_>>();
3517
3518 let mut items = Vec::new();
3519 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3520 while let Some(mat) = matches.peek() {
3521 let config = &configs[mat.grammar_index];
3522 if let Some(item) =
3523 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3524 {
3525 items.push(item);
3526 } else if let Some(capture) = mat
3527 .captures
3528 .iter()
3529 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3530 {
3531 let capture_range = capture.node.start_position()..capture.node.end_position();
3532 let mut capture_row_range =
3533 capture_range.start.row as u32..capture_range.end.row as u32;
3534 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3535 {
3536 capture_row_range.end -= 1;
3537 }
3538 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3539 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3540 last_row_range.end = capture_row_range.end;
3541 } else {
3542 annotation_row_ranges.push(capture_row_range);
3543 }
3544 } else {
3545 annotation_row_ranges.push(capture_row_range);
3546 }
3547 }
3548 matches.advance();
3549 }
3550
3551 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3552
3553 // Assign depths based on containment relationships and convert to anchors.
3554 let mut item_ends_stack = Vec::<Point>::new();
3555 let mut anchor_items = Vec::new();
3556 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3557 for item in items {
3558 while let Some(last_end) = item_ends_stack.last().copied() {
3559 if last_end < item.range.end {
3560 item_ends_stack.pop();
3561 } else {
3562 break;
3563 }
3564 }
3565
3566 let mut annotation_row_range = None;
3567 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3568 let row_preceding_item = item.range.start.row.saturating_sub(1);
3569 if next_annotation_row_range.end < row_preceding_item {
3570 annotation_row_ranges.next();
3571 } else {
3572 if next_annotation_row_range.end == row_preceding_item {
3573 annotation_row_range = Some(next_annotation_row_range.clone());
3574 annotation_row_ranges.next();
3575 }
3576 break;
3577 }
3578 }
3579
3580 anchor_items.push(OutlineItem {
3581 depth: item_ends_stack.len(),
3582 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3583 text: item.text,
3584 highlight_ranges: item.highlight_ranges,
3585 name_ranges: item.name_ranges,
3586 body_range: item.body_range.map(|body_range| {
3587 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3588 }),
3589 annotation_range: annotation_row_range.map(|annotation_range| {
3590 self.anchor_after(Point::new(annotation_range.start, 0))
3591 ..self.anchor_before(Point::new(
3592 annotation_range.end,
3593 self.line_len(annotation_range.end),
3594 ))
3595 }),
3596 });
3597 item_ends_stack.push(item.range.end);
3598 }
3599
3600 Some(anchor_items)
3601 }
3602
3603 fn next_outline_item(
3604 &self,
3605 config: &OutlineConfig,
3606 mat: &SyntaxMapMatch,
3607 range: &Range<usize>,
3608 include_extra_context: bool,
3609 theme: Option<&SyntaxTheme>,
3610 ) -> Option<OutlineItem<Point>> {
3611 let item_node = mat.captures.iter().find_map(|cap| {
3612 if cap.index == config.item_capture_ix {
3613 Some(cap.node)
3614 } else {
3615 None
3616 }
3617 })?;
3618
3619 let item_byte_range = item_node.byte_range();
3620 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3621 return None;
3622 }
3623 let item_point_range = Point::from_ts_point(item_node.start_position())
3624 ..Point::from_ts_point(item_node.end_position());
3625
3626 let mut open_point = None;
3627 let mut close_point = None;
3628 let mut buffer_ranges = Vec::new();
3629 for capture in mat.captures {
3630 let node_is_name;
3631 if capture.index == config.name_capture_ix {
3632 node_is_name = true;
3633 } else if Some(capture.index) == config.context_capture_ix
3634 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3635 {
3636 node_is_name = false;
3637 } else {
3638 if Some(capture.index) == config.open_capture_ix {
3639 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3640 } else if Some(capture.index) == config.close_capture_ix {
3641 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3642 }
3643
3644 continue;
3645 }
3646
3647 let mut range = capture.node.start_byte()..capture.node.end_byte();
3648 let start = capture.node.start_position();
3649 if capture.node.end_position().row > start.row {
3650 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3651 }
3652
3653 if !range.is_empty() {
3654 buffer_ranges.push((range, node_is_name));
3655 }
3656 }
3657 if buffer_ranges.is_empty() {
3658 return None;
3659 }
3660 let mut text = String::new();
3661 let mut highlight_ranges = Vec::new();
3662 let mut name_ranges = Vec::new();
3663 let mut chunks = self.chunks(
3664 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3665 true,
3666 );
3667 let mut last_buffer_range_end = 0;
3668
3669 for (buffer_range, is_name) in buffer_ranges {
3670 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3671 if space_added {
3672 text.push(' ');
3673 }
3674 let before_append_len = text.len();
3675 let mut offset = buffer_range.start;
3676 chunks.seek(buffer_range.clone());
3677 for mut chunk in chunks.by_ref() {
3678 if chunk.text.len() > buffer_range.end - offset {
3679 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3680 offset = buffer_range.end;
3681 } else {
3682 offset += chunk.text.len();
3683 }
3684 let style = chunk
3685 .syntax_highlight_id
3686 .zip(theme)
3687 .and_then(|(highlight, theme)| highlight.style(theme));
3688 if let Some(style) = style {
3689 let start = text.len();
3690 let end = start + chunk.text.len();
3691 highlight_ranges.push((start..end, style));
3692 }
3693 text.push_str(chunk.text);
3694 if offset >= buffer_range.end {
3695 break;
3696 }
3697 }
3698 if is_name {
3699 let after_append_len = text.len();
3700 let start = if space_added && !name_ranges.is_empty() {
3701 before_append_len - 1
3702 } else {
3703 before_append_len
3704 };
3705 name_ranges.push(start..after_append_len);
3706 }
3707 last_buffer_range_end = buffer_range.end;
3708 }
3709
3710 Some(OutlineItem {
3711 depth: 0, // We'll calculate the depth later
3712 range: item_point_range,
3713 text,
3714 highlight_ranges,
3715 name_ranges,
3716 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3717 annotation_range: None,
3718 })
3719 }
3720
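    /// Returns the ranges of function bodies (the `InsideFunction` text object)
    /// that intersect the given range, e.g. for folding all function bodies.
    ///
    /// A minimal usage sketch (not compiled as a doctest); `visible_range` is a
    /// hypothetical byte range:
    ///
    /// ```ignore
    /// let fold_ranges: Vec<Range<usize>> =
    ///     snapshot.function_body_fold_ranges(visible_range).collect();
    /// ```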
3721 pub fn function_body_fold_ranges<T: ToOffset>(
3722 &self,
3723 within: Range<T>,
3724 ) -> impl Iterator<Item = Range<usize>> + '_ {
3725 self.text_object_ranges(within, TreeSitterOptions::default())
3726 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3727 }
3728
    /// For each grammar applicable to the given range, runs the
    /// [`tree_sitter::Query`] selected by the provided callback against that range.
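    ///
    /// A minimal usage sketch (not compiled as a doctest), running each grammar's
    /// outline query over the start of the buffer; `snapshot` is assumed to be a
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len().min(1024), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures`, `mat.grammar_index`, etc. here.
    ///     matches.advance();
    /// }
    /// ```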
3731 pub fn matches(
3732 &self,
3733 range: Range<usize>,
3734 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3735 ) -> SyntaxMapMatches {
3736 self.syntax.matches(range, self, query)
3737 }
3738
3739 pub fn all_bracket_ranges(
3740 &self,
3741 range: Range<usize>,
3742 ) -> impl Iterator<Item = BracketMatch> + '_ {
3743 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3744 grammar.brackets_config.as_ref().map(|c| &c.query)
3745 });
3746 let configs = matches
3747 .grammars()
3748 .iter()
3749 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3750 .collect::<Vec<_>>();
3751
3752 iter::from_fn(move || {
3753 while let Some(mat) = matches.peek() {
3754 let mut open = None;
3755 let mut close = None;
3756 let config = &configs[mat.grammar_index];
3757 let pattern = &config.patterns[mat.pattern_index];
3758 for capture in mat.captures {
3759 if capture.index == config.open_capture_ix {
3760 open = Some(capture.node.byte_range());
3761 } else if capture.index == config.close_capture_ix {
3762 close = Some(capture.node.byte_range());
3763 }
3764 }
3765
3766 matches.advance();
3767
3768 let Some((open_range, close_range)) = open.zip(close) else {
3769 continue;
3770 };
3771
3772 let bracket_range = open_range.start..=close_range.end;
3773 if !bracket_range.overlaps(&range) {
3774 continue;
3775 }
3776
3777 return Some(BracketMatch {
3778 open_range,
3779 close_range,
3780 newline_only: pattern.newline_only,
3781 });
3782 }
3783 None
3784 })
3785 }
3786
    /// Returns bracket range pairs overlapping or adjacent to `range`.
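    ///
    /// A minimal usage sketch (not compiled as a doctest); `cursor_offset` is a
    /// hypothetical byte offset:
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(cursor_offset..cursor_offset) {
    ///     // `pair.open_range` and `pair.close_range` are the byte ranges of the
    ///     // opening and closing delimiters.
    /// }
    /// ```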
3788 pub fn bracket_ranges<T: ToOffset>(
3789 &self,
3790 range: Range<T>,
3791 ) -> impl Iterator<Item = BracketMatch> + '_ {
3792 // Find bracket pairs that *inclusively* contain the given range.
3793 let range = range.start.to_offset(self).saturating_sub(1)
3794 ..self.len().min(range.end.to_offset(self) + 1);
3795 self.all_bracket_ranges(range)
3796 .filter(|pair| !pair.newline_only)
3797 }
3798
3799 pub fn text_object_ranges<T: ToOffset>(
3800 &self,
3801 range: Range<T>,
3802 options: TreeSitterOptions,
3803 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3804 let range = range.start.to_offset(self).saturating_sub(1)
3805 ..self.len().min(range.end.to_offset(self) + 1);
3806
3807 let mut matches =
3808 self.syntax
3809 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3810 grammar.text_object_config.as_ref().map(|c| &c.query)
3811 });
3812
3813 let configs = matches
3814 .grammars()
3815 .iter()
3816 .map(|grammar| grammar.text_object_config.as_ref())
3817 .collect::<Vec<_>>();
3818
3819 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3820
3821 iter::from_fn(move || {
3822 loop {
3823 while let Some(capture) = captures.pop() {
3824 if capture.0.overlaps(&range) {
3825 return Some(capture);
3826 }
3827 }
3828
3829 let mat = matches.peek()?;
3830
3831 let Some(config) = configs[mat.grammar_index].as_ref() else {
3832 matches.advance();
3833 continue;
3834 };
3835
3836 for capture in mat.captures {
3837 let Some(ix) = config
3838 .text_objects_by_capture_ix
3839 .binary_search_by_key(&capture.index, |e| e.0)
3840 .ok()
3841 else {
3842 continue;
3843 };
3844 let text_object = config.text_objects_by_capture_ix[ix].1;
3845 let byte_range = capture.node.byte_range();
3846
3847 let mut found = false;
3848 for (range, existing) in captures.iter_mut() {
3849 if existing == &text_object {
3850 range.start = range.start.min(byte_range.start);
3851 range.end = range.end.max(byte_range.end);
3852 found = true;
3853 break;
3854 }
3855 }
3856
3857 if !found {
3858 captures.push((byte_range, text_object));
3859 }
3860 }
3861
3862 matches.advance();
3863 }
3864 })
3865 }
3866
    /// Returns enclosing bracket ranges containing the given range.
3868 pub fn enclosing_bracket_ranges<T: ToOffset>(
3869 &self,
3870 range: Range<T>,
3871 ) -> impl Iterator<Item = BracketMatch> + '_ {
3872 let range = range.start.to_offset(self)..range.end.to_offset(self);
3873
3874 self.bracket_ranges(range.clone()).filter(move |pair| {
3875 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3876 })
3877 }
3878
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no bracket pair contains it.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket pairs
    /// are considered.
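    ///
    /// A minimal usage sketch (not compiled as a doctest); `selection_range` is a
    /// hypothetical byte range and the filter keeps only single-character delimiters:
    ///
    /// ```ignore
    /// let filter = |open: Range<usize>, close: Range<usize>| open.len() == 1 && close.len() == 1;
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(
    ///     selection_range,
    ///     Some(&filter as &dyn Fn(Range<usize>, Range<usize>) -> bool),
    /// );
    /// ```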
3882 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3883 &self,
3884 range: Range<T>,
3885 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3886 ) -> Option<(Range<usize>, Range<usize>)> {
3887 let range = range.start.to_offset(self)..range.end.to_offset(self);
3888
3889 // Get the ranges of the innermost pair of brackets.
3890 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3891
3892 for pair in self.enclosing_bracket_ranges(range.clone()) {
3893 if let Some(range_filter) = range_filter {
3894 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3895 continue;
3896 }
3897 }
3898
3899 let len = pair.close_range.end - pair.open_range.start;
3900
3901 if let Some((existing_open, existing_close)) = &result {
3902 let existing_len = existing_close.end - existing_open.start;
3903 if len > existing_len {
3904 continue;
3905 }
3906 }
3907
3908 result = Some((pair.open_range, pair.close_range));
3909 }
3910
3911 result
3912 }
3913
    /// Returns offset ranges for any matches of the redaction query.
3915 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3916 /// will be run on the relevant section of the buffer.
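    ///
    /// A minimal usage sketch (not compiled as a doctest); `visible_range` is a
    /// hypothetical byte range:
    ///
    /// ```ignore
    /// // Byte ranges the language marks as redacted, e.g. secret values that an
    /// // editor may want to render obscured.
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(visible_range).collect();
    /// ```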
3917 pub fn redacted_ranges<T: ToOffset>(
3918 &self,
3919 range: Range<T>,
3920 ) -> impl Iterator<Item = Range<usize>> + '_ {
3921 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3922 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3923 grammar
3924 .redactions_config
3925 .as_ref()
3926 .map(|config| &config.query)
3927 });
3928
3929 let configs = syntax_matches
3930 .grammars()
3931 .iter()
3932 .map(|grammar| grammar.redactions_config.as_ref())
3933 .collect::<Vec<_>>();
3934
3935 iter::from_fn(move || {
3936 let redacted_range = syntax_matches
3937 .peek()
3938 .and_then(|mat| {
3939 configs[mat.grammar_index].and_then(|config| {
3940 mat.captures
3941 .iter()
3942 .find(|capture| capture.index == config.redaction_capture_ix)
3943 })
3944 })
3945 .map(|mat| mat.node.byte_range());
3946 syntax_matches.advance();
3947 redacted_range
3948 })
3949 }
3950
3951 pub fn injections_intersecting_range<T: ToOffset>(
3952 &self,
3953 range: Range<T>,
3954 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3955 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3956
3957 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3958 grammar
3959 .injection_config
3960 .as_ref()
3961 .map(|config| &config.query)
3962 });
3963
3964 let configs = syntax_matches
3965 .grammars()
3966 .iter()
3967 .map(|grammar| grammar.injection_config.as_ref())
3968 .collect::<Vec<_>>();
3969
3970 iter::from_fn(move || {
3971 let ranges = syntax_matches.peek().and_then(|mat| {
3972 let config = &configs[mat.grammar_index]?;
3973 let content_capture_range = mat.captures.iter().find_map(|capture| {
3974 if capture.index == config.content_capture_ix {
3975 Some(capture.node.byte_range())
3976 } else {
3977 None
3978 }
3979 })?;
3980 let language = self.language_at(content_capture_range.start)?;
3981 Some((content_capture_range, language))
3982 });
3983 syntax_matches.advance();
3984 ranges
3985 })
3986 }
3987
3988 pub fn runnable_ranges(
3989 &self,
3990 offset_range: Range<usize>,
3991 ) -> impl Iterator<Item = RunnableRange> + '_ {
3992 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3993 grammar.runnable_config.as_ref().map(|config| &config.query)
3994 });
3995
3996 let test_configs = syntax_matches
3997 .grammars()
3998 .iter()
3999 .map(|grammar| grammar.runnable_config.as_ref())
4000 .collect::<Vec<_>>();
4001
4002 iter::from_fn(move || {
4003 loop {
4004 let mat = syntax_matches.peek()?;
4005
4006 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4007 let mut run_range = None;
4008 let full_range = mat.captures.iter().fold(
4009 Range {
4010 start: usize::MAX,
4011 end: 0,
4012 },
4013 |mut acc, next| {
4014 let byte_range = next.node.byte_range();
4015 if acc.start > byte_range.start {
4016 acc.start = byte_range.start;
4017 }
4018 if acc.end < byte_range.end {
4019 acc.end = byte_range.end;
4020 }
4021 acc
4022 },
4023 );
4024 if full_range.start > full_range.end {
4025 // We did not find a full spanning range of this match.
4026 return None;
4027 }
4028 let extra_captures: SmallVec<[_; 1]> =
4029 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4030 test_configs
4031 .extra_captures
4032 .get(capture.index as usize)
4033 .cloned()
4034 .and_then(|tag_name| match tag_name {
4035 RunnableCapture::Named(name) => {
4036 Some((capture.node.byte_range(), name))
4037 }
4038 RunnableCapture::Run => {
4039 let _ = run_range.insert(capture.node.byte_range());
4040 None
4041 }
4042 })
4043 }));
4044 let run_range = run_range?;
4045 let tags = test_configs
4046 .query
4047 .property_settings(mat.pattern_index)
4048 .iter()
4049 .filter_map(|property| {
4050 if *property.key == *"tag" {
4051 property
4052 .value
4053 .as_ref()
4054 .map(|value| RunnableTag(value.to_string().into()))
4055 } else {
4056 None
4057 }
4058 })
4059 .collect();
4060 let extra_captures = extra_captures
4061 .into_iter()
4062 .map(|(range, name)| {
4063 (
4064 name.to_string(),
4065 self.text_for_range(range.clone()).collect::<String>(),
4066 )
4067 })
4068 .collect();
4069 // All tags should have the same range.
4070 Some(RunnableRange {
4071 run_range,
4072 full_range,
4073 runnable: Runnable {
4074 tags,
4075 language: mat.language,
4076 buffer: self.remote_id(),
4077 },
4078 extra_captures,
4079 buffer_id: self.remote_id(),
4080 })
4081 });
4082
4083 syntax_matches.advance();
4084 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`: that means we're out of matches.
                    // But a match that lacked a run marker shouldn't end this iterator, so in that case we just
                    // loop around to the next match.
4087 return test_range;
4088 }
4089 }
4090 })
4091 }
4092
    /// Returns the selections of all remote peers intersecting the given range,
    /// optionally including the local replica's selections.
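    ///
    /// A minimal usage sketch (not compiled as a doctest); `visible_range` is a
    /// hypothetical anchor range:
    ///
    /// ```ignore
    /// // Collect every remote cursor position in the visible range.
    /// let mut cursors = Vec::new();
    /// for (replica_id, _line_mode, _cursor_shape, selections) in
    ///     snapshot.selections_in_range(visible_range, false)
    /// {
    ///     for selection in selections {
    ///         cursors.push((replica_id, selection.head()));
    ///     }
    /// }
    /// ```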
4094 #[allow(clippy::type_complexity)]
4095 pub fn selections_in_range(
4096 &self,
4097 range: Range<Anchor>,
4098 include_local: bool,
4099 ) -> impl Iterator<
4100 Item = (
4101 ReplicaId,
4102 bool,
4103 CursorShape,
4104 impl Iterator<Item = &Selection<Anchor>> + '_,
4105 ),
4106 > + '_ {
4107 self.remote_selections
4108 .iter()
4109 .filter(move |(replica_id, set)| {
4110 (include_local || **replica_id != self.text.replica_id())
4111 && !set.selections.is_empty()
4112 })
4113 .map(move |(replica_id, set)| {
4114 let start_ix = match set.selections.binary_search_by(|probe| {
4115 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4116 }) {
4117 Ok(ix) | Err(ix) => ix,
4118 };
4119 let end_ix = match set.selections.binary_search_by(|probe| {
4120 probe.start.cmp(&range.end, self).then(Ordering::Less)
4121 }) {
4122 Ok(ix) | Err(ix) => ix,
4123 };
4124
4125 (
4126 *replica_id,
4127 set.line_mode,
4128 set.cursor_shape,
4129 set.selections[start_ix..end_ix].iter(),
4130 )
4131 })
4132 }
4133
    /// Returns whether the buffer contains any diagnostics.
4135 pub fn has_diagnostics(&self) -> bool {
4136 !self.diagnostics.is_empty()
4137 }
4138
4139 /// Returns all the diagnostics intersecting the given range.
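    ///
    /// A minimal usage sketch (not compiled as a doctest), collecting all
    /// error-severity diagnostics as byte-offset ranges:
    ///
    /// ```ignore
    /// let errors: Vec<_> = snapshot
    ///     .diagnostics_in_range::<_, usize>(0..snapshot.len(), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```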
4140 pub fn diagnostics_in_range<'a, T, O>(
4141 &'a self,
4142 search_range: Range<T>,
4143 reversed: bool,
4144 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4145 where
4146 T: 'a + Clone + ToOffset,
4147 O: 'a + FromAnchor,
4148 {
4149 let mut iterators: Vec<_> = self
4150 .diagnostics
4151 .iter()
4152 .map(|(_, collection)| {
4153 collection
4154 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4155 .peekable()
4156 })
4157 .collect();
4158
4159 std::iter::from_fn(move || {
4160 let (next_ix, _) = iterators
4161 .iter_mut()
4162 .enumerate()
4163 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4164 .min_by(|(_, a), (_, b)| {
4165 let cmp = a
4166 .range
4167 .start
4168 .cmp(&b.range.start, self)
4169 // when range is equal, sort by diagnostic severity
4170 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4171 // and stabilize order with group_id
4172 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4173 if reversed { cmp.reverse() } else { cmp }
4174 })?;
4175 iterators[next_ix]
4176 .next()
4177 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4178 diagnostic,
4179 range: FromAnchor::from_anchor(&range.start, self)
4180 ..FromAnchor::from_anchor(&range.end, self),
4181 })
4182 })
4183 }
4184
4185 /// Returns all the diagnostic groups associated with the given
4186 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
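    ///
    /// A minimal usage sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// // Diagnostic groups from every language server, ordered by the position
    /// // of each group's primary diagnostic.
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{server_id:?}: {}", primary.diagnostic.message);
    /// }
    /// ```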
4188 pub fn diagnostic_groups(
4189 &self,
4190 language_server_id: Option<LanguageServerId>,
4191 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4192 let mut groups = Vec::new();
4193
4194 if let Some(language_server_id) = language_server_id {
4195 if let Ok(ix) = self
4196 .diagnostics
4197 .binary_search_by_key(&language_server_id, |e| e.0)
4198 {
4199 self.diagnostics[ix]
4200 .1
4201 .groups(language_server_id, &mut groups, self);
4202 }
4203 } else {
4204 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4205 diagnostics.groups(*language_server_id, &mut groups, self);
4206 }
4207 }
4208
4209 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4210 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4211 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4212 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4213 });
4214
4215 groups
4216 }
4217
4218 /// Returns an iterator over the diagnostics for the given group.
4219 pub fn diagnostic_group<O>(
4220 &self,
4221 group_id: usize,
4222 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4223 where
4224 O: FromAnchor + 'static,
4225 {
4226 self.diagnostics
4227 .iter()
4228 .flat_map(move |(_, set)| set.group(group_id, self))
4229 }
4230
4231 /// An integer version number that accounts for all updates besides
4232 /// the buffer's text itself (which is versioned via a version vector).
4233 pub fn non_text_state_update_count(&self) -> usize {
4234 self.non_text_state_update_count
4235 }
4236
    /// Returns a snapshot of the underlying file.
4238 pub fn file(&self) -> Option<&Arc<dyn File>> {
4239 self.file.as_ref()
4240 }
4241
    /// Resolves the file path (relative to the worktree root) associated with the underlying
    /// file, including the worktree root itself when `include_root` is true.
4243 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4244 if let Some(file) = self.file() {
4245 if file.path().file_name().is_none() || include_root {
4246 Some(file.full_path(cx))
4247 } else {
4248 Some(file.path().to_path_buf())
4249 }
4250 } else {
4251 None
4252 }
4253 }
4254
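    /// Returns the unique words in `query.range`, keyed by their text, each mapped
    /// to the anchor range where one occurrence of the word was found.
    ///
    /// A minimal usage sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// // Words in the first 4 KiB that fuzzily match "cfg", skipping words that
    /// // start with a digit.
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len().min(4096),
    /// });
    /// ```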
4255 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4256 let query_str = query.fuzzy_contents;
4257 if query_str.map_or(false, |query| query.is_empty()) {
4258 return BTreeMap::default();
4259 }
4260
4261 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4262 language,
4263 override_id: None,
4264 }));
4265
4266 let mut query_ix = 0;
4267 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4268 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4269
4270 let mut words = BTreeMap::default();
4271 let mut current_word_start_ix = None;
4272 let mut chunk_ix = query.range.start;
4273 for chunk in self.chunks(query.range, false) {
4274 for (i, c) in chunk.text.char_indices() {
4275 let ix = chunk_ix + i;
4276 if classifier.is_word(c) {
4277 if current_word_start_ix.is_none() {
4278 current_word_start_ix = Some(ix);
4279 }
4280
4281 if let Some(query_chars) = &query_chars {
4282 if query_ix < query_len {
4283 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4284 query_ix += 1;
4285 }
4286 }
4287 }
4288 continue;
4289 } else if let Some(word_start) = current_word_start_ix.take() {
4290 if query_ix == query_len {
4291 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4292 let mut word_text = self.text_for_range(word_start..ix).peekable();
4293 let first_char = word_text
4294 .peek()
4295 .and_then(|first_chunk| first_chunk.chars().next());
                        // As a heuristic to reduce useless completions, skip empty words and "words" that start with a digit.
4297 if !query.skip_digits
4298 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4299 {
4300 words.insert(word_text.collect(), word_range);
4301 }
4302 }
4303 }
4304 query_ix = 0;
4305 }
4306 chunk_ix += chunk.text.len();
4307 }
4308
4309 words
4310 }
4311}
4312
4313pub struct WordsQuery<'a> {
    /// Only return words that contain every character of this string, in order (a case-insensitive subsequence match).
4315 pub fuzzy_contents: Option<&'a str>,
4316 /// Skips words that start with a digit.
4317 pub skip_digits: bool,
    /// The buffer offset range within which to look for words.
4319 pub range: Range<usize>,
4320}
4321
4322fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4323 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4324}
4325
4326fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4327 let mut result = IndentSize::spaces(0);
4328 for c in text {
4329 let kind = match c {
4330 ' ' => IndentKind::Space,
4331 '\t' => IndentKind::Tab,
4332 _ => break,
4333 };
4334 if result.len == 0 {
4335 result.kind = kind;
4336 }
4337 result.len += 1;
4338 }
4339 result
4340}
4341
4342impl Clone for BufferSnapshot {
4343 fn clone(&self) -> Self {
4344 Self {
4345 text: self.text.clone(),
4346 syntax: self.syntax.clone(),
4347 file: self.file.clone(),
4348 remote_selections: self.remote_selections.clone(),
4349 diagnostics: self.diagnostics.clone(),
4350 language: self.language.clone(),
4351 non_text_state_update_count: self.non_text_state_update_count,
4352 }
4353 }
4354}
4355
4356impl Deref for BufferSnapshot {
4357 type Target = text::BufferSnapshot;
4358
4359 fn deref(&self) -> &Self::Target {
4360 &self.text
4361 }
4362}
4363
4364unsafe impl Send for BufferChunks<'_> {}
4365
4366impl<'a> BufferChunks<'a> {
4367 pub(crate) fn new(
4368 text: &'a Rope,
4369 range: Range<usize>,
4370 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4371 diagnostics: bool,
4372 buffer_snapshot: Option<&'a BufferSnapshot>,
4373 ) -> Self {
4374 let mut highlights = None;
4375 if let Some((captures, highlight_maps)) = syntax {
4376 highlights = Some(BufferChunkHighlights {
4377 captures,
4378 next_capture: None,
4379 stack: Default::default(),
4380 highlight_maps,
4381 })
4382 }
4383
4384 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4385 let chunks = text.chunks_in_range(range.clone());
4386
4387 let mut this = BufferChunks {
4388 range,
4389 buffer_snapshot,
4390 chunks,
4391 diagnostic_endpoints,
4392 error_depth: 0,
4393 warning_depth: 0,
4394 information_depth: 0,
4395 hint_depth: 0,
4396 unnecessary_depth: 0,
4397 underline: true,
4398 highlights,
4399 };
4400 this.initialize_diagnostic_endpoints();
4401 this
4402 }
4403
    /// Seeks to the given byte range in the buffer.
4405 pub fn seek(&mut self, range: Range<usize>) {
4406 let old_range = std::mem::replace(&mut self.range, range.clone());
4407 self.chunks.set_range(self.range.clone());
4408 if let Some(highlights) = self.highlights.as_mut() {
4409 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4410 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4411 highlights
4412 .stack
4413 .retain(|(end_offset, _)| *end_offset > range.start);
4414 if let Some(capture) = &highlights.next_capture {
4415 if range.start >= capture.node.start_byte() {
4416 let next_capture_end = capture.node.end_byte();
4417 if range.start < next_capture_end {
4418 highlights.stack.push((
4419 next_capture_end,
4420 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4421 ));
4422 }
4423 highlights.next_capture.take();
4424 }
4425 }
4426 } else if let Some(snapshot) = self.buffer_snapshot {
4427 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4428 *highlights = BufferChunkHighlights {
4429 captures,
4430 next_capture: None,
4431 stack: Default::default(),
4432 highlight_maps,
4433 };
4434 } else {
4435 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4436 // Seeking such BufferChunks is not supported.
4437 debug_assert!(
4438 false,
4439 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4440 );
4441 }
4442
4443 highlights.captures.set_byte_range(self.range.clone());
4444 self.initialize_diagnostic_endpoints();
4445 }
4446 }
4447
4448 fn initialize_diagnostic_endpoints(&mut self) {
4449 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4450 if let Some(buffer) = self.buffer_snapshot {
4451 let mut diagnostic_endpoints = Vec::new();
4452 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4453 diagnostic_endpoints.push(DiagnosticEndpoint {
4454 offset: entry.range.start,
4455 is_start: true,
4456 severity: entry.diagnostic.severity,
4457 is_unnecessary: entry.diagnostic.is_unnecessary,
4458 underline: entry.diagnostic.underline,
4459 });
4460 diagnostic_endpoints.push(DiagnosticEndpoint {
4461 offset: entry.range.end,
4462 is_start: false,
4463 severity: entry.diagnostic.severity,
4464 is_unnecessary: entry.diagnostic.is_unnecessary,
4465 underline: entry.diagnostic.underline,
4466 });
4467 }
4468 diagnostic_endpoints
4469 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4470 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4471 self.hint_depth = 0;
4472 self.error_depth = 0;
4473 self.warning_depth = 0;
4474 self.information_depth = 0;
4475 }
4476 }
4477 }
4478
4479 /// The current byte offset in the buffer.
4480 pub fn offset(&self) -> usize {
4481 self.range.start
4482 }
4483
4484 pub fn range(&self) -> Range<usize> {
4485 self.range.clone()
4486 }
4487
4488 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4489 let depth = match endpoint.severity {
4490 DiagnosticSeverity::ERROR => &mut self.error_depth,
4491 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4492 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4493 DiagnosticSeverity::HINT => &mut self.hint_depth,
4494 _ => return,
4495 };
4496 if endpoint.is_start {
4497 *depth += 1;
4498 } else {
4499 *depth -= 1;
4500 }
4501
4502 if endpoint.is_unnecessary {
4503 if endpoint.is_start {
4504 self.unnecessary_depth += 1;
4505 } else {
4506 self.unnecessary_depth -= 1;
4507 }
4508 }
4509 }
4510
4511 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4512 if self.error_depth > 0 {
4513 Some(DiagnosticSeverity::ERROR)
4514 } else if self.warning_depth > 0 {
4515 Some(DiagnosticSeverity::WARNING)
4516 } else if self.information_depth > 0 {
4517 Some(DiagnosticSeverity::INFORMATION)
4518 } else if self.hint_depth > 0 {
4519 Some(DiagnosticSeverity::HINT)
4520 } else {
4521 None
4522 }
4523 }
4524
4525 fn current_code_is_unnecessary(&self) -> bool {
4526 self.unnecessary_depth > 0
4527 }
4528}
4529
4530impl<'a> Iterator for BufferChunks<'a> {
4531 type Item = Chunk<'a>;
4532
4533 fn next(&mut self) -> Option<Self::Item> {
4534 let mut next_capture_start = usize::MAX;
4535 let mut next_diagnostic_endpoint = usize::MAX;
4536
4537 if let Some(highlights) = self.highlights.as_mut() {
4538 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4539 if *parent_capture_end <= self.range.start {
4540 highlights.stack.pop();
4541 } else {
4542 break;
4543 }
4544 }
4545
4546 if highlights.next_capture.is_none() {
4547 highlights.next_capture = highlights.captures.next();
4548 }
4549
4550 while let Some(capture) = highlights.next_capture.as_ref() {
4551 if self.range.start < capture.node.start_byte() {
4552 next_capture_start = capture.node.start_byte();
4553 break;
4554 } else {
4555 let highlight_id =
4556 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4557 highlights
4558 .stack
4559 .push((capture.node.end_byte(), highlight_id));
4560 highlights.next_capture = highlights.captures.next();
4561 }
4562 }
4563 }
4564
4565 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4566 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4567 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4568 if endpoint.offset <= self.range.start {
4569 self.update_diagnostic_depths(endpoint);
4570 diagnostic_endpoints.next();
4571 self.underline = endpoint.underline;
4572 } else {
4573 next_diagnostic_endpoint = endpoint.offset;
4574 break;
4575 }
4576 }
4577 }
4578 self.diagnostic_endpoints = diagnostic_endpoints;
4579
4580 if let Some(chunk) = self.chunks.peek() {
4581 let chunk_start = self.range.start;
4582 let mut chunk_end = (self.chunks.offset() + chunk.len())
4583 .min(next_capture_start)
4584 .min(next_diagnostic_endpoint);
4585 let mut highlight_id = None;
4586 if let Some(highlights) = self.highlights.as_ref() {
4587 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4588 chunk_end = chunk_end.min(*parent_capture_end);
4589 highlight_id = Some(*parent_highlight_id);
4590 }
4591 }
4592
4593 let slice =
4594 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4595 self.range.start = chunk_end;
4596 if self.range.start == self.chunks.offset() + chunk.len() {
4597 self.chunks.next().unwrap();
4598 }
4599
4600 Some(Chunk {
4601 text: slice,
4602 syntax_highlight_id: highlight_id,
4603 underline: self.underline,
4604 diagnostic_severity: self.current_diagnostic_severity(),
4605 is_unnecessary: self.current_code_is_unnecessary(),
4606 ..Chunk::default()
4607 })
4608 } else {
4609 None
4610 }
4611 }
4612}
4613
4614impl operation_queue::Operation for Operation {
4615 fn lamport_timestamp(&self) -> clock::Lamport {
4616 match self {
4617 Operation::Buffer(_) => {
4618 unreachable!("buffer operations should never be deferred at this layer")
4619 }
4620 Operation::UpdateDiagnostics {
4621 lamport_timestamp, ..
4622 }
4623 | Operation::UpdateSelections {
4624 lamport_timestamp, ..
4625 }
4626 | Operation::UpdateCompletionTriggers {
4627 lamport_timestamp, ..
4628 } => *lamport_timestamp,
4629 }
4630 }
4631}
4632
4633impl Default for Diagnostic {
4634 fn default() -> Self {
4635 Self {
4636 source: Default::default(),
4637 code: None,
4638 code_description: None,
4639 severity: DiagnosticSeverity::ERROR,
4640 message: Default::default(),
4641 markdown: None,
4642 group_id: 0,
4643 is_primary: false,
4644 is_disk_based: false,
4645 is_unnecessary: false,
4646 underline: true,
4647 data: None,
4648 }
4649 }
4650}
4651
4652impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4654 pub fn spaces(len: u32) -> Self {
4655 Self {
4656 len,
4657 kind: IndentKind::Space,
4658 }
4659 }
4660
4661 /// Returns an [`IndentSize`] representing a tab.
4662 pub fn tab() -> Self {
4663 Self {
4664 len: 1,
4665 kind: IndentKind::Tab,
4666 }
4667 }
4668
4669 /// An iterator over the characters represented by this [`IndentSize`].
4670 pub fn chars(&self) -> impl Iterator<Item = char> {
4671 iter::repeat(self.char()).take(self.len as usize)
4672 }
4673
4674 /// The character representation of this [`IndentSize`].
4675 pub fn char(&self) -> char {
4676 match self.kind {
4677 IndentKind::Space => ' ',
4678 IndentKind::Tab => '\t',
4679 }
4680 }
4681
4682 /// Consumes the current [`IndentSize`] and returns a new one that has
4683 /// been shrunk or enlarged by the given size along the given direction.
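    ///
    /// A hand-checked sketch of how deltas combine (not compiled as a doctest):
    ///
    /// ```ignore
    /// // Grow a 4-space indent by another 4 spaces, then shrink it back.
    /// let tab = NonZeroU32::new(4).unwrap();
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(grown.len_with_expanded_tabs(tab), 8);
    /// let shrunk = grown.with_delta(Ordering::Less, IndentSize::spaces(4));
    /// assert_eq!(shrunk.len_with_expanded_tabs(tab), 4);
    /// ```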
4684 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4685 match direction {
4686 Ordering::Less => {
4687 if self.kind == size.kind && self.len >= size.len {
4688 self.len -= size.len;
4689 }
4690 }
4691 Ordering::Equal => {}
4692 Ordering::Greater => {
4693 if self.len == 0 {
4694 self = size;
4695 } else if self.kind == size.kind {
4696 self.len += size.len;
4697 }
4698 }
4699 }
4700 self
4701 }
4702
4703 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4704 match self.kind {
4705 IndentKind::Space => self.len as usize,
4706 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4707 }
4708 }
4709}
4710
4711#[cfg(any(test, feature = "test-support"))]
4712pub struct TestFile {
4713 pub path: Arc<Path>,
4714 pub root_name: String,
4715 pub local_root: Option<PathBuf>,
4716}
4717
4718#[cfg(any(test, feature = "test-support"))]
4719impl File for TestFile {
4720 fn path(&self) -> &Arc<Path> {
4721 &self.path
4722 }
4723
4724 fn full_path(&self, _: &gpui::App) -> PathBuf {
4725 PathBuf::from(&self.root_name).join(self.path.as_ref())
4726 }
4727
4728 fn as_local(&self) -> Option<&dyn LocalFile> {
4729 if self.local_root.is_some() {
4730 Some(self)
4731 } else {
4732 None
4733 }
4734 }
4735
4736 fn disk_state(&self) -> DiskState {
4737 unimplemented!()
4738 }
4739
4740 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4741 self.path().file_name().unwrap_or(self.root_name.as_ref())
4742 }
4743
4744 fn worktree_id(&self, _: &App) -> WorktreeId {
4745 WorktreeId::from_usize(0)
4746 }
4747
4748 fn to_proto(&self, _: &App) -> rpc::proto::File {
4749 unimplemented!()
4750 }
4751
4752 fn is_private(&self) -> bool {
4753 false
4754 }
4755}
4756
4757#[cfg(any(test, feature = "test-support"))]
4758impl LocalFile for TestFile {
4759 fn abs_path(&self, _cx: &App) -> PathBuf {
4760 PathBuf::from(self.local_root.as_ref().unwrap())
4761 .join(&self.root_name)
4762 .join(self.path.as_ref())
4763 }
4764
4765 fn load(&self, _cx: &App) -> Task<Result<String>> {
4766 unimplemented!()
4767 }
4768
4769 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4770 unimplemented!()
4771 }
4772}
4773
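/// Coalesces an ascending sequence of row numbers into ranges of consecutive
/// values, never letting a single range grow beyond `max_len` rows.
///
/// A hand-checked sketch (not compiled as a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 3).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```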
4774pub(crate) fn contiguous_ranges(
4775 values: impl Iterator<Item = u32>,
4776 max_len: usize,
4777) -> impl Iterator<Item = Range<u32>> {
4778 let mut values = values;
4779 let mut current_range: Option<Range<u32>> = None;
4780 std::iter::from_fn(move || {
4781 loop {
4782 if let Some(value) = values.next() {
4783 if let Some(range) = &mut current_range {
4784 if value == range.end && range.len() < max_len {
4785 range.end += 1;
4786 continue;
4787 }
4788 }
4789
4790 let prev_range = current_range.clone();
4791 current_range = Some(value..(value + 1));
4792 if prev_range.is_some() {
4793 return prev_range;
4794 }
4795 } else {
4796 return current_range.take();
4797 }
4798 }
4799 })
4800}
4801
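/// Classifies characters as word, whitespace, or punctuation, optionally taking
/// a language scope's extra word characters into account.
///
/// A minimal usage sketch (not compiled as a doctest), with no language scope:
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(false);
/// assert!(classifier.is_word('x'));
/// assert!(classifier.is_whitespace(' '));
/// assert!(classifier.is_punctuation('.'));
/// ```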
4802#[derive(Default, Debug)]
4803pub struct CharClassifier {
4804 scope: Option<LanguageScope>,
4805 for_completion: bool,
4806 ignore_punctuation: bool,
4807}
4808
4809impl CharClassifier {
4810 pub fn new(scope: Option<LanguageScope>) -> Self {
4811 Self {
4812 scope,
4813 for_completion: false,
4814 ignore_punctuation: false,
4815 }
4816 }
4817
4818 pub fn for_completion(self, for_completion: bool) -> Self {
4819 Self {
4820 for_completion,
4821 ..self
4822 }
4823 }
4824
4825 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4826 Self {
4827 ignore_punctuation,
4828 ..self
4829 }
4830 }
4831
4832 pub fn is_whitespace(&self, c: char) -> bool {
4833 self.kind(c) == CharKind::Whitespace
4834 }
4835
4836 pub fn is_word(&self, c: char) -> bool {
4837 self.kind(c) == CharKind::Word
4838 }
4839
4840 pub fn is_punctuation(&self, c: char) -> bool {
4841 self.kind(c) == CharKind::Punctuation
4842 }
4843
4844 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4845 if c.is_alphanumeric() || c == '_' {
4846 return CharKind::Word;
4847 }
4848
4849 if let Some(scope) = &self.scope {
4850 let characters = if self.for_completion {
4851 scope.completion_query_characters()
4852 } else {
4853 scope.word_characters()
4854 };
4855 if let Some(characters) = characters {
4856 if characters.contains(&c) {
4857 return CharKind::Word;
4858 }
4859 }
4860 }
4861
4862 if c.is_whitespace() {
4863 return CharKind::Whitespace;
4864 }
4865
4866 if ignore_punctuation {
4867 CharKind::Word
4868 } else {
4869 CharKind::Punctuation
4870 }
4871 }
4872
4873 pub fn kind(&self, c: char) -> CharKind {
4874 self.kind_with(c, self.ignore_punctuation)
4875 }
4876}
4877
4878/// Find all of the ranges of whitespace that occur at the ends of lines
4879/// in the given rope.
4880///
4881/// This could also be done with a regex search, but this implementation
4882/// avoids copying text.
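///
/// A hand-checked sketch (not compiled as a doctest); the expected offsets are
/// worked out for this exact input:
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n}\t\t\n");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..14, 16..18]);
/// ```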
4883pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4884 let mut ranges = Vec::new();
4885
4886 let mut offset = 0;
4887 let mut prev_chunk_trailing_whitespace_range = 0..0;
4888 for chunk in rope.chunks() {
4889 let mut prev_line_trailing_whitespace_range = 0..0;
4890 for (i, line) in chunk.split('\n').enumerate() {
4891 let line_end_offset = offset + line.len();
4892 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4893 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4894
4895 if i == 0 && trimmed_line_len == 0 {
4896 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4897 }
4898 if !prev_line_trailing_whitespace_range.is_empty() {
4899 ranges.push(prev_line_trailing_whitespace_range);
4900 }
4901
4902 offset = line_end_offset + 1;
4903 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4904 }
4905
4906 offset -= 1;
4907 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4908 }
4909
4910 if !prev_chunk_trailing_whitespace_range.is_empty() {
4911 ranges.push(prev_chunk_trailing_whitespace_range);
4912 }
4913
4914 ranges
4915}