1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
5 TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25pub use clock::ReplicaId;
26use clock::{Global, Lamport};
27use collections::{HashMap, HashSet};
28use fs::MTime;
29use futures::channel::oneshot;
30use gpui::{
31 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
32 Task, TaskLabel, TextStyle,
33};
34
35use lsp::{LanguageServerId, NumberOrString};
36use parking_lot::{Mutex, RawMutex, lock_api::MutexGuard};
37use serde::{Deserialize, Serialize};
38use serde_json::Value;
39use settings::WorktreeId;
40use smallvec::SmallVec;
41use smol::future::yield_now;
42use std::{
43 any::Any,
44 borrow::Cow,
45 cell::Cell,
46 cmp::{self, Ordering, Reverse},
47 collections::{BTreeMap, BTreeSet},
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, Range},
53 path::PathBuf,
54 rc,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
65 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
66 ToPointUtf16, Transaction, TransactionId, Unclipped,
67};
68use theme::{ActiveTheme as _, SyntaxTheme};
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
/// Indicates whether a [`Buffer`] has permission to be edited.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
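/// A zero-based row index within a buffer.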
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
113 pending_autoindent: Option<Task<()>>,
114 sync_parse_timeout: Duration,
115 syntax_map: Mutex<SyntaxMap>,
116 reparse: Option<Task<()>>,
117 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
118 non_text_state_update_count: usize,
119 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
120 remote_selections: TreeMap<ReplicaId, SelectionSet>,
121 diagnostics_timestamp: clock::Lamport,
122 completion_triggers: BTreeSet<String>,
123 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
124 completion_triggers_timestamp: clock::Lamport,
125 deferred_ops: OperationQueue<Operation>,
126 capability: Capability,
127 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
130 has_unsaved_edits: Cell<(clock::Global, bool)>,
131 change_bits: Vec<rc::Weak<Cell<bool>>>,
132 _subscriptions: Vec<gpui::Subscription>,
133 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
134}
135
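/// Cached data derived from the buffer's syntax trees (currently bracket matches),
/// computed per chunk of rows and invalidated whenever the buffer is reparsed.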
136#[derive(Debug, Clone)]
137pub struct TreeSitterData {
138 chunks: RowChunks,
139 brackets_by_chunks: Vec<Option<Vec<BracketMatch<usize>>>>,
140}
141
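/// The maximum number of buffer rows covered by a single chunk in [`TreeSitterData`].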
142const MAX_ROWS_IN_A_CHUNK: u32 = 50;
143
144impl TreeSitterData {
145 fn clear(&mut self) {
146 self.brackets_by_chunks = vec![None; self.chunks.len()];
147 }
148
149 fn new(snapshot: text::BufferSnapshot) -> Self {
150 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
151 Self {
152 brackets_by_chunks: vec![None; chunks.len()],
153 chunks,
154 }
155 }
156}
157
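/// Whether a reparse of the buffer's syntax trees is currently in progress.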
158#[derive(Copy, Clone, Debug, PartialEq, Eq)]
159pub enum ParseStatus {
160 Idle,
161 Parsing,
162}
163
164struct BufferBranchState {
165 base_buffer: Entity<Buffer>,
166 merged_operations: Vec<Lamport>,
167}
168
169/// An immutable, cheaply cloneable representation of a fixed
170/// state of a buffer.
171pub struct BufferSnapshot {
172 pub text: text::BufferSnapshot,
173 pub syntax: SyntaxSnapshot,
174 file: Option<Arc<dyn File>>,
175 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
176 remote_selections: TreeMap<ReplicaId, SelectionSet>,
177 language: Option<Arc<Language>>,
178 non_text_state_update_count: usize,
179 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
180}
181
182/// The kind and amount of indentation in a particular line. For now,
183/// assumes that indentation is all the same character.
184#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
185pub struct IndentSize {
186 /// The number of bytes that comprise the indentation.
187 pub len: u32,
188 /// The kind of whitespace used for indentation.
189 pub kind: IndentKind,
190}
191
192/// A whitespace character that's used for indentation.
193#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
194pub enum IndentKind {
195 /// An ASCII space character.
196 #[default]
197 Space,
198 /// An ASCII tab character.
199 Tab,
200}
201
202/// The shape of a selection cursor.
203#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
204pub enum CursorShape {
205 /// A vertical bar
206 #[default]
207 Bar,
208 /// A block that surrounds the following character
209 Block,
210 /// An underline that runs along the following character
211 Underline,
212 /// A box drawn around the following character
213 Hollow,
214}
215
216impl From<settings::CursorShape> for CursorShape {
217 fn from(shape: settings::CursorShape) -> Self {
218 match shape {
219 settings::CursorShape::Bar => CursorShape::Bar,
220 settings::CursorShape::Block => CursorShape::Block,
221 settings::CursorShape::Underline => CursorShape::Underline,
222 settings::CursorShape::Hollow => CursorShape::Hollow,
223 }
224 }
225}
226
227#[derive(Clone, Debug)]
228struct SelectionSet {
229 line_mode: bool,
230 cursor_shape: CursorShape,
231 selections: Arc<[Selection<Anchor>]>,
232 lamport_timestamp: clock::Lamport,
233}
234
235/// A diagnostic associated with a certain range of a buffer.
236#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
237pub struct Diagnostic {
238 /// The name of the service that produced this diagnostic.
239 pub source: Option<String>,
240 /// A machine-readable code that identifies this diagnostic.
241 pub code: Option<NumberOrString>,
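    /// An optional URI that links to documentation describing this diagnostic's code.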
242 pub code_description: Option<lsp::Uri>,
243 /// Whether this diagnostic is a hint, warning, or error.
244 pub severity: DiagnosticSeverity,
245 /// The human-readable message associated with this diagnostic.
246 pub message: String,
    /// The human-readable message, in Markdown format.
248 pub markdown: Option<String>,
249 /// An id that identifies the group to which this diagnostic belongs.
250 ///
251 /// When a language server produces a diagnostic with
252 /// one or more associated diagnostics, those diagnostics are all
253 /// assigned a single group ID.
254 pub group_id: usize,
255 /// Whether this diagnostic is the primary diagnostic for its group.
256 ///
257 /// In a given group, the primary diagnostic is the top-level diagnostic
258 /// returned by the language server. The non-primary diagnostics are the
259 /// associated diagnostics.
260 pub is_primary: bool,
261 /// Whether this diagnostic is considered to originate from an analysis of
262 /// files on disk, as opposed to any unsaved buffer contents. This is a
263 /// property of a given diagnostic source, and is configured for a given
264 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
265 /// for the language server.
266 pub is_disk_based: bool,
267 /// Whether this diagnostic marks unnecessary code.
268 pub is_unnecessary: bool,
    /// Allows quickly separating diagnostic groups based on their source.
270 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the server when code actions are requested for this diagnostic.
272 pub data: Option<Value>,
273 /// Whether to underline the corresponding text range in the editor.
274 pub underline: bool,
275}
276
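/// How a diagnostic was produced: pulled by the client, pushed by the language server,
/// or obtained from some other source.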
277#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
278pub enum DiagnosticSourceKind {
279 Pulled,
280 Pushed,
281 Other,
282}
283
284/// An operation used to synchronize this buffer with its other replicas.
285#[derive(Clone, Debug, PartialEq)]
286pub enum Operation {
287 /// A text operation.
288 Buffer(text::Operation),
289
290 /// An update to the buffer's diagnostics.
291 UpdateDiagnostics {
292 /// The id of the language server that produced the new diagnostics.
293 server_id: LanguageServerId,
294 /// The diagnostics.
295 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
296 /// The buffer's lamport timestamp.
297 lamport_timestamp: clock::Lamport,
298 },
299
300 /// An update to the most recent selections in this buffer.
301 UpdateSelections {
302 /// The selections.
303 selections: Arc<[Selection<Anchor>]>,
304 /// The buffer's lamport timestamp.
305 lamport_timestamp: clock::Lamport,
306 /// Whether the selections are in 'line mode'.
307 line_mode: bool,
308 /// The [`CursorShape`] associated with these selections.
309 cursor_shape: CursorShape,
310 },
311
312 /// An update to the characters that should trigger autocompletion
313 /// for this buffer.
314 UpdateCompletionTriggers {
315 /// The characters that trigger autocompletion.
316 triggers: Vec<String>,
317 /// The buffer's lamport timestamp.
318 lamport_timestamp: clock::Lamport,
319 /// The language server ID.
320 server_id: LanguageServerId,
321 },
322
323 /// An update to the line ending type of this buffer.
324 UpdateLineEnding {
325 /// The line ending type.
326 line_ending: LineEnding,
327 /// The buffer's lamport timestamp.
328 lamport_timestamp: clock::Lamport,
329 },
330}
331
332/// An event that occurs in a buffer.
333#[derive(Clone, Debug, PartialEq)]
334pub enum BufferEvent {
335 /// The buffer was changed in a way that must be
336 /// propagated to its other replicas.
337 Operation {
338 operation: Operation,
339 is_local: bool,
340 },
341 /// The buffer was edited.
342 Edited,
343 /// The buffer's `dirty` bit changed.
344 DirtyChanged,
345 /// The buffer was saved.
346 Saved,
347 /// The buffer's file was changed on disk.
348 FileHandleChanged,
349 /// The buffer was reloaded.
350 Reloaded,
    /// The buffer needs to be reloaded.
352 ReloadNeeded,
353 /// The buffer's language was changed.
354 LanguageChanged,
355 /// The buffer's syntax trees were updated.
356 Reparsed,
357 /// The buffer's diagnostics were updated.
358 DiagnosticsUpdated,
359 /// The buffer gained or lost editing capabilities.
360 CapabilityChanged,
361}
362
363/// The file associated with a buffer.
364pub trait File: Send + Sync + Any {
365 /// Returns the [`LocalFile`] associated with this file, if the
366 /// file is local.
367 fn as_local(&self) -> Option<&dyn LocalFile>;
368
369 /// Returns whether this file is local.
370 fn is_local(&self) -> bool {
371 self.as_local().is_some()
372 }
373
374 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
375 /// only available in some states, such as modification time.
376 fn disk_state(&self) -> DiskState;
377
378 /// Returns the path of this file relative to the worktree's root directory.
379 fn path(&self) -> &Arc<RelPath>;
380
381 /// Returns the path of this file relative to the worktree's parent directory (this means it
382 /// includes the name of the worktree's root folder).
383 fn full_path(&self, cx: &App) -> PathBuf;
384
385 /// Returns the path style of this file.
386 fn path_style(&self, cx: &App) -> PathStyle;
387
388 /// Returns the last component of this handle's absolute path. If this handle refers to the root
389 /// of its worktree, then this method will return the name of the worktree itself.
390 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
391
392 /// Returns the id of the worktree to which this file belongs.
393 ///
394 /// This is needed for looking up project-specific settings.
395 fn worktree_id(&self, cx: &App) -> WorktreeId;
396
397 /// Converts this file into a protobuf message.
398 fn to_proto(&self, cx: &App) -> rpc::proto::File;
399
    /// Returns whether Zed considers this to be a private file.
401 fn is_private(&self) -> bool;
402}
403
404/// The file's storage status - whether it's stored (`Present`), and if so when it was last
405/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
406/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
407/// indicator for new files.
408#[derive(Copy, Clone, Debug, PartialEq)]
409pub enum DiskState {
410 /// File created in Zed that has not been saved.
411 New,
412 /// File present on the filesystem.
413 Present { mtime: MTime },
414 /// Deleted file that was previously present.
415 Deleted,
416}
417
418impl DiskState {
419 /// Returns the file's last known modification time on disk.
420 pub fn mtime(self) -> Option<MTime> {
421 match self {
422 DiskState::New => None,
423 DiskState::Present { mtime } => Some(mtime),
424 DiskState::Deleted => None,
425 }
426 }
427
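    /// Returns whether the file currently exists on disk.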
428 pub fn exists(&self) -> bool {
429 match self {
430 DiskState::New => false,
431 DiskState::Present { .. } => true,
432 DiskState::Deleted => false,
433 }
434 }
435}
436
437/// The file associated with a buffer, in the case where the file is on the local disk.
438pub trait LocalFile: File {
    /// Returns the absolute path of this file.
440 fn abs_path(&self, cx: &App) -> PathBuf;
441
442 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
443 fn load(&self, cx: &App) -> Task<Result<String>>;
444
445 /// Loads the file's contents from disk.
446 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
447}
448
449/// The auto-indent behavior associated with an editing operation.
450/// For some editing operations, each affected line of text has its
451/// indentation recomputed. For other operations, the entire block
452/// of edited text is adjusted uniformly.
453#[derive(Clone, Debug)]
454pub enum AutoindentMode {
455 /// Indent each line of inserted text.
456 EachLine,
457 /// Apply the same indentation adjustment to all of the lines
458 /// in a given insertion.
459 Block {
460 /// The original indentation column of the first line of each
461 /// insertion, if it has been copied.
462 ///
463 /// Knowing this makes it possible to preserve the relative indentation
464 /// of every line in the insertion from when it was copied.
465 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is auto-indented to column `b`, then every other line of the insertion
        /// has its indentation adjusted by `b - a` columns. For example, text copied
        /// from column 4 and auto-indented to column 8 is shifted right by 4 columns.
469 original_indent_columns: Vec<Option<u32>>,
470 },
471}
472
473#[derive(Clone)]
474struct AutoindentRequest {
475 before_edit: BufferSnapshot,
476 entries: Vec<AutoindentRequestEntry>,
477 is_block_mode: bool,
478 ignore_empty_lines: bool,
479}
480
481#[derive(Debug, Clone)]
482struct AutoindentRequestEntry {
483 /// A range of the buffer whose indentation should be adjusted.
484 range: Range<Anchor>,
485 /// Whether or not these lines should be considered brand new, for the
486 /// purpose of auto-indent. When text is not new, its indentation will
487 /// only be adjusted if the suggested indentation level has *changed*
488 /// since the edit was made.
489 first_line_is_new: bool,
490 indent_size: IndentSize,
491 original_indent_column: Option<u32>,
492}
493
494#[derive(Debug)]
495struct IndentSuggestion {
496 basis_row: u32,
497 delta: Ordering,
498 within_error: bool,
499}
500
501struct BufferChunkHighlights<'a> {
502 captures: SyntaxMapCaptures<'a>,
503 next_capture: Option<SyntaxMapCapture<'a>>,
504 stack: Vec<(usize, HighlightId)>,
505 highlight_maps: Vec<HighlightMap>,
506}
507
508/// An iterator that yields chunks of a buffer's text, along with their
509/// syntax highlights and diagnostic status.
510pub struct BufferChunks<'a> {
511 buffer_snapshot: Option<&'a BufferSnapshot>,
512 range: Range<usize>,
513 chunks: text::Chunks<'a>,
514 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
515 error_depth: usize,
516 warning_depth: usize,
517 information_depth: usize,
518 hint_depth: usize,
519 unnecessary_depth: usize,
520 underline: bool,
521 highlights: Option<BufferChunkHighlights<'a>>,
522}
523
524/// A chunk of a buffer's text, along with its syntax highlight and
525/// diagnostic status.
526#[derive(Clone, Debug, Default)]
527pub struct Chunk<'a> {
528 /// The text of the chunk.
529 pub text: &'a str,
530 /// The syntax highlighting style of the chunk.
531 pub syntax_highlight_id: Option<HighlightId>,
532 /// The highlight style that has been applied to this chunk in
533 /// the editor.
534 pub highlight_style: Option<HighlightStyle>,
    /// The severity of the diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which character positions in this chunk are tabs.
    pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
    pub chars: u128,
541 /// Whether this chunk of text is marked as unnecessary.
542 pub is_unnecessary: bool,
543 /// Whether this chunk of text was originally a tab character.
544 pub is_tab: bool,
545 /// Whether this chunk of text was originally an inlay.
546 pub is_inlay: bool,
547 /// Whether to underline the corresponding text range in the editor.
548 pub underline: bool,
549}
550
551/// A set of edits to a given version of a buffer, computed asynchronously.
552#[derive(Debug)]
553pub struct Diff {
554 pub base_version: clock::Global,
555 pub line_ending: LineEnding,
556 pub edits: Vec<(Range<usize>, Arc<str>)>,
557}
558
559#[derive(Debug, Clone, Copy)]
560pub(crate) struct DiagnosticEndpoint {
561 offset: usize,
562 is_start: bool,
563 underline: bool,
564 severity: DiagnosticSeverity,
565 is_unnecessary: bool,
566}
567
568/// A class of characters, used for characterizing a run of text.
569#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
570pub enum CharKind {
571 /// Whitespace.
572 Whitespace,
573 /// Punctuation.
574 Punctuation,
575 /// Word.
576 Word,
577}
578
579/// Context for character classification within a specific scope.
580#[derive(Copy, Clone, Eq, PartialEq, Debug)]
581pub enum CharScopeContext {
582 /// Character classification for completion queries.
583 ///
584 /// This context treats certain characters as word constituents that would
585 /// normally be considered punctuation, such as '-' in Tailwind classes
586 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
587 Completion,
588 /// Character classification for linked edits.
589 ///
590 /// This context handles characters that should be treated as part of
591 /// identifiers during linked editing operations, such as '.' in JSX
592 /// component names like `<Animated.View>`.
593 LinkedEdit,
594}
595
/// A runnable is a set of data about a buffer region that can be resolved into a task.
597pub struct Runnable {
598 pub tags: SmallVec<[RunnableTag; 1]>,
599 pub language: Arc<Language>,
600 pub buffer: BufferId,
601}
602
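/// A snippet of text with pre-resolved syntax highlights, ready to be rendered.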
603#[derive(Default, Clone, Debug)]
604pub struct HighlightedText {
605 pub text: SharedString,
606 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
607}
608
609#[derive(Default, Debug)]
610struct HighlightedTextBuilder {
611 pub text: String,
612 highlights: Vec<(Range<usize>, HighlightStyle)>,
613}
614
615impl HighlightedText {
616 pub fn from_buffer_range<T: ToOffset>(
617 range: Range<T>,
618 snapshot: &text::BufferSnapshot,
619 syntax_snapshot: &SyntaxSnapshot,
620 override_style: Option<HighlightStyle>,
621 syntax_theme: &SyntaxTheme,
622 ) -> Self {
623 let mut highlighted_text = HighlightedTextBuilder::default();
624 highlighted_text.add_text_from_buffer_range(
625 range,
626 snapshot,
627 syntax_snapshot,
628 override_style,
629 syntax_theme,
630 );
631 highlighted_text.build()
632 }
633
634 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
635 gpui::StyledText::new(self.text.clone())
636 .with_default_highlights(default_style, self.highlights.iter().cloned())
637 }
638
    /// Returns the first line, with leading whitespace trimmed unless a highlight starts
    /// within it, and a boolean indicating whether more lines follow.
641 pub fn first_line_preview(self) -> (Self, bool) {
642 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
643 let first_line = &self.text[..newline_ix];
644
645 // Trim leading whitespace, unless an edit starts prior to it.
646 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
647 if let Some((first_highlight_range, _)) = self.highlights.first() {
648 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
649 }
650
651 let preview_text = &first_line[preview_start_ix..];
652 let preview_highlights = self
653 .highlights
654 .into_iter()
655 .skip_while(|(range, _)| range.end <= preview_start_ix)
656 .take_while(|(range, _)| range.start < newline_ix)
657 .filter_map(|(mut range, highlight)| {
658 range.start = range.start.saturating_sub(preview_start_ix);
659 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
660 if range.is_empty() {
661 None
662 } else {
663 Some((range, highlight))
664 }
665 });
666
667 let preview = Self {
668 text: SharedString::new(preview_text),
669 highlights: preview_highlights.collect(),
670 };
671
672 (preview, self.text.len() > newline_ix)
673 }
674}
675
676impl HighlightedTextBuilder {
677 pub fn build(self) -> HighlightedText {
678 HighlightedText {
679 text: self.text.into(),
680 highlights: self.highlights,
681 }
682 }
683
684 pub fn add_text_from_buffer_range<T: ToOffset>(
685 &mut self,
686 range: Range<T>,
687 snapshot: &text::BufferSnapshot,
688 syntax_snapshot: &SyntaxSnapshot,
689 override_style: Option<HighlightStyle>,
690 syntax_theme: &SyntaxTheme,
691 ) {
692 let range = range.to_offset(snapshot);
693 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
694 let start = self.text.len();
695 self.text.push_str(chunk.text);
696 let end = self.text.len();
697
698 if let Some(highlight_style) = chunk
699 .syntax_highlight_id
700 .and_then(|id| id.style(syntax_theme))
701 {
702 let highlight_style = override_style.map_or(highlight_style, |override_style| {
703 highlight_style.highlight(override_style)
704 });
705 self.highlights.push((start..end, highlight_style));
706 } else if let Some(override_style) = override_style {
707 self.highlights.push((start..end, override_style));
708 }
709 }
710 }
711
712 fn highlighted_chunks<'a>(
713 range: Range<usize>,
714 snapshot: &'a text::BufferSnapshot,
715 syntax_snapshot: &'a SyntaxSnapshot,
716 ) -> BufferChunks<'a> {
717 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
718 grammar
719 .highlights_config
720 .as_ref()
721 .map(|config| &config.query)
722 });
723
724 let highlight_maps = captures
725 .grammars()
726 .iter()
727 .map(|grammar| grammar.highlight_map())
728 .collect();
729
730 BufferChunks::new(
731 snapshot.as_rope(),
732 range,
733 Some((captures, highlight_maps)),
734 false,
735 None,
736 )
737 }
738}
739
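/// A preview of a set of edits: snapshots of the buffer text before and after the
/// edits were applied, used to render highlighted previews and unified diffs without
/// modifying the underlying buffer.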
740#[derive(Clone)]
741pub struct EditPreview {
742 old_snapshot: text::BufferSnapshot,
743 applied_edits_snapshot: text::BufferSnapshot,
744 syntax_snapshot: SyntaxSnapshot,
745}
746
747impl EditPreview {
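    /// Renders the previewed edits as a unified diff with three lines of surrounding
    /// context, or `None` if there are no edits.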
748 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
749 let (first, _) = edits.first()?;
750 let (last, _) = edits.last()?;
751
752 let start = first.start.to_point(&self.old_snapshot);
753 let old_end = last.end.to_point(&self.old_snapshot);
754 let new_end = last
755 .end
756 .bias_right(&self.old_snapshot)
757 .to_point(&self.applied_edits_snapshot);
758
759 let start = Point::new(start.row.saturating_sub(3), 0);
760 let old_end = Point::new(old_end.row + 3, 0).min(self.old_snapshot.max_point());
761 let new_end = Point::new(new_end.row + 3, 0).min(self.applied_edits_snapshot.max_point());
762
763 Some(unified_diff(
764 &self
765 .old_snapshot
766 .text_for_range(start..old_end)
767 .collect::<String>(),
768 &self
769 .applied_edits_snapshot
770 .text_for_range(start..new_end)
771 .collect::<String>(),
772 ))
773 }
774
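    /// Produces highlighted text for the previewed edits, marking insertions (and,
    /// if `include_deletions` is set, deletions) with the theme's created and deleted
    /// background colors.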
775 pub fn highlight_edits(
776 &self,
777 current_snapshot: &BufferSnapshot,
778 edits: &[(Range<Anchor>, impl AsRef<str>)],
779 include_deletions: bool,
780 cx: &App,
781 ) -> HighlightedText {
782 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
783 return HighlightedText::default();
784 };
785
786 let mut highlighted_text = HighlightedTextBuilder::default();
787
788 let visible_range_in_preview_snapshot =
789 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
790 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
791
792 let insertion_highlight_style = HighlightStyle {
793 background_color: Some(cx.theme().status().created_background),
794 ..Default::default()
795 };
796 let deletion_highlight_style = HighlightStyle {
797 background_color: Some(cx.theme().status().deleted_background),
798 ..Default::default()
799 };
800 let syntax_theme = cx.theme().syntax();
801
802 for (range, edit_text) in edits {
803 let edit_new_end_in_preview_snapshot = range
804 .end
805 .bias_right(&self.old_snapshot)
806 .to_offset(&self.applied_edits_snapshot);
807 let edit_start_in_preview_snapshot =
808 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
809
810 let unchanged_range_in_preview_snapshot =
811 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
812 if !unchanged_range_in_preview_snapshot.is_empty() {
813 highlighted_text.add_text_from_buffer_range(
814 unchanged_range_in_preview_snapshot,
815 &self.applied_edits_snapshot,
816 &self.syntax_snapshot,
817 None,
818 syntax_theme,
819 );
820 }
821
822 let range_in_current_snapshot = range.to_offset(current_snapshot);
823 if include_deletions && !range_in_current_snapshot.is_empty() {
824 highlighted_text.add_text_from_buffer_range(
825 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
828 Some(deletion_highlight_style),
829 syntax_theme,
830 );
831 }
832
833 if !edit_text.as_ref().is_empty() {
834 highlighted_text.add_text_from_buffer_range(
835 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
836 &self.applied_edits_snapshot,
837 &self.syntax_snapshot,
838 Some(insertion_highlight_style),
839 syntax_theme,
840 );
841 }
842
843 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
844 }
845
846 highlighted_text.add_text_from_buffer_range(
847 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
848 &self.applied_edits_snapshot,
849 &self.syntax_snapshot,
850 None,
851 syntax_theme,
852 );
853
854 highlighted_text.build()
855 }
856
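    /// Creates a new local [`Buffer`] containing the text with the previewed edits applied.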
857 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
858 cx.new(|cx| {
859 let mut buffer = Buffer::local_normalized(
860 self.applied_edits_snapshot.as_rope().clone(),
861 self.applied_edits_snapshot.line_ending(),
862 cx,
863 );
864 buffer.set_language(self.syntax_snapshot.root_language(), cx);
865 buffer
866 })
867 }
868
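    /// Computes the range of the previewed snapshot that is covered by the given edits,
    /// expanded to whole lines.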
869 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
870 let (first, _) = edits.first()?;
871 let (last, _) = edits.last()?;
872
873 let start = first
874 .start
875 .bias_left(&self.old_snapshot)
876 .to_point(&self.applied_edits_snapshot);
877 let end = last
878 .end
879 .bias_right(&self.old_snapshot)
880 .to_point(&self.applied_edits_snapshot);
881
882 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
883 let range = Point::new(start.row, 0)
884 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
885
886 Some(range)
887 }
888}
889
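/// A pair of matching brackets, described by the ranges of the opening and closing
/// delimiters.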
890#[derive(Clone, Debug, PartialEq, Eq)]
891pub struct BracketMatch<T> {
892 pub open_range: Range<T>,
893 pub close_range: Range<T>,
894 pub newline_only: bool,
895 pub syntax_layer_depth: usize,
896 pub color_index: Option<usize>,
897}
898
899impl<T> BracketMatch<T> {
900 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
901 (self.open_range, self.close_range)
902 }
903}
904
905impl Buffer {
906 /// Create a new buffer with the given base text.
907 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
908 Self::build(
909 TextBuffer::new(
910 ReplicaId::LOCAL,
911 cx.entity_id().as_non_zero_u64().into(),
912 base_text.into(),
913 ),
914 None,
915 Capability::ReadWrite,
916 )
917 }
918
919 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
920 pub fn local_normalized(
921 base_text_normalized: Rope,
922 line_ending: LineEnding,
923 cx: &Context<Self>,
924 ) -> Self {
925 Self::build(
926 TextBuffer::new_normalized(
927 ReplicaId::LOCAL,
928 cx.entity_id().as_non_zero_u64().into(),
929 line_ending,
930 base_text_normalized,
931 ),
932 None,
933 Capability::ReadWrite,
934 )
935 }
936
937 /// Create a new buffer that is a replica of a remote buffer.
938 pub fn remote(
939 remote_id: BufferId,
940 replica_id: ReplicaId,
941 capability: Capability,
942 base_text: impl Into<String>,
943 ) -> Self {
944 Self::build(
945 TextBuffer::new(replica_id, remote_id, base_text.into()),
946 None,
947 capability,
948 )
949 }
950
951 /// Create a new buffer that is a replica of a remote buffer, populating its
952 /// state from the given protobuf message.
953 pub fn from_proto(
954 replica_id: ReplicaId,
955 capability: Capability,
956 message: proto::BufferState,
957 file: Option<Arc<dyn File>>,
958 ) -> Result<Self> {
959 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
960 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
961 let mut this = Self::build(buffer, file, capability);
962 this.text.set_line_ending(proto::deserialize_line_ending(
963 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
964 ));
965 this.saved_version = proto::deserialize_version(&message.saved_version);
966 this.saved_mtime = message.saved_mtime.map(|time| time.into());
967 Ok(this)
968 }
969
970 /// Serialize the buffer's state to a protobuf message.
971 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
972 proto::BufferState {
973 id: self.remote_id().into(),
974 file: self.file.as_ref().map(|f| f.to_proto(cx)),
975 base_text: self.base_text().to_string(),
976 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
977 saved_version: proto::serialize_version(&self.saved_version),
978 saved_mtime: self.saved_mtime.map(|time| time.into()),
979 }
980 }
981
982 /// Serialize as protobufs all of the changes to the buffer since the given version.
983 pub fn serialize_ops(
984 &self,
985 since: Option<clock::Global>,
986 cx: &App,
987 ) -> Task<Vec<proto::Operation>> {
988 let mut operations = Vec::new();
989 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
990
991 operations.extend(self.remote_selections.iter().map(|(_, set)| {
992 proto::serialize_operation(&Operation::UpdateSelections {
993 selections: set.selections.clone(),
994 lamport_timestamp: set.lamport_timestamp,
995 line_mode: set.line_mode,
996 cursor_shape: set.cursor_shape,
997 })
998 }));
999
1000 for (server_id, diagnostics) in &self.diagnostics {
1001 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1002 lamport_timestamp: self.diagnostics_timestamp,
1003 server_id: *server_id,
1004 diagnostics: diagnostics.iter().cloned().collect(),
1005 }));
1006 }
1007
1008 for (server_id, completions) in &self.completion_triggers_per_language_server {
1009 operations.push(proto::serialize_operation(
1010 &Operation::UpdateCompletionTriggers {
1011 triggers: completions.iter().cloned().collect(),
1012 lamport_timestamp: self.completion_triggers_timestamp,
1013 server_id: *server_id,
1014 },
1015 ));
1016 }
1017
1018 let text_operations = self.text.operations().clone();
1019 cx.background_spawn(async move {
1020 let since = since.unwrap_or_default();
1021 operations.extend(
1022 text_operations
1023 .iter()
1024 .filter(|(_, op)| !since.observed(op.timestamp()))
1025 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1026 );
1027 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1028 operations
1029 })
1030 }
1031
1032 /// Assign a language to the buffer, returning the buffer.
1033 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1034 self.set_language(Some(language), cx);
1035 self
1036 }
1037
1038 /// Returns the [`Capability`] of this buffer.
1039 pub fn capability(&self) -> Capability {
1040 self.capability
1041 }
1042
1043 /// Whether this buffer can only be read.
1044 pub fn read_only(&self) -> bool {
1045 self.capability == Capability::ReadOnly
1046 }
1047
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1049 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1050 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1051 let snapshot = buffer.snapshot();
1052 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1053 let tree_sitter_data = TreeSitterData::new(snapshot);
1054 Self {
1055 saved_mtime,
1056 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1057 saved_version: buffer.version(),
1058 preview_version: buffer.version(),
1059 reload_task: None,
1060 transaction_depth: 0,
1061 was_dirty_before_starting_transaction: None,
1062 has_unsaved_edits: Cell::new((buffer.version(), false)),
1063 text: buffer,
1064 branch_state: None,
1065 file,
1066 capability,
1067 syntax_map,
1068 reparse: None,
1069 non_text_state_update_count: 0,
1070 sync_parse_timeout: Duration::from_millis(1),
1071 parse_status: watch::channel(ParseStatus::Idle),
1072 autoindent_requests: Default::default(),
1073 wait_for_autoindent_txs: Default::default(),
1074 pending_autoindent: Default::default(),
1075 language: None,
1076 remote_selections: Default::default(),
1077 diagnostics: Default::default(),
1078 diagnostics_timestamp: Lamport::MIN,
1079 completion_triggers: Default::default(),
1080 completion_triggers_per_language_server: Default::default(),
1081 completion_triggers_timestamp: Lamport::MIN,
1082 deferred_ops: OperationQueue::new(),
1083 has_conflict: false,
1084 change_bits: Default::default(),
1085 _subscriptions: Vec::new(),
1086 }
1087 }
1088
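    /// Builds a [`BufferSnapshot`] for the given text without constructing a [`Buffer`],
    /// performing the initial parse in the returned future.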
1089 pub fn build_snapshot(
1090 text: Rope,
1091 language: Option<Arc<Language>>,
1092 language_registry: Option<Arc<LanguageRegistry>>,
1093 cx: &mut App,
1094 ) -> impl Future<Output = BufferSnapshot> + use<> {
1095 let entity_id = cx.reserve_entity::<Self>().entity_id();
1096 let buffer_id = entity_id.as_non_zero_u64().into();
1097 async move {
1098 let text =
1099 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1100 .snapshot();
1101 let mut syntax = SyntaxMap::new(&text).snapshot();
1102 if let Some(language) = language.clone() {
1103 let language_registry = language_registry.clone();
1104 syntax.reparse(&text, language_registry, language);
1105 }
1106 let tree_sitter_data = TreeSitterData::new(text.clone());
1107 BufferSnapshot {
1108 text,
1109 syntax,
1110 file: None,
1111 diagnostics: Default::default(),
1112 remote_selections: Default::default(),
1113 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1114 language,
1115 non_text_state_update_count: 0,
1116 }
1117 }
1118 }
1119
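    /// Builds a [`BufferSnapshot`] for an empty buffer.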
1120 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1121 let entity_id = cx.reserve_entity::<Self>().entity_id();
1122 let buffer_id = entity_id.as_non_zero_u64().into();
1123 let text = TextBuffer::new_normalized(
1124 ReplicaId::LOCAL,
1125 buffer_id,
1126 Default::default(),
1127 Rope::new(),
1128 )
1129 .snapshot();
1130 let syntax = SyntaxMap::new(&text).snapshot();
1131 let tree_sitter_data = TreeSitterData::new(text.clone());
1132 BufferSnapshot {
1133 text,
1134 syntax,
1135 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1136 file: None,
1137 diagnostics: Default::default(),
1138 remote_selections: Default::default(),
1139 language: None,
1140 non_text_state_update_count: 0,
1141 }
1142 }
1143
1144 #[cfg(any(test, feature = "test-support"))]
1145 pub fn build_snapshot_sync(
1146 text: Rope,
1147 language: Option<Arc<Language>>,
1148 language_registry: Option<Arc<LanguageRegistry>>,
1149 cx: &mut App,
1150 ) -> BufferSnapshot {
1151 let entity_id = cx.reserve_entity::<Self>().entity_id();
1152 let buffer_id = entity_id.as_non_zero_u64().into();
1153 let text =
1154 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1155 .snapshot();
1156 let mut syntax = SyntaxMap::new(&text).snapshot();
1157 if let Some(language) = language.clone() {
1158 syntax.reparse(&text, language_registry, language);
1159 }
1160 let tree_sitter_data = TreeSitterData::new(text.clone());
1161 BufferSnapshot {
1162 text,
1163 syntax,
1164 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1165 file: None,
1166 diagnostics: Default::default(),
1167 remote_selections: Default::default(),
1168 language,
1169 non_text_state_update_count: 0,
1170 }
1171 }
1172
1173 /// Retrieve a snapshot of the buffer's current state. This is computationally
1174 /// cheap, and allows reading from the buffer on a background thread.
1175 pub fn snapshot(&self) -> BufferSnapshot {
1176 let text = self.text.snapshot();
1177 let mut syntax_map = self.syntax_map.lock();
1178 syntax_map.interpolate(&text);
1179 let syntax = syntax_map.snapshot();
1180
1181 BufferSnapshot {
1182 text,
1183 syntax,
1184 tree_sitter_data: self.tree_sitter_data.clone(),
1185 file: self.file.clone(),
1186 remote_selections: self.remote_selections.clone(),
1187 diagnostics: self.diagnostics.clone(),
1188 language: self.language.clone(),
1189 non_text_state_update_count: self.non_text_state_update_count,
1190 }
1191 }
1192
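    /// Creates a branch of this buffer: a new buffer that starts from this buffer's
    /// current state, receives its subsequent operations, and can merge its own edits
    /// back into it via [`Buffer::merge_into_base`].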
1193 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1194 let this = cx.entity();
1195 cx.new(|cx| {
1196 let mut branch = Self {
1197 branch_state: Some(BufferBranchState {
1198 base_buffer: this.clone(),
1199 merged_operations: Default::default(),
1200 }),
1201 language: self.language.clone(),
1202 has_conflict: self.has_conflict,
1203 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1204 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1205 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1206 };
1207 if let Some(language_registry) = self.language_registry() {
1208 branch.set_language_registry(language_registry);
1209 }
1210
1211 // Reparse the branch buffer so that we get syntax highlighting immediately.
1212 branch.reparse(cx);
1213
1214 branch
1215 })
1216 }
1217
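    /// Computes an [`EditPreview`] for the given edits on a background thread, without
    /// modifying this buffer.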
1218 pub fn preview_edits(
1219 &self,
1220 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1221 cx: &App,
1222 ) -> Task<EditPreview> {
1223 let registry = self.language_registry();
1224 let language = self.language().cloned();
1225 let old_snapshot = self.text.snapshot();
1226 let mut branch_buffer = self.text.branch();
1227 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1228 cx.background_spawn(async move {
1229 if !edits.is_empty() {
1230 if let Some(language) = language.clone() {
1231 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1232 }
1233
1234 branch_buffer.edit(edits.iter().cloned());
1235 let snapshot = branch_buffer.snapshot();
1236 syntax_snapshot.interpolate(&snapshot);
1237
1238 if let Some(language) = language {
1239 syntax_snapshot.reparse(&snapshot, registry, language);
1240 }
1241 }
1242 EditPreview {
1243 old_snapshot,
1244 applied_edits_snapshot: branch_buffer.snapshot(),
1245 syntax_snapshot,
1246 }
1247 })
1248 }
1249
1250 /// Applies all of the changes in this buffer that intersect any of the
1251 /// given `ranges` to its base buffer.
1252 ///
1253 /// If `ranges` is empty, then all changes will be applied. This buffer must
1254 /// be a branch buffer to call this method.
1255 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1256 let Some(base_buffer) = self.base_buffer() else {
1257 debug_panic!("not a branch buffer");
1258 return;
1259 };
1260
1261 let mut ranges = if ranges.is_empty() {
1262 &[0..usize::MAX]
1263 } else {
1264 ranges.as_slice()
1265 }
1266 .iter()
1267 .peekable();
1268
1269 let mut edits = Vec::new();
1270 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1271 let mut is_included = false;
1272 while let Some(range) = ranges.peek() {
1273 if range.end < edit.new.start {
1274 ranges.next().unwrap();
1275 } else {
1276 if range.start <= edit.new.end {
1277 is_included = true;
1278 }
1279 break;
1280 }
1281 }
1282
1283 if is_included {
1284 edits.push((
1285 edit.old.clone(),
1286 self.text_for_range(edit.new.clone()).collect::<String>(),
1287 ));
1288 }
1289 }
1290
1291 let operation = base_buffer.update(cx, |base_buffer, cx| {
1292 // cx.emit(BufferEvent::DiffBaseChanged);
1293 base_buffer.edit(edits, None, cx)
1294 });
1295
1296 if let Some(operation) = operation
1297 && let Some(BufferBranchState {
1298 merged_operations, ..
1299 }) = &mut self.branch_state
1300 {
1301 merged_operations.push(operation);
1302 }
1303 }
1304
1305 fn on_base_buffer_event(
1306 &mut self,
1307 _: Entity<Buffer>,
1308 event: &BufferEvent,
1309 cx: &mut Context<Self>,
1310 ) {
1311 let BufferEvent::Operation { operation, .. } = event else {
1312 return;
1313 };
1314 let Some(BufferBranchState {
1315 merged_operations, ..
1316 }) = &mut self.branch_state
1317 else {
1318 return;
1319 };
1320
1321 let mut operation_to_undo = None;
1322 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1323 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1324 {
1325 merged_operations.remove(ix);
1326 operation_to_undo = Some(operation.timestamp);
1327 }
1328
1329 self.apply_ops([operation.clone()], cx);
1330
1331 if let Some(timestamp) = operation_to_undo {
1332 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1333 self.undo_operations(counts, cx);
1334 }
1335 }
1336
1337 #[cfg(test)]
1338 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1339 &self.text
1340 }
1341
1342 /// Retrieve a snapshot of the buffer's raw text, without any
1343 /// language-related state like the syntax tree or diagnostics.
1344 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1345 self.text.snapshot()
1346 }
1347
1348 /// The file associated with the buffer, if any.
1349 pub fn file(&self) -> Option<&Arc<dyn File>> {
1350 self.file.as_ref()
1351 }
1352
1353 /// The version of the buffer that was last saved or reloaded from disk.
1354 pub fn saved_version(&self) -> &clock::Global {
1355 &self.saved_version
1356 }
1357
1358 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1359 pub fn saved_mtime(&self) -> Option<MTime> {
1360 self.saved_mtime
1361 }
1362
1363 /// Assign a language to the buffer.
1364 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1365 self.non_text_state_update_count += 1;
1366 self.syntax_map.lock().clear(&self.text);
1367 self.language = language;
1368 self.was_changed();
1369 self.reparse(cx);
1370 cx.emit(BufferEvent::LanguageChanged);
1371 }
1372
1373 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1374 /// other languages if parts of the buffer are written in different languages.
1375 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1376 self.syntax_map
1377 .lock()
1378 .set_language_registry(language_registry);
1379 }
1380
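    /// Returns the language registry assigned to this buffer, if any.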
1381 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1382 self.syntax_map.lock().language_registry()
1383 }
1384
1385 /// Assign the line ending type to the buffer.
1386 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1387 self.text.set_line_ending(line_ending);
1388
1389 let lamport_timestamp = self.text.lamport_clock.tick();
1390 self.send_operation(
1391 Operation::UpdateLineEnding {
1392 line_ending,
1393 lamport_timestamp,
1394 },
1395 true,
1396 cx,
1397 );
1398 }
1399
1400 /// Assign the buffer a new [`Capability`].
1401 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1402 if self.capability != capability {
1403 self.capability = capability;
1404 cx.emit(BufferEvent::CapabilityChanged)
1405 }
1406 }
1407
1408 /// This method is called to signal that the buffer has been saved.
1409 pub fn did_save(
1410 &mut self,
1411 version: clock::Global,
1412 mtime: Option<MTime>,
1413 cx: &mut Context<Self>,
1414 ) {
1415 self.saved_version = version.clone();
1416 self.has_unsaved_edits.set((version, false));
1417 self.has_conflict = false;
1418 self.saved_mtime = mtime;
1419 self.was_changed();
1420 cx.emit(BufferEvent::Saved);
1421 cx.notify();
1422 }
1423
1424 /// Reloads the contents of the buffer from disk.
1425 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1426 let (tx, rx) = futures::channel::oneshot::channel();
1427 let prev_version = self.text.version();
1428 self.reload_task = Some(cx.spawn(async move |this, cx| {
1429 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1430 let file = this.file.as_ref()?.as_local()?;
1431
1432 Some((file.disk_state().mtime(), file.load(cx)))
1433 })?
1434 else {
1435 return Ok(());
1436 };
1437
1438 let new_text = new_text.await?;
1439 let diff = this
1440 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1441 .await;
1442 this.update(cx, |this, cx| {
1443 if this.version() == diff.base_version {
1444 this.finalize_last_transaction();
1445 this.apply_diff(diff, cx);
1446 tx.send(this.finalize_last_transaction().cloned()).ok();
1447 this.has_conflict = false;
1448 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1449 } else {
1450 if !diff.edits.is_empty()
1451 || this
1452 .edits_since::<usize>(&diff.base_version)
1453 .next()
1454 .is_some()
1455 {
1456 this.has_conflict = true;
1457 }
1458
1459 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1460 }
1461
1462 this.reload_task.take();
1463 })
1464 }));
1465 rx
1466 }
1467
1468 /// This method is called to signal that the buffer has been reloaded.
1469 pub fn did_reload(
1470 &mut self,
1471 version: clock::Global,
1472 line_ending: LineEnding,
1473 mtime: Option<MTime>,
1474 cx: &mut Context<Self>,
1475 ) {
1476 self.saved_version = version;
1477 self.has_unsaved_edits
1478 .set((self.saved_version.clone(), false));
1479 self.text.set_line_ending(line_ending);
1480 self.saved_mtime = mtime;
1481 cx.emit(BufferEvent::Reloaded);
1482 cx.notify();
1483 }
1484
1485 /// Updates the [`File`] backing this buffer. This should be called when
1486 /// the file has changed or has been deleted.
1487 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1488 let was_dirty = self.is_dirty();
1489 let mut file_changed = false;
1490
1491 if let Some(old_file) = self.file.as_ref() {
1492 if new_file.path() != old_file.path() {
1493 file_changed = true;
1494 }
1495
1496 let old_state = old_file.disk_state();
1497 let new_state = new_file.disk_state();
1498 if old_state != new_state {
1499 file_changed = true;
1500 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1501 cx.emit(BufferEvent::ReloadNeeded)
1502 }
1503 }
1504 } else {
1505 file_changed = true;
1506 };
1507
1508 self.file = Some(new_file);
1509 if file_changed {
1510 self.was_changed();
1511 self.non_text_state_update_count += 1;
1512 if was_dirty != self.is_dirty() {
1513 cx.emit(BufferEvent::DirtyChanged);
1514 }
1515 cx.emit(BufferEvent::FileHandleChanged);
1516 cx.notify();
1517 }
1518 }
1519
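    /// Returns the base buffer of this branch, if this buffer is a branch.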
1520 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1521 Some(self.branch_state.as_ref()?.base_buffer.clone())
1522 }
1523
1524 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1525 pub fn language(&self) -> Option<&Arc<Language>> {
1526 self.language.as_ref()
1527 }
1528
1529 /// Returns the [`Language`] at the given location.
1530 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1531 let offset = position.to_offset(self);
1532 let mut is_first = true;
1533 let start_anchor = self.anchor_before(offset);
1534 let end_anchor = self.anchor_after(offset);
1535 self.syntax_map
1536 .lock()
1537 .layers_for_range(offset..offset, &self.text, false)
1538 .filter(|layer| {
1539 if is_first {
1540 is_first = false;
1541 return true;
1542 }
1543
1544 layer
1545 .included_sub_ranges
1546 .map(|sub_ranges| {
1547 sub_ranges.iter().any(|sub_range| {
1548 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1549 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1550 !is_before_start && !is_after_end
1551 })
1552 })
1553 .unwrap_or(true)
1554 })
1555 .last()
1556 .map(|info| info.language.clone())
1557 .or_else(|| self.language.clone())
1558 }
1559
1560 /// Returns each [`Language`] for the active syntax layers at the given location.
1561 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1562 let offset = position.to_offset(self);
1563 let mut languages: Vec<Arc<Language>> = self
1564 .syntax_map
1565 .lock()
1566 .layers_for_range(offset..offset, &self.text, false)
1567 .map(|info| info.language.clone())
1568 .collect();
1569
1570 if languages.is_empty()
1571 && let Some(buffer_language) = self.language()
1572 {
1573 languages.push(buffer_language.clone());
1574 }
1575
1576 languages
1577 }
1578
1579 /// An integer version number that accounts for all updates besides
1580 /// the buffer's text itself (which is versioned via a version vector).
1581 pub fn non_text_state_update_count(&self) -> usize {
1582 self.non_text_state_update_count
1583 }
1584
1585 /// Whether the buffer is being parsed in the background.
1586 #[cfg(any(test, feature = "test-support"))]
1587 pub fn is_parsing(&self) -> bool {
1588 self.reparse.is_some()
1589 }
1590
1591 /// Indicates whether the buffer contains any regions that may be
1592 /// written in a language that hasn't been loaded yet.
1593 pub fn contains_unknown_injections(&self) -> bool {
1594 self.syntax_map.lock().contains_unknown_injections()
1595 }
1596
1597 #[cfg(any(test, feature = "test-support"))]
1598 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1599 self.sync_parse_timeout = timeout;
1600 }
1601
1602 /// Called after an edit to synchronize the buffer's main parse tree with
1603 /// the buffer's new underlying state.
1604 ///
1605 /// Locks the syntax map and interpolates the edits since the last reparse
1606 /// into the foreground syntax tree.
1607 ///
1608 /// Then takes a stable snapshot of the syntax map before unlocking it.
1609 /// The snapshot with the interpolated edits is sent to a background thread,
1610 /// where we ask Tree-sitter to perform an incremental parse.
1611 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting on the parse to complete. If it completes within that window,
    /// we proceed synchronously.
    ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, we call
    /// back into the main thread and assign the foreground parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
1625 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1626 if self.reparse.is_some() {
1627 return;
1628 }
1629 let language = if let Some(language) = self.language.clone() {
1630 language
1631 } else {
1632 return;
1633 };
1634
1635 let text = self.text_snapshot();
1636 let parsed_version = self.version();
1637
1638 let mut syntax_map = self.syntax_map.lock();
1639 syntax_map.interpolate(&text);
1640 let language_registry = syntax_map.language_registry();
1641 let mut syntax_snapshot = syntax_map.snapshot();
1642 drop(syntax_map);
1643
1644 let parse_task = cx.background_spawn({
1645 let language = language.clone();
1646 let language_registry = language_registry.clone();
1647 async move {
1648 syntax_snapshot.reparse(&text, language_registry, language);
1649 syntax_snapshot
1650 }
1651 });
1652
1653 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1654 match cx
1655 .background_executor()
1656 .block_with_timeout(self.sync_parse_timeout, parse_task)
1657 {
1658 Ok(new_syntax_snapshot) => {
1659 self.did_finish_parsing(new_syntax_snapshot, cx);
1660 self.reparse = None;
1661 }
1662 Err(parse_task) => {
1663 // todo(lw): hot foreground spawn
1664 self.reparse = Some(cx.spawn(async move |this, cx| {
1665 let new_syntax_map = cx.background_spawn(parse_task).await;
1666 this.update(cx, move |this, cx| {
1667 let grammar_changed = || {
1668 this.language.as_ref().is_none_or(|current_language| {
1669 !Arc::ptr_eq(&language, current_language)
1670 })
1671 };
1672 let language_registry_changed = || {
1673 new_syntax_map.contains_unknown_injections()
1674 && language_registry.is_some_and(|registry| {
1675 registry.version() != new_syntax_map.language_registry_version()
1676 })
1677 };
1678 let parse_again = this.version.changed_since(&parsed_version)
1679 || language_registry_changed()
1680 || grammar_changed();
1681 this.did_finish_parsing(new_syntax_map, cx);
1682 this.reparse = None;
1683 if parse_again {
1684 this.reparse(cx);
1685 }
1686 })
1687 .ok();
1688 }));
1689 }
1690 }
1691 }
1692
1693 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1694 self.was_changed();
1695 self.non_text_state_update_count += 1;
1696 self.syntax_map.lock().did_parse(syntax_snapshot);
1697 self.request_autoindent(cx);
1698 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1699 self.tree_sitter_data.lock().clear();
1700 cx.emit(BufferEvent::Reparsed);
1701 cx.notify();
1702 }
1703
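    /// Returns a receiver that can be used to observe whether the buffer is currently
    /// being parsed.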
1704 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1705 self.parse_status.1.clone()
1706 }
1707
    /// Waits until the buffer is no longer being parsed.
1709 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1710 let mut parse_status = self.parse_status();
1711 async move {
1712 while *parse_status.borrow() != ParseStatus::Idle {
1713 if parse_status.changed().await.is_err() {
1714 break;
1715 }
1716 }
1717 }
1718 }
1719
1720 /// Assign to the buffer a set of diagnostics created by a given language server.
1721 pub fn update_diagnostics(
1722 &mut self,
1723 server_id: LanguageServerId,
1724 diagnostics: DiagnosticSet,
1725 cx: &mut Context<Self>,
1726 ) {
1727 let lamport_timestamp = self.text.lamport_clock.tick();
1728 let op = Operation::UpdateDiagnostics {
1729 server_id,
1730 diagnostics: diagnostics.iter().cloned().collect(),
1731 lamport_timestamp,
1732 };
1733
1734 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1735 self.send_operation(op, true, cx);
1736 }
1737
1738 pub fn buffer_diagnostics(
1739 &self,
1740 for_server: Option<LanguageServerId>,
1741 ) -> Vec<&DiagnosticEntry<Anchor>> {
1742 match for_server {
1743 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1744 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1745 Err(_) => Vec::new(),
1746 },
1747 None => self
1748 .diagnostics
1749 .iter()
1750 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1751 .collect(),
1752 }
1753 }
1754
1755 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1756 if let Some(indent_sizes) = self.compute_autoindents() {
1757 let indent_sizes = cx.background_spawn(indent_sizes);
1758 match cx
1759 .background_executor()
1760 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1761 {
1762 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1763 Err(indent_sizes) => {
1764 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1765 let indent_sizes = indent_sizes.await;
1766 this.update(cx, |this, cx| {
1767 this.apply_autoindents(indent_sizes, cx);
1768 })
1769 .ok();
1770 }));
1771 }
1772 }
1773 } else {
1774 self.autoindent_requests.clear();
1775 for tx in self.wait_for_autoindent_txs.drain(..) {
1776 tx.send(()).ok();
1777 }
1778 }
1779 }
1780
1781 fn compute_autoindents(
1782 &self,
1783 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1784 let max_rows_between_yields = 100;
1785 let snapshot = self.snapshot();
1786 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1787 return None;
1788 }
1789
1790 let autoindent_requests = self.autoindent_requests.clone();
1791 Some(async move {
1792 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1793 for request in autoindent_requests {
1794 // Resolve each edited range to its row in the current buffer and in the
1795 // buffer before this batch of edits.
1796 let mut row_ranges = Vec::new();
1797 let mut old_to_new_rows = BTreeMap::new();
1798 let mut language_indent_sizes_by_new_row = Vec::new();
1799 for entry in &request.entries {
1800 let position = entry.range.start;
1801 let new_row = position.to_point(&snapshot).row;
1802 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1803 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1804
1805 if !entry.first_line_is_new {
1806 let old_row = position.to_point(&request.before_edit).row;
1807 old_to_new_rows.insert(old_row, new_row);
1808 }
1809 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1810 }
1811
1812 // Build a map containing the suggested indentation for each of the edited lines
1813 // with respect to the state of the buffer before these edits. This map is keyed
1814 // by the rows for these lines in the current state of the buffer.
1815 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1816 let old_edited_ranges =
1817 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1818 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1819 let mut language_indent_size = IndentSize::default();
1820 for old_edited_range in old_edited_ranges {
1821 let suggestions = request
1822 .before_edit
1823 .suggest_autoindents(old_edited_range.clone())
1824 .into_iter()
1825 .flatten();
1826 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1827 if let Some(suggestion) = suggestion {
1828 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1829
1830 // Find the indent size based on the language for this row.
1831 while let Some((row, size)) = language_indent_sizes.peek() {
1832 if *row > new_row {
1833 break;
1834 }
1835 language_indent_size = *size;
1836 language_indent_sizes.next();
1837 }
1838
1839 let suggested_indent = old_to_new_rows
1840 .get(&suggestion.basis_row)
1841 .and_then(|from_row| {
1842 Some(old_suggestions.get(from_row).copied()?.0)
1843 })
1844 .unwrap_or_else(|| {
1845 request
1846 .before_edit
1847 .indent_size_for_line(suggestion.basis_row)
1848 })
1849 .with_delta(suggestion.delta, language_indent_size);
1850 old_suggestions
1851 .insert(new_row, (suggested_indent, suggestion.within_error));
1852 }
1853 }
1854 yield_now().await;
1855 }
1856
1857 // Compute new suggestions for each line, but only include them in the result
1858 // if they differ from the old suggestion for that line.
1859 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1860 let mut language_indent_size = IndentSize::default();
1861 for (row_range, original_indent_column) in row_ranges {
1862 let new_edited_row_range = if request.is_block_mode {
1863 row_range.start..row_range.start + 1
1864 } else {
1865 row_range.clone()
1866 };
1867
1868 let suggestions = snapshot
1869 .suggest_autoindents(new_edited_row_range.clone())
1870 .into_iter()
1871 .flatten();
1872 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1873 if let Some(suggestion) = suggestion {
1874 // Find the indent size based on the language for this row.
1875 while let Some((row, size)) = language_indent_sizes.peek() {
1876 if *row > new_row {
1877 break;
1878 }
1879 language_indent_size = *size;
1880 language_indent_sizes.next();
1881 }
1882
1883 let suggested_indent = indent_sizes
1884 .get(&suggestion.basis_row)
1885 .copied()
1886 .map(|e| e.0)
1887 .unwrap_or_else(|| {
1888 snapshot.indent_size_for_line(suggestion.basis_row)
1889 })
1890 .with_delta(suggestion.delta, language_indent_size);
1891
1892 if old_suggestions.get(&new_row).is_none_or(
1893 |(old_indentation, was_within_error)| {
1894 suggested_indent != *old_indentation
1895 && (!suggestion.within_error || *was_within_error)
1896 },
1897 ) {
1898 indent_sizes.insert(
1899 new_row,
1900 (suggested_indent, request.ignore_empty_lines),
1901 );
1902 }
1903 }
1904 }
1905
1906 if let (true, Some(original_indent_column)) =
1907 (request.is_block_mode, original_indent_column)
1908 {
1909 let new_indent =
1910 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1911 *indent
1912 } else {
1913 snapshot.indent_size_for_line(row_range.start)
1914 };
1915 let delta = new_indent.len as i64 - original_indent_column as i64;
1916 if delta != 0 {
1917 for row in row_range.skip(1) {
1918 indent_sizes.entry(row).or_insert_with(|| {
1919 let mut size = snapshot.indent_size_for_line(row);
1920 if size.kind == new_indent.kind {
1921 match delta.cmp(&0) {
1922 Ordering::Greater => size.len += delta as u32,
1923 Ordering::Less => {
1924 size.len = size.len.saturating_sub(-delta as u32)
1925 }
1926 Ordering::Equal => {}
1927 }
1928 }
1929 (size, request.ignore_empty_lines)
1930 });
1931 }
1932 }
1933 }
1934
1935 yield_now().await;
1936 }
1937 }
1938
1939 indent_sizes
1940 .into_iter()
1941 .filter_map(|(row, (indent, ignore_empty_lines))| {
1942 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1943 None
1944 } else {
1945 Some((row, indent))
1946 }
1947 })
1948 .collect()
1949 })
1950 }
1951
1952 fn apply_autoindents(
1953 &mut self,
1954 indent_sizes: BTreeMap<u32, IndentSize>,
1955 cx: &mut Context<Self>,
1956 ) {
1957 self.autoindent_requests.clear();
1958 for tx in self.wait_for_autoindent_txs.drain(..) {
1959 tx.send(()).ok();
1960 }
1961
1962 let edits: Vec<_> = indent_sizes
1963 .into_iter()
1964 .filter_map(|(row, indent_size)| {
1965 let current_size = indent_size_for_line(self, row);
1966 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1967 })
1968 .collect();
1969
1970 let preserve_preview = self.preserve_preview();
1971 self.edit(edits, None, cx);
1972 if preserve_preview {
1973 self.refresh_preview();
1974 }
1975 }
1976
1977 /// Create a minimal edit that will cause the given row to be indented
1978 /// with the given size. After applying this edit, the length of the line
1979 /// will always be at least `new_size.len`.
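    ///
    /// A sketch of the expected result (not compiled as a doctest), assuming row 3
    /// currently has a two-space indent and we want four spaces:
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// // Inserts the two missing spaces at the start of row 3:
    /// // Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// ```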
1980 pub fn edit_for_indent_size_adjustment(
1981 row: u32,
1982 current_size: IndentSize,
1983 new_size: IndentSize,
1984 ) -> Option<(Range<Point>, String)> {
1985 if new_size.kind == current_size.kind {
        match new_size.len.cmp(&current_size.len) {
1987 Ordering::Greater => {
1988 let point = Point::new(row, 0);
1989 Some((
1990 point..point,
1991 iter::repeat(new_size.char())
1992 .take((new_size.len - current_size.len) as usize)
1993 .collect::<String>(),
1994 ))
1995 }
1996
1997 Ordering::Less => Some((
1998 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1999 String::new(),
2000 )),
2001
2002 Ordering::Equal => None,
2003 }
2004 } else {
2005 Some((
2006 Point::new(row, 0)..Point::new(row, current_size.len),
2007 iter::repeat(new_size.char())
2008 .take(new_size.len as usize)
2009 .collect::<String>(),
2010 ))
2011 }
2012 }
2013
2014 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2015 /// and the given new text.
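    ///
    /// A minimal sketch of the intended flow (not compiled as a doctest); it assumes
    /// we are inside a `Buffer` method with `cx: &mut Context<Buffer>` and a
    /// `new_text: String` in scope:
    ///
    /// ```ignore
    /// let diff = self.diff(new_text, cx);
    /// cx.spawn(async move |this, cx| {
    ///     let diff = diff.await;
    ///     this.update(cx, |this, cx| {
    ///         this.apply_diff(diff, cx);
    ///     })
    ///     .ok();
    /// })
    /// .detach();
    /// ```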
2016 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2017 let old_text = self.as_rope().clone();
2018 let base_version = self.version();
2019 cx.background_executor()
2020 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2021 let old_text = old_text.to_string();
2022 let line_ending = LineEnding::detect(&new_text);
2023 LineEnding::normalize(&mut new_text);
2024 let edits = text_diff(&old_text, &new_text);
2025 Diff {
2026 base_version,
2027 line_ending,
2028 edits,
2029 }
2030 })
2031 }
2032
2033 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2035 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2036 let old_text = self.as_rope().clone();
2037 let line_ending = self.line_ending();
2038 let base_version = self.version();
2039 cx.background_spawn(async move {
2040 let ranges = trailing_whitespace_ranges(&old_text);
2041 let empty = Arc::<str>::from("");
2042 Diff {
2043 base_version,
2044 line_ending,
2045 edits: ranges
2046 .into_iter()
2047 .map(|range| (range, empty.clone()))
2048 .collect(),
2049 }
2050 })
2051 }
2052
2053 /// Ensures that the buffer ends with a single newline character, and
    /// no other trailing whitespace. Does nothing if the buffer is empty.
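    ///
    /// Illustrative behavior (not compiled as a doctest), assuming the buffer holds
    /// `"fn main() {}  \n\n"` and `cx: &mut Context<Buffer>`:
    ///
    /// ```ignore
    /// buffer.ensure_final_newline(cx);
    /// // The trailing whitespace run is replaced, leaving "fn main() {}\n".
    /// ```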
2055 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2056 let len = self.len();
2057 if len == 0 {
2058 return;
2059 }
2060 let mut offset = len;
2061 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2062 let non_whitespace_len = chunk
2063 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2064 .len();
2065 offset -= chunk.len();
2066 offset += non_whitespace_len;
2067 if non_whitespace_len != 0 {
2068 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2069 return;
2070 }
2071 break;
2072 }
2073 }
2074 self.edit([(offset..len, "\n")], None, cx);
2075 }
2076
2077 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2078 /// calculated, then adjust the diff to account for those changes, and discard any
2079 /// parts of the diff that conflict with those changes.
2080 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2081 let snapshot = self.snapshot();
2082 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2083 let mut delta = 0;
2084 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2085 while let Some(edit_since) = edits_since.peek() {
2086 // If the edit occurs after a diff hunk, then it does not
2087 // affect that hunk.
2088 if edit_since.old.start > range.end {
2089 break;
2090 }
2091 // If the edit precedes the diff hunk, then adjust the hunk
2092 // to reflect the edit.
2093 else if edit_since.old.end < range.start {
2094 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2095 edits_since.next();
2096 }
2097 // If the edit intersects a diff hunk, then discard that hunk.
2098 else {
2099 return None;
2100 }
2101 }
2102
2103 let start = (range.start as i64 + delta) as usize;
2104 let end = (range.end as i64 + delta) as usize;
2105 Some((start..end, new_text))
2106 });
2107
2108 self.start_transaction();
2109 self.text.set_line_ending(diff.line_ending);
2110 self.edit(adjusted_edits, None, cx);
2111 self.end_transaction(cx)
2112 }
2113
2114 pub fn has_unsaved_edits(&self) -> bool {
2115 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2116
2117 if last_version == self.version {
2118 self.has_unsaved_edits
2119 .set((last_version, has_unsaved_edits));
2120 return has_unsaved_edits;
2121 }
2122
2123 let has_edits = self.has_edits_since(&self.saved_version);
2124 self.has_unsaved_edits
2125 .set((self.version.clone(), has_edits));
2126 has_edits
2127 }
2128
2129 /// Checks if the buffer has unsaved changes.
2130 pub fn is_dirty(&self) -> bool {
2131 if self.capability == Capability::ReadOnly {
2132 return false;
2133 }
2134 if self.has_conflict {
2135 return true;
2136 }
2137 match self.file.as_ref().map(|f| f.disk_state()) {
2138 Some(DiskState::New) | Some(DiskState::Deleted) => {
2139 !self.is_empty() && self.has_unsaved_edits()
2140 }
2141 _ => self.has_unsaved_edits(),
2142 }
2143 }
2144
2145 /// Marks the buffer as having a conflict regardless of current buffer state.
2146 pub fn set_conflict(&mut self) {
2147 self.has_conflict = true;
2148 }
2149
2150 /// Checks if the buffer and its file have both changed since the buffer
2151 /// was last saved or reloaded.
2152 pub fn has_conflict(&self) -> bool {
2153 if self.has_conflict {
2154 return true;
2155 }
2156 let Some(file) = self.file.as_ref() else {
2157 return false;
2158 };
2159 match file.disk_state() {
2160 DiskState::New => false,
2161 DiskState::Present { mtime } => match self.saved_mtime {
2162 Some(saved_mtime) => {
2163 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2164 }
2165 None => true,
2166 },
2167 DiskState::Deleted => false,
2168 }
2169 }
2170
2171 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2172 pub fn subscribe(&mut self) -> Subscription<usize> {
2173 self.text.subscribe()
2174 }
2175
2176 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2177 ///
2178 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
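    ///
    /// A minimal sketch (not compiled as a doctest); the surrounding entity and
    /// update call are assumptions for illustration:
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.update(cx, |buffer, _| buffer.record_changes(Rc::downgrade(&changed)));
    /// // ...later, e.g. during a render pass, poll the bit without waiting for events:
    /// if changed.take() {
    ///     // The buffer's text changed since the last check.
    /// }
    /// ```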
2180 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2181 if let Err(ix) = self
2182 .change_bits
2183 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2184 {
2185 self.change_bits.insert(ix, bit);
2186 }
2187 }
2188
2189 /// Set the change bit for all "listeners".
2190 fn was_changed(&mut self) {
2191 self.change_bits.retain(|change_bit| {
2192 change_bit
2193 .upgrade()
2194 .inspect(|bit| {
2195 _ = bit.replace(true);
2196 })
2197 .is_some()
2198 });
2199 }
2200
2201 /// Starts a transaction, if one is not already in-progress. When undoing or
2202 /// redoing edits, all of the edits performed within a transaction are undone
2203 /// or redone together.
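    ///
    /// A minimal sketch of grouping several edits into one undo step (not compiled
    /// as a doctest); assumes `cx: &mut Context<Buffer>`:
    ///
    /// ```ignore
    /// self.start_transaction();
    /// self.edit([(0..0, "fn main() {\n")], None, cx);
    /// let len = self.len();
    /// self.edit([(len..len, "}\n")], None, cx);
    /// let transaction_id = self.end_transaction(cx);
    /// ```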
2204 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2205 self.start_transaction_at(Instant::now())
2206 }
2207
2208 /// Starts a transaction, providing the current time. Subsequent transactions
2209 /// that occur within a short period of time will be grouped together. This
2210 /// is controlled by the buffer's undo grouping duration.
2211 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2212 self.transaction_depth += 1;
2213 if self.was_dirty_before_starting_transaction.is_none() {
2214 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2215 }
2216 self.text.start_transaction_at(now)
2217 }
2218
2219 /// Terminates the current transaction, if this is the outermost transaction.
2220 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2221 self.end_transaction_at(Instant::now(), cx)
2222 }
2223
2224 /// Terminates the current transaction, providing the current time. Subsequent transactions
2225 /// that occur within a short period of time will be grouped together. This
2226 /// is controlled by the buffer's undo grouping duration.
2227 pub fn end_transaction_at(
2228 &mut self,
2229 now: Instant,
2230 cx: &mut Context<Self>,
2231 ) -> Option<TransactionId> {
2232 assert!(self.transaction_depth > 0);
2233 self.transaction_depth -= 1;
2234 let was_dirty = if self.transaction_depth == 0 {
2235 self.was_dirty_before_starting_transaction.take().unwrap()
2236 } else {
2237 false
2238 };
2239 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2240 self.did_edit(&start_version, was_dirty, cx);
2241 Some(transaction_id)
2242 } else {
2243 None
2244 }
2245 }
2246
2247 /// Manually add a transaction to the buffer's undo history.
2248 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2249 self.text.push_transaction(transaction, now);
2250 }
2251
2252 /// Differs from `push_transaction` in that it does not clear the redo
2253 /// stack. Intended to be used to create a parent transaction to merge
2254 /// potential child transactions into.
2255 ///
2256 /// The caller is responsible for removing it from the undo history using
2257 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2258 /// are merged into this transaction, the caller is responsible for ensuring
2259 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2260 /// cleared is to create transactions with the usual `start_transaction` and
2261 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
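    ///
    /// A sketch of that workflow (not compiled as a doctest); assumes
    /// `cx: &mut Context<Buffer>`:
    ///
    /// ```ignore
    /// let parent = self.push_empty_transaction(Instant::now());
    /// self.start_transaction();
    /// self.edit([(0..0, "// header\n")], None, cx);
    /// if let Some(child) = self.end_transaction(cx) {
    ///     self.merge_transactions(child, parent);
    /// } else {
    ///     self.forget_transaction(parent);
    /// }
    /// ```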
2263 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2264 self.text.push_empty_transaction(now)
2265 }
2266
2267 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2269 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2270 self.text.finalize_last_transaction()
2271 }
2272
2273 /// Manually group all changes since a given transaction.
2274 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2275 self.text.group_until_transaction(transaction_id);
2276 }
2277
2278 /// Manually remove a transaction from the buffer's undo history
2279 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2280 self.text.forget_transaction(transaction_id)
2281 }
2282
2283 /// Retrieve a transaction from the buffer's undo history
2284 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2285 self.text.get_transaction(transaction_id)
2286 }
2287
2288 /// Manually merge two transactions in the buffer's undo history.
2289 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2290 self.text.merge_transactions(transaction, destination);
2291 }
2292
2293 /// Waits for the buffer to receive operations with the given timestamps.
2294 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2295 &mut self,
2296 edit_ids: It,
2297 ) -> impl Future<Output = Result<()>> + use<It> {
2298 self.text.wait_for_edits(edit_ids)
2299 }
2300
2301 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2302 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2303 &mut self,
2304 anchors: It,
2305 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2306 self.text.wait_for_anchors(anchors)
2307 }
2308
2309 /// Waits for the buffer to receive operations up to the given version.
2310 pub fn wait_for_version(
2311 &mut self,
2312 version: clock::Global,
2313 ) -> impl Future<Output = Result<()>> + use<> {
2314 self.text.wait_for_version(version)
2315 }
2316
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2319 pub fn give_up_waiting(&mut self) {
2320 self.text.give_up_waiting();
2321 }
2322
2323 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2324 let mut rx = None;
2325 if !self.autoindent_requests.is_empty() {
2326 let channel = oneshot::channel();
2327 self.wait_for_autoindent_txs.push(channel.0);
2328 rx = Some(channel.1);
2329 }
2330 rx
2331 }
2332
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2334 pub fn set_active_selections(
2335 &mut self,
2336 selections: Arc<[Selection<Anchor>]>,
2337 line_mode: bool,
2338 cursor_shape: CursorShape,
2339 cx: &mut Context<Self>,
2340 ) {
2341 let lamport_timestamp = self.text.lamport_clock.tick();
2342 self.remote_selections.insert(
2343 self.text.replica_id(),
2344 SelectionSet {
2345 selections: selections.clone(),
2346 lamport_timestamp,
2347 line_mode,
2348 cursor_shape,
2349 },
2350 );
2351 self.send_operation(
2352 Operation::UpdateSelections {
2353 selections,
2354 line_mode,
2355 lamport_timestamp,
2356 cursor_shape,
2357 },
2358 true,
2359 cx,
2360 );
2361 self.non_text_state_update_count += 1;
2362 cx.notify();
2363 }
2364
2365 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2366 /// this replica.
2367 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2368 if self
2369 .remote_selections
2370 .get(&self.text.replica_id())
2371 .is_none_or(|set| !set.selections.is_empty())
2372 {
2373 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2374 }
2375 }
2376
2377 pub fn set_agent_selections(
2378 &mut self,
2379 selections: Arc<[Selection<Anchor>]>,
2380 line_mode: bool,
2381 cursor_shape: CursorShape,
2382 cx: &mut Context<Self>,
2383 ) {
2384 let lamport_timestamp = self.text.lamport_clock.tick();
2385 self.remote_selections.insert(
2386 ReplicaId::AGENT,
2387 SelectionSet {
2388 selections,
2389 lamport_timestamp,
2390 line_mode,
2391 cursor_shape,
2392 },
2393 );
2394 self.non_text_state_update_count += 1;
2395 cx.notify();
2396 }
2397
2398 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2399 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2400 }
2401
2402 /// Replaces the buffer's entire text.
2403 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2404 where
2405 T: Into<Arc<str>>,
2406 {
2407 self.autoindent_requests.clear();
2408 self.edit([(0..self.len(), text)], None, cx)
2409 }
2410
2411 /// Appends the given text to the end of the buffer.
2412 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2413 where
2414 T: Into<Arc<str>>,
2415 {
2416 self.edit([(self.len()..self.len(), text)], None, cx)
2417 }
2418
2419 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2420 /// delete, and a string of text to insert at that location.
2421 ///
2422 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2423 /// request for the edited ranges, which will be processed when the buffer finishes
2424 /// parsing.
2425 ///
    /// Parsing takes place at the end of a transaction, and may run synchronously
    /// or asynchronously, depending on the changes.
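    ///
    /// A minimal sketch (not compiled as a doctest); assumes we are inside an entity
    /// update closure with `buffer: &mut Buffer` and `cx: &mut Context<Buffer>`:
    ///
    /// ```ignore
    /// // Replace the first three bytes and append a trailing newline, auto-indenting
    /// // each edited line.
    /// let len = buffer.len();
    /// buffer.edit(
    ///     [(0..3, "let"), (len..len, ";\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```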
2428 pub fn edit<I, S, T>(
2429 &mut self,
2430 edits_iter: I,
2431 autoindent_mode: Option<AutoindentMode>,
2432 cx: &mut Context<Self>,
2433 ) -> Option<clock::Lamport>
2434 where
2435 I: IntoIterator<Item = (Range<S>, T)>,
2436 S: ToOffset,
2437 T: Into<Arc<str>>,
2438 {
2439 // Skip invalid edits and coalesce contiguous ones.
2440 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2441
2442 for (range, new_text) in edits_iter {
2443 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2444
2445 if range.start > range.end {
2446 mem::swap(&mut range.start, &mut range.end);
2447 }
2448 let new_text = new_text.into();
2449 if !new_text.is_empty() || !range.is_empty() {
2450 if let Some((prev_range, prev_text)) = edits.last_mut()
2451 && prev_range.end >= range.start
2452 {
2453 prev_range.end = cmp::max(prev_range.end, range.end);
2454 *prev_text = format!("{prev_text}{new_text}").into();
2455 } else {
2456 edits.push((range, new_text));
2457 }
2458 }
2459 }
2460 if edits.is_empty() {
2461 return None;
2462 }
2463
2464 self.start_transaction();
2465 self.pending_autoindent.take();
2466 let autoindent_request = autoindent_mode
2467 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2468
2469 let edit_operation = self.text.edit(edits.iter().cloned());
2470 let edit_id = edit_operation.timestamp();
2471
2472 if let Some((before_edit, mode)) = autoindent_request {
2473 let mut delta = 0isize;
2474 let mut previous_setting = None;
2475 let entries: Vec<_> = edits
2476 .into_iter()
2477 .enumerate()
2478 .zip(&edit_operation.as_edit().unwrap().new_text)
2479 .filter(|((_, (range, _)), _)| {
2480 let language = before_edit.language_at(range.start);
2481 let language_id = language.map(|l| l.id());
2482 if let Some((cached_language_id, auto_indent)) = previous_setting
2483 && cached_language_id == language_id
2484 {
2485 auto_indent
2486 } else {
2487 // The auto-indent setting is not present in editorconfigs, hence
2488 // we can avoid passing the file here.
2489 let auto_indent =
2490 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2491 previous_setting = Some((language_id, auto_indent));
2492 auto_indent
2493 }
2494 })
2495 .map(|((ix, (range, _)), new_text)| {
2496 let new_text_length = new_text.len();
2497 let old_start = range.start.to_point(&before_edit);
2498 let new_start = (delta + range.start as isize) as usize;
2499 let range_len = range.end - range.start;
2500 delta += new_text_length as isize - range_len as isize;
2501
2502 // Decide what range of the insertion to auto-indent, and whether
2503 // the first line of the insertion should be considered a newly-inserted line
2504 // or an edit to an existing line.
2505 let mut range_of_insertion_to_indent = 0..new_text_length;
2506 let mut first_line_is_new = true;
2507
2508 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2509 let old_line_end = before_edit.line_len(old_start.row);
2510
2511 if old_start.column > old_line_start {
2512 first_line_is_new = false;
2513 }
2514
2515 if !new_text.contains('\n')
2516 && (old_start.column + (range_len as u32) < old_line_end
2517 || old_line_end == old_line_start)
2518 {
2519 first_line_is_new = false;
2520 }
2521
2522 // When inserting text starting with a newline, avoid auto-indenting the
2523 // previous line.
2524 if new_text.starts_with('\n') {
2525 range_of_insertion_to_indent.start += 1;
2526 first_line_is_new = true;
2527 }
2528
2529 let mut original_indent_column = None;
2530 if let AutoindentMode::Block {
2531 original_indent_columns,
2532 } = &mode
2533 {
2534 original_indent_column = Some(if new_text.starts_with('\n') {
2535 indent_size_for_text(
2536 new_text[range_of_insertion_to_indent.clone()].chars(),
2537 )
2538 .len
2539 } else {
2540 original_indent_columns
2541 .get(ix)
2542 .copied()
2543 .flatten()
2544 .unwrap_or_else(|| {
2545 indent_size_for_text(
2546 new_text[range_of_insertion_to_indent.clone()].chars(),
2547 )
2548 .len
2549 })
2550 });
2551
2552 // Avoid auto-indenting the line after the edit.
2553 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2554 range_of_insertion_to_indent.end -= 1;
2555 }
2556 }
2557
2558 AutoindentRequestEntry {
2559 first_line_is_new,
2560 original_indent_column,
2561 indent_size: before_edit.language_indent_size_at(range.start, cx),
2562 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2563 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2564 }
2565 })
2566 .collect();
2567
2568 if !entries.is_empty() {
2569 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2570 before_edit,
2571 entries,
2572 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2573 ignore_empty_lines: false,
2574 }));
2575 }
2576 }
2577
2578 self.end_transaction(cx);
2579 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2580 Some(edit_id)
2581 }
2582
2583 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2584 self.was_changed();
2585
2586 if self.edits_since::<usize>(old_version).next().is_none() {
2587 return;
2588 }
2589
2590 self.reparse(cx);
2591 cx.emit(BufferEvent::Edited);
2592 if was_dirty != self.is_dirty() {
2593 cx.emit(BufferEvent::DirtyChanged);
2594 }
2595 cx.notify();
2596 }
2597
2598 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2599 where
2600 I: IntoIterator<Item = Range<T>>,
2601 T: ToOffset + Copy,
2602 {
2603 let before_edit = self.snapshot();
2604 let entries = ranges
2605 .into_iter()
2606 .map(|range| AutoindentRequestEntry {
2607 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2608 first_line_is_new: true,
2609 indent_size: before_edit.language_indent_size_at(range.start, cx),
2610 original_indent_column: None,
2611 })
2612 .collect();
2613 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2614 before_edit,
2615 entries,
2616 is_block_mode: false,
2617 ignore_empty_lines: true,
2618 }));
2619 self.request_autoindent(cx);
2620 }
2621
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2624 pub fn insert_empty_line(
2625 &mut self,
2626 position: impl ToPoint,
2627 space_above: bool,
2628 space_below: bool,
2629 cx: &mut Context<Self>,
2630 ) -> Point {
2631 let mut position = position.to_point(self);
2632
2633 self.start_transaction();
2634
2635 self.edit(
2636 [(position..position, "\n")],
2637 Some(AutoindentMode::EachLine),
2638 cx,
2639 );
2640
2641 if position.column > 0 {
2642 position += Point::new(1, 0);
2643 }
2644
2645 if !self.is_line_blank(position.row) {
2646 self.edit(
2647 [(position..position, "\n")],
2648 Some(AutoindentMode::EachLine),
2649 cx,
2650 );
2651 }
2652
2653 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2654 self.edit(
2655 [(position..position, "\n")],
2656 Some(AutoindentMode::EachLine),
2657 cx,
2658 );
2659 position.row += 1;
2660 }
2661
2662 if space_below
2663 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2664 {
2665 self.edit(
2666 [(position..position, "\n")],
2667 Some(AutoindentMode::EachLine),
2668 cx,
2669 );
2670 }
2671
2672 self.end_transaction(cx);
2673
2674 position
2675 }
2676
2677 /// Applies the given remote operations to the buffer.
2678 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2679 self.pending_autoindent.take();
2680 let was_dirty = self.is_dirty();
2681 let old_version = self.version.clone();
2682 let mut deferred_ops = Vec::new();
2683 let buffer_ops = ops
2684 .into_iter()
2685 .filter_map(|op| match op {
2686 Operation::Buffer(op) => Some(op),
2687 _ => {
2688 if self.can_apply_op(&op) {
2689 self.apply_op(op, cx);
2690 } else {
2691 deferred_ops.push(op);
2692 }
2693 None
2694 }
2695 })
2696 .collect::<Vec<_>>();
2697 for operation in buffer_ops.iter() {
2698 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2699 }
2700 self.text.apply_ops(buffer_ops);
2701 self.deferred_ops.insert(deferred_ops);
2702 self.flush_deferred_ops(cx);
2703 self.did_edit(&old_version, was_dirty, cx);
2704 // Notify independently of whether the buffer was edited as the operations could include a
2705 // selection update.
2706 cx.notify();
2707 }
2708
2709 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2710 let mut deferred_ops = Vec::new();
2711 for op in self.deferred_ops.drain().iter().cloned() {
2712 if self.can_apply_op(&op) {
2713 self.apply_op(op, cx);
2714 } else {
2715 deferred_ops.push(op);
2716 }
2717 }
2718 self.deferred_ops.insert(deferred_ops);
2719 }
2720
2721 pub fn has_deferred_ops(&self) -> bool {
2722 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2723 }
2724
2725 fn can_apply_op(&self, operation: &Operation) -> bool {
2726 match operation {
2727 Operation::Buffer(_) => {
2728 unreachable!("buffer operations should never be applied at this layer")
2729 }
2730 Operation::UpdateDiagnostics {
2731 diagnostics: diagnostic_set,
2732 ..
2733 } => diagnostic_set.iter().all(|diagnostic| {
2734 self.text.can_resolve(&diagnostic.range.start)
2735 && self.text.can_resolve(&diagnostic.range.end)
2736 }),
2737 Operation::UpdateSelections { selections, .. } => selections
2738 .iter()
2739 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2740 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2741 }
2742 }
2743
2744 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2745 match operation {
2746 Operation::Buffer(_) => {
2747 unreachable!("buffer operations should never be applied at this layer")
2748 }
2749 Operation::UpdateDiagnostics {
2750 server_id,
2751 diagnostics: diagnostic_set,
2752 lamport_timestamp,
2753 } => {
2754 let snapshot = self.snapshot();
2755 self.apply_diagnostic_update(
2756 server_id,
2757 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2758 lamport_timestamp,
2759 cx,
2760 );
2761 }
2762 Operation::UpdateSelections {
2763 selections,
2764 lamport_timestamp,
2765 line_mode,
2766 cursor_shape,
2767 } => {
2768 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2769 && set.lamport_timestamp > lamport_timestamp
2770 {
2771 return;
2772 }
2773
2774 self.remote_selections.insert(
2775 lamport_timestamp.replica_id,
2776 SelectionSet {
2777 selections,
2778 lamport_timestamp,
2779 line_mode,
2780 cursor_shape,
2781 },
2782 );
2783 self.text.lamport_clock.observe(lamport_timestamp);
2784 self.non_text_state_update_count += 1;
2785 }
2786 Operation::UpdateCompletionTriggers {
2787 triggers,
2788 lamport_timestamp,
2789 server_id,
2790 } => {
2791 if triggers.is_empty() {
2792 self.completion_triggers_per_language_server
2793 .remove(&server_id);
2794 self.completion_triggers = self
2795 .completion_triggers_per_language_server
2796 .values()
2797 .flat_map(|triggers| triggers.iter().cloned())
2798 .collect();
2799 } else {
2800 self.completion_triggers_per_language_server
2801 .insert(server_id, triggers.iter().cloned().collect());
2802 self.completion_triggers.extend(triggers);
2803 }
2804 self.text.lamport_clock.observe(lamport_timestamp);
2805 }
2806 Operation::UpdateLineEnding {
2807 line_ending,
2808 lamport_timestamp,
2809 } => {
2810 self.text.set_line_ending(line_ending);
2811 self.text.lamport_clock.observe(lamport_timestamp);
2812 }
2813 }
2814 }
2815
2816 fn apply_diagnostic_update(
2817 &mut self,
2818 server_id: LanguageServerId,
2819 diagnostics: DiagnosticSet,
2820 lamport_timestamp: clock::Lamport,
2821 cx: &mut Context<Self>,
2822 ) {
2823 if lamport_timestamp > self.diagnostics_timestamp {
2824 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2825 if diagnostics.is_empty() {
2826 if let Ok(ix) = ix {
2827 self.diagnostics.remove(ix);
2828 }
2829 } else {
2830 match ix {
2831 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2832 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2833 };
2834 }
2835 self.diagnostics_timestamp = lamport_timestamp;
2836 self.non_text_state_update_count += 1;
2837 self.text.lamport_clock.observe(lamport_timestamp);
2838 cx.notify();
2839 cx.emit(BufferEvent::DiagnosticsUpdated);
2840 }
2841 }
2842
2843 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2844 self.was_changed();
2845 cx.emit(BufferEvent::Operation {
2846 operation,
2847 is_local,
2848 });
2849 }
2850
2851 /// Removes the selections for a given peer.
2852 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2853 self.remote_selections.remove(&replica_id);
2854 cx.notify();
2855 }
2856
2857 /// Undoes the most recent transaction.
2858 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2859 let was_dirty = self.is_dirty();
2860 let old_version = self.version.clone();
2861
2862 if let Some((transaction_id, operation)) = self.text.undo() {
2863 self.send_operation(Operation::Buffer(operation), true, cx);
2864 self.did_edit(&old_version, was_dirty, cx);
2865 Some(transaction_id)
2866 } else {
2867 None
2868 }
2869 }
2870
2871 /// Manually undoes a specific transaction in the buffer's undo history.
2872 pub fn undo_transaction(
2873 &mut self,
2874 transaction_id: TransactionId,
2875 cx: &mut Context<Self>,
2876 ) -> bool {
2877 let was_dirty = self.is_dirty();
2878 let old_version = self.version.clone();
2879 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2880 self.send_operation(Operation::Buffer(operation), true, cx);
2881 self.did_edit(&old_version, was_dirty, cx);
2882 true
2883 } else {
2884 false
2885 }
2886 }
2887
2888 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2889 pub fn undo_to_transaction(
2890 &mut self,
2891 transaction_id: TransactionId,
2892 cx: &mut Context<Self>,
2893 ) -> bool {
2894 let was_dirty = self.is_dirty();
2895 let old_version = self.version.clone();
2896
2897 let operations = self.text.undo_to_transaction(transaction_id);
2898 let undone = !operations.is_empty();
2899 for operation in operations {
2900 self.send_operation(Operation::Buffer(operation), true, cx);
2901 }
2902 if undone {
2903 self.did_edit(&old_version, was_dirty, cx)
2904 }
2905 undone
2906 }
2907
2908 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2909 let was_dirty = self.is_dirty();
2910 let operation = self.text.undo_operations(counts);
2911 let old_version = self.version.clone();
2912 self.send_operation(Operation::Buffer(operation), true, cx);
2913 self.did_edit(&old_version, was_dirty, cx);
2914 }
2915
    /// Redoes the most recent transaction.
2917 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2918 let was_dirty = self.is_dirty();
2919 let old_version = self.version.clone();
2920
2921 if let Some((transaction_id, operation)) = self.text.redo() {
2922 self.send_operation(Operation::Buffer(operation), true, cx);
2923 self.did_edit(&old_version, was_dirty, cx);
2924 Some(transaction_id)
2925 } else {
2926 None
2927 }
2928 }
2929
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2931 pub fn redo_to_transaction(
2932 &mut self,
2933 transaction_id: TransactionId,
2934 cx: &mut Context<Self>,
2935 ) -> bool {
2936 let was_dirty = self.is_dirty();
2937 let old_version = self.version.clone();
2938
2939 let operations = self.text.redo_to_transaction(transaction_id);
2940 let redone = !operations.is_empty();
2941 for operation in operations {
2942 self.send_operation(Operation::Buffer(operation), true, cx);
2943 }
2944 if redone {
2945 self.did_edit(&old_version, was_dirty, cx)
2946 }
2947 redone
2948 }
2949
2950 /// Override current completion triggers with the user-provided completion triggers.
2951 pub fn set_completion_triggers(
2952 &mut self,
2953 server_id: LanguageServerId,
2954 triggers: BTreeSet<String>,
2955 cx: &mut Context<Self>,
2956 ) {
2957 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2958 if triggers.is_empty() {
2959 self.completion_triggers_per_language_server
2960 .remove(&server_id);
2961 self.completion_triggers = self
2962 .completion_triggers_per_language_server
2963 .values()
2964 .flat_map(|triggers| triggers.iter().cloned())
2965 .collect();
2966 } else {
2967 self.completion_triggers_per_language_server
2968 .insert(server_id, triggers.clone());
2969 self.completion_triggers.extend(triggers.iter().cloned());
2970 }
2971 self.send_operation(
2972 Operation::UpdateCompletionTriggers {
2973 triggers: triggers.into_iter().collect(),
2974 lamport_timestamp: self.completion_triggers_timestamp,
2975 server_id,
2976 },
2977 true,
2978 cx,
2979 );
2980 cx.notify();
2981 }
2982
    /// Returns the set of strings that trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2985 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2986 &self.completion_triggers
2987 }
2988
2989 /// Call this directly after performing edits to prevent the preview tab
2990 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2991 /// to return false until there are additional edits.
2992 pub fn refresh_preview(&mut self) {
2993 self.preview_version = self.version.clone();
2994 }
2995
2996 /// Whether we should preserve the preview status of a tab containing this buffer.
2997 pub fn preserve_preview(&self) -> bool {
2998 !self.has_edits_since(&self.preview_version)
2999 }
3000}
3001
3002#[doc(hidden)]
3003#[cfg(any(test, feature = "test-support"))]
3004impl Buffer {
3005 pub fn edit_via_marked_text(
3006 &mut self,
3007 marked_string: &str,
3008 autoindent_mode: Option<AutoindentMode>,
3009 cx: &mut Context<Self>,
3010 ) {
3011 let edits = self.edits_for_marked_text(marked_string);
3012 self.edit(edits, autoindent_mode, cx);
3013 }
3014
3015 pub fn set_group_interval(&mut self, group_interval: Duration) {
3016 self.text.set_group_interval(group_interval);
3017 }
3018
3019 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3020 where
3021 T: rand::Rng,
3022 {
3023 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3024 let mut last_end = None;
3025 for _ in 0..old_range_count {
3026 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3027 break;
3028 }
3029
3030 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3031 let mut range = self.random_byte_range(new_start, rng);
3032 if rng.random_bool(0.2) {
3033 mem::swap(&mut range.start, &mut range.end);
3034 }
3035 last_end = Some(range.end);
3036
3037 let new_text_len = rng.random_range(0..10);
3038 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3039 new_text = new_text.to_uppercase();
3040
3041 edits.push((range, new_text));
3042 }
3043 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3044 self.edit(edits, None, cx);
3045 }
3046
3047 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3048 let was_dirty = self.is_dirty();
3049 let old_version = self.version.clone();
3050
3051 let ops = self.text.randomly_undo_redo(rng);
3052 if !ops.is_empty() {
3053 for op in ops {
3054 self.send_operation(Operation::Buffer(op), true, cx);
3055 self.did_edit(&old_version, was_dirty, cx);
3056 }
3057 }
3058 }
3059}
3060
3061impl EventEmitter<BufferEvent> for Buffer {}
3062
3063impl Deref for Buffer {
3064 type Target = TextBuffer;
3065
3066 fn deref(&self) -> &Self::Target {
3067 &self.text
3068 }
3069}
3070
3071impl BufferSnapshot {
3072 /// Returns [`IndentSize`] for a given line that respects user settings and
3073 /// language preferences.
3074 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3075 indent_size_for_line(self, row)
3076 }
3077
3078 /// Returns [`IndentSize`] for a given position that respects user settings
3079 /// and language preferences.
3080 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3081 let settings = language_settings(
3082 self.language_at(position).map(|l| l.name()),
3083 self.file(),
3084 cx,
3085 );
3086 if settings.hard_tabs {
3087 IndentSize::tab()
3088 } else {
3089 IndentSize::spaces(settings.tab_size.get())
3090 }
3091 }
3092
3093 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3094 /// is passed in as `single_indent_size`.
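    ///
    /// A minimal sketch (not compiled as a doctest), assuming a `BufferSnapshot`
    /// named `snapshot`:
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(0u32..5, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent to {} columns", indent.len);
    /// }
    /// ```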
3095 pub fn suggested_indents(
3096 &self,
3097 rows: impl Iterator<Item = u32>,
3098 single_indent_size: IndentSize,
3099 ) -> BTreeMap<u32, IndentSize> {
3100 let mut result = BTreeMap::new();
3101
3102 for row_range in contiguous_ranges(rows, 10) {
3103 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3104 Some(suggestions) => suggestions,
3105 _ => break,
3106 };
3107
3108 for (row, suggestion) in row_range.zip(suggestions) {
3109 let indent_size = if let Some(suggestion) = suggestion {
3110 result
3111 .get(&suggestion.basis_row)
3112 .copied()
3113 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3114 .with_delta(suggestion.delta, single_indent_size)
3115 } else {
3116 self.indent_size_for_line(row)
3117 };
3118
3119 result.insert(row, indent_size);
3120 }
3121 }
3122
3123 result
3124 }
3125
3126 fn suggest_autoindents(
3127 &self,
3128 row_range: Range<u32>,
3129 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3130 let config = &self.language.as_ref()?.config;
3131 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3132
3133 #[derive(Debug, Clone)]
3134 struct StartPosition {
3135 start: Point,
3136 suffix: SharedString,
3137 }
3138
3139 // Find the suggested indentation ranges based on the syntax tree.
3140 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3141 let end = Point::new(row_range.end, 0);
3142 let range = (start..end).to_offset(&self.text);
3143 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3144 Some(&grammar.indents_config.as_ref()?.query)
3145 });
3146 let indent_configs = matches
3147 .grammars()
3148 .iter()
3149 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3150 .collect::<Vec<_>>();
3151
3152 let mut indent_ranges = Vec::<Range<Point>>::new();
3153 let mut start_positions = Vec::<StartPosition>::new();
3154 let mut outdent_positions = Vec::<Point>::new();
3155 while let Some(mat) = matches.peek() {
3156 let mut start: Option<Point> = None;
3157 let mut end: Option<Point> = None;
3158
3159 let config = indent_configs[mat.grammar_index];
3160 for capture in mat.captures {
3161 if capture.index == config.indent_capture_ix {
3162 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3163 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3164 } else if Some(capture.index) == config.start_capture_ix {
3165 start = Some(Point::from_ts_point(capture.node.end_position()));
3166 } else if Some(capture.index) == config.end_capture_ix {
3167 end = Some(Point::from_ts_point(capture.node.start_position()));
3168 } else if Some(capture.index) == config.outdent_capture_ix {
3169 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3170 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3171 start_positions.push(StartPosition {
3172 start: Point::from_ts_point(capture.node.start_position()),
3173 suffix: suffix.clone(),
3174 });
3175 }
3176 }
3177
3178 matches.advance();
3179 if let Some((start, end)) = start.zip(end) {
3180 if start.row == end.row {
3181 continue;
3182 }
3183 let range = start..end;
3184 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3185 Err(ix) => indent_ranges.insert(ix, range),
3186 Ok(ix) => {
3187 let prev_range = &mut indent_ranges[ix];
3188 prev_range.end = prev_range.end.max(range.end);
3189 }
3190 }
3191 }
3192 }
3193
3194 let mut error_ranges = Vec::<Range<Point>>::new();
3195 let mut matches = self
3196 .syntax
3197 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3198 while let Some(mat) = matches.peek() {
3199 let node = mat.captures[0].node;
3200 let start = Point::from_ts_point(node.start_position());
3201 let end = Point::from_ts_point(node.end_position());
3202 let range = start..end;
3203 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3204 Ok(ix) | Err(ix) => ix,
3205 };
3206 let mut end_ix = ix;
3207 while let Some(existing_range) = error_ranges.get(end_ix) {
3208 if existing_range.end < end {
3209 end_ix += 1;
3210 } else {
3211 break;
3212 }
3213 }
3214 error_ranges.splice(ix..end_ix, [range]);
3215 matches.advance();
3216 }
3217
3218 outdent_positions.sort();
3219 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and set its end to the outdent position.
3222 if let Some(range_to_truncate) = indent_ranges
3223 .iter_mut()
3224 .filter(|indent_range| indent_range.contains(&outdent_position))
3225 .next_back()
3226 {
3227 range_to_truncate.end = outdent_position;
3228 }
3229 }
3230
3231 start_positions.sort_by_key(|b| b.start);
3232
        // Find the suggested indentation increases and decreases based on regexes.
3234 let mut regex_outdent_map = HashMap::default();
3235 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3236 let mut start_positions_iter = start_positions.iter().peekable();
3237
3238 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3239 self.for_each_line(
3240 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3241 ..Point::new(row_range.end, 0),
3242 |row, line| {
3243 if config
3244 .decrease_indent_pattern
3245 .as_ref()
3246 .is_some_and(|regex| regex.is_match(line))
3247 {
3248 indent_change_rows.push((row, Ordering::Less));
3249 }
3250 if config
3251 .increase_indent_pattern
3252 .as_ref()
3253 .is_some_and(|regex| regex.is_match(line))
3254 {
3255 indent_change_rows.push((row + 1, Ordering::Greater));
3256 }
3257 while let Some(pos) = start_positions_iter.peek() {
3258 if pos.start.row < row {
3259 let pos = start_positions_iter.next().unwrap();
3260 last_seen_suffix
3261 .entry(pos.suffix.to_string())
3262 .or_default()
3263 .push(pos.start);
3264 } else {
3265 break;
3266 }
3267 }
3268 for rule in &config.decrease_indent_patterns {
3269 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3270 let row_start_column = self.indent_size_for_line(row).len;
3271 let basis_row = rule
3272 .valid_after
3273 .iter()
3274 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3275 .flatten()
3276 .filter(|start_point| start_point.column <= row_start_column)
3277 .max_by_key(|start_point| start_point.row);
3278 if let Some(outdent_to_row) = basis_row {
3279 regex_outdent_map.insert(row, outdent_to_row.row);
3280 }
3281 break;
3282 }
3283 }
3284 },
3285 );
3286
3287 let mut indent_changes = indent_change_rows.into_iter().peekable();
3288 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3289 prev_non_blank_row.unwrap_or(0)
3290 } else {
3291 row_range.start.saturating_sub(1)
3292 };
3293
3294 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3295 Some(row_range.map(move |row| {
3296 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3297
3298 let mut indent_from_prev_row = false;
3299 let mut outdent_from_prev_row = false;
3300 let mut outdent_to_row = u32::MAX;
3301 let mut from_regex = false;
3302
3303 while let Some((indent_row, delta)) = indent_changes.peek() {
3304 match indent_row.cmp(&row) {
3305 Ordering::Equal => match delta {
3306 Ordering::Less => {
3307 from_regex = true;
3308 outdent_from_prev_row = true
3309 }
3310 Ordering::Greater => {
3311 indent_from_prev_row = true;
3312 from_regex = true
3313 }
3314 _ => {}
3315 },
3316
3317 Ordering::Greater => break,
3318 Ordering::Less => {}
3319 }
3320
3321 indent_changes.next();
3322 }
3323
3324 for range in &indent_ranges {
3325 if range.start.row >= row {
3326 break;
3327 }
3328 if range.start.row == prev_row && range.end > row_start {
3329 indent_from_prev_row = true;
3330 }
3331 if range.end > prev_row_start && range.end <= row_start {
3332 outdent_to_row = outdent_to_row.min(range.start.row);
3333 }
3334 }
3335
3336 if let Some(basis_row) = regex_outdent_map.get(&row) {
3337 indent_from_prev_row = false;
3338 outdent_to_row = *basis_row;
3339 from_regex = true;
3340 }
3341
3342 let within_error = error_ranges
3343 .iter()
3344 .any(|e| e.start.row < row && e.end > row_start);
3345
3346 let suggestion = if outdent_to_row == prev_row
3347 || (outdent_from_prev_row && indent_from_prev_row)
3348 {
3349 Some(IndentSuggestion {
3350 basis_row: prev_row,
3351 delta: Ordering::Equal,
3352 within_error: within_error && !from_regex,
3353 })
3354 } else if indent_from_prev_row {
3355 Some(IndentSuggestion {
3356 basis_row: prev_row,
3357 delta: Ordering::Greater,
3358 within_error: within_error && !from_regex,
3359 })
3360 } else if outdent_to_row < prev_row {
3361 Some(IndentSuggestion {
3362 basis_row: outdent_to_row,
3363 delta: Ordering::Equal,
3364 within_error: within_error && !from_regex,
3365 })
3366 } else if outdent_from_prev_row {
3367 Some(IndentSuggestion {
3368 basis_row: prev_row,
3369 delta: Ordering::Less,
3370 within_error: within_error && !from_regex,
3371 })
3372 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3373 {
3374 Some(IndentSuggestion {
3375 basis_row: prev_row,
3376 delta: Ordering::Equal,
3377 within_error: within_error && !from_regex,
3378 })
3379 } else {
3380 None
3381 };
3382
3383 prev_row = row;
3384 prev_row_start = row_start;
3385 suggestion
3386 }))
3387 }
3388
3389 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3390 while row > 0 {
3391 row -= 1;
3392 if !self.is_line_blank(row) {
3393 return Some(row);
3394 }
3395 }
3396 None
3397 }
3398
3399 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3400 let captures = self.syntax.captures(range, &self.text, |grammar| {
3401 grammar
3402 .highlights_config
3403 .as_ref()
3404 .map(|config| &config.query)
3405 });
3406 let highlight_maps = captures
3407 .grammars()
3408 .iter()
3409 .map(|grammar| grammar.highlight_map())
3410 .collect();
3411 (captures, highlight_maps)
3412 }
3413
3414 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3415 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3416 /// returned in chunks where each chunk has a single syntax highlighting style and
3417 /// diagnostic status.
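    ///
    /// A minimal sketch (not compiled as a doctest), assuming a `BufferSnapshot`
    /// named `snapshot`; the chunk field names shown are assumptions based on the
    /// `Chunk` type used elsewhere in this crate:
    ///
    /// ```ignore
    /// let end = snapshot.len().min(100);
    /// for chunk in snapshot.chunks(0..end, true) {
    ///     // `chunk.text` is the raw text of this run; `chunk.syntax_highlight_id`,
    ///     // when present, identifies its highlight style.
    ///     print!("{}", chunk.text);
    /// }
    /// ```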
3418 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3419 let range = range.start.to_offset(self)..range.end.to_offset(self);
3420
3421 let mut syntax = None;
3422 if language_aware {
3423 syntax = Some(self.get_highlights(range.clone()));
3424 }
3425 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3426 let diagnostics = language_aware;
3427 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3428 }
3429
3430 pub fn highlighted_text_for_range<T: ToOffset>(
3431 &self,
3432 range: Range<T>,
3433 override_style: Option<HighlightStyle>,
3434 syntax_theme: &SyntaxTheme,
3435 ) -> HighlightedText {
3436 HighlightedText::from_buffer_range(
3437 range,
3438 &self.text,
3439 &self.syntax,
3440 override_style,
3441 syntax_theme,
3442 )
3443 }
3444
3445 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3447 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3448 let mut line = String::new();
3449 let mut row = range.start.row;
3450 for chunk in self
3451 .as_rope()
3452 .chunks_in_range(range.to_offset(self))
3453 .chain(["\n"])
3454 {
3455 for (newline_ix, text) in chunk.split('\n').enumerate() {
3456 if newline_ix > 0 {
3457 callback(row, &line);
3458 row += 1;
3459 line.clear();
3460 }
3461 line.push_str(text);
3462 }
3463 }
3464 }
3465
3466 /// Iterates over every [`SyntaxLayer`] in the buffer.
3467 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3468 self.syntax_layers_for_range(0..self.len(), true)
3469 }
3470
3471 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3472 let offset = position.to_offset(self);
3473 self.syntax_layers_for_range(offset..offset, false)
3474 .filter(|l| {
3475 if let Some(ranges) = l.included_sub_ranges {
3476 ranges.iter().any(|range| {
3477 let start = range.start.to_offset(self);
3478 start <= offset && {
3479 let end = range.end.to_offset(self);
3480 offset < end
3481 }
3482 })
3483 } else {
3484 l.node().start_byte() <= offset && l.node().end_byte() > offset
3485 }
3486 })
3487 .last()
3488 }
3489
3490 pub fn syntax_layers_for_range<D: ToOffset>(
3491 &self,
3492 range: Range<D>,
3493 include_hidden: bool,
3494 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3495 self.syntax
3496 .layers_for_range(range, &self.text, include_hidden)
3497 }
3498
3499 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3500 &self,
3501 range: Range<D>,
3502 ) -> Option<SyntaxLayer<'_>> {
3503 let range = range.to_offset(self);
3504 self.syntax
3505 .layers_for_range(range, &self.text, false)
3506 .max_by(|a, b| {
3507 if a.depth != b.depth {
3508 a.depth.cmp(&b.depth)
3509 } else if a.offset.0 != b.offset.0 {
3510 a.offset.0.cmp(&b.offset.0)
3511 } else {
3512 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3513 }
3514 })
3515 }
3516
3517 /// Returns the main [`Language`].
3518 pub fn language(&self) -> Option<&Arc<Language>> {
3519 self.language.as_ref()
3520 }
3521
3522 /// Returns the [`Language`] at the given location.
3523 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3524 self.syntax_layer_at(position)
3525 .map(|info| info.language)
3526 .or(self.language.as_ref())
3527 }
3528
3529 /// Returns the settings for the language at the given location.
3530 pub fn settings_at<'a, D: ToOffset>(
3531 &'a self,
3532 position: D,
3533 cx: &'a App,
3534 ) -> Cow<'a, LanguageSettings> {
3535 language_settings(
3536 self.language_at(position).map(|l| l.name()),
3537 self.file.as_ref(),
3538 cx,
3539 )
3540 }
3541
3542 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3543 CharClassifier::new(self.language_scope_at(point))
3544 }
3545
3546 /// Returns the [`LanguageScope`] at the given location.
3547 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3548 let offset = position.to_offset(self);
3549 let mut scope = None;
3550 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3551
3552 // Use the layer that has the smallest node intersecting the given point.
3553 for layer in self
3554 .syntax
3555 .layers_for_range(offset..offset, &self.text, false)
3556 {
3557 let mut cursor = layer.node().walk();
3558
3559 let mut range = None;
3560 loop {
3561 let child_range = cursor.node().byte_range();
3562 if !child_range.contains(&offset) {
3563 break;
3564 }
3565
3566 range = Some(child_range);
3567 if cursor.goto_first_child_for_byte(offset).is_none() {
3568 break;
3569 }
3570 }
3571
3572 if let Some(range) = range
3573 && smallest_range_and_depth.as_ref().is_none_or(
3574 |(smallest_range, smallest_range_depth)| {
3575 if layer.depth > *smallest_range_depth {
3576 true
3577 } else if layer.depth == *smallest_range_depth {
3578 range.len() < smallest_range.len()
3579 } else {
3580 false
3581 }
3582 },
3583 )
3584 {
3585 smallest_range_and_depth = Some((range, layer.depth));
3586 scope = Some(LanguageScope {
3587 language: layer.language.clone(),
3588 override_id: layer.override_id(offset, &self.text),
3589 });
3590 }
3591 }
3592
3593 scope.or_else(|| {
3594 self.language.clone().map(|language| LanguageScope {
3595 language,
3596 override_id: None,
3597 })
3598 })
3599 }
3600
3601 /// Returns a tuple of the range and character kind of the word
3602 /// surrounding the given position.
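///
/// # Example
///
/// A rough sketch (not compiled as a doctest), assuming `snapshot: &BufferSnapshot` and a byte
/// offset `offset` that falls inside a word:
///
/// ```ignore
/// let (range, kind) = snapshot.surrounding_word(offset, None);
/// // `kind` is the character kind of the word, if any.
/// let word: String = snapshot.text_for_range(range).collect();
/// ```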
3603 pub fn surrounding_word<T: ToOffset>(
3604 &self,
3605 start: T,
3606 scope_context: Option<CharScopeContext>,
3607 ) -> (Range<usize>, Option<CharKind>) {
3608 let mut start = start.to_offset(self);
3609 let mut end = start;
3610 let mut next_chars = self.chars_at(start).take(128).peekable();
3611 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3612
3613 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3614 let word_kind = cmp::max(
3615 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3616 next_chars.peek().copied().map(|c| classifier.kind(c)),
3617 );
3618
3619 for ch in prev_chars {
3620 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3621 start -= ch.len_utf8();
3622 } else {
3623 break;
3624 }
3625 }
3626
3627 for ch in next_chars {
3628 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3629 end += ch.len_utf8();
3630 } else {
3631 break;
3632 }
3633 }
3634
3635 (start..end, word_kind)
3636 }
3637
3638 /// Moves the [`tree_sitter::TreeCursor`] to the smallest descendant or ancestor syntax node enclosing the given
3639 /// range. When `require_larger` is true, the node found must be larger than the query range.
3640 ///
3641 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3642 /// be moved to the root of the tree.
3643 fn goto_node_enclosing_range(
3644 cursor: &mut tree_sitter::TreeCursor,
3645 query_range: &Range<usize>,
3646 require_larger: bool,
3647 ) -> bool {
3648 let mut ascending = false;
3649 loop {
3650 let mut range = cursor.node().byte_range();
3651 if query_range.is_empty() {
3652 // When the query range is empty and the current node starts after it, move to the
3653 // previous sibling to find the containing node.
3654 if range.start > query_range.start {
3655 cursor.goto_previous_sibling();
3656 range = cursor.node().byte_range();
3657 }
3658 } else {
3659 // When the query range is non-empty and the current node ends exactly at the start,
3660 // move to the next sibling to find a node that extends beyond the start.
3661 if range.end == query_range.start {
3662 cursor.goto_next_sibling();
3663 range = cursor.node().byte_range();
3664 }
3665 }
3666
3667 let encloses = range.contains_inclusive(query_range)
3668 && (!require_larger || range.len() > query_range.len());
3669 if !encloses {
3670 ascending = true;
3671 if !cursor.goto_parent() {
3672 return false;
3673 }
3674 continue;
3675 } else if ascending {
3676 return true;
3677 }
3678
3679 // Descend into the current node.
3680 if cursor
3681 .goto_first_child_for_byte(query_range.start)
3682 .is_none()
3683 {
3684 return true;
3685 }
3686 }
3687 }
3688
3689 pub fn syntax_ancestor<'a, T: ToOffset>(
3690 &'a self,
3691 range: Range<T>,
3692 ) -> Option<tree_sitter::Node<'a>> {
3693 let range = range.start.to_offset(self)..range.end.to_offset(self);
3694 let mut result: Option<tree_sitter::Node<'a>> = None;
3695 for layer in self
3696 .syntax
3697 .layers_for_range(range.clone(), &self.text, true)
3698 {
3699 let mut cursor = layer.node().walk();
3700
3701 // Find the node that both contains the range and is larger than it.
3702 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3703 continue;
3704 }
3705
3706 let left_node = cursor.node();
3707 let mut layer_result = left_node;
3708
3709 // For an empty range, try to find another node immediately to the right of the range.
3710 if left_node.end_byte() == range.start {
3711 let mut right_node = None;
3712 while !cursor.goto_next_sibling() {
3713 if !cursor.goto_parent() {
3714 break;
3715 }
3716 }
3717
3718 while cursor.node().start_byte() == range.start {
3719 right_node = Some(cursor.node());
3720 if !cursor.goto_first_child() {
3721 break;
3722 }
3723 }
3724
3725 // If there is a candidate node on both sides of the (empty) range, then
3726 // decide between the two by favoring a named node over an anonymous token.
3727 // If both nodes are the same in that regard, favor the right one.
3728 if let Some(right_node) = right_node
3729 && (right_node.is_named() || !left_node.is_named())
3730 {
3731 layer_result = right_node;
3732 }
3733 }
3734
3735 if let Some(previous_result) = &result
3736 && previous_result.byte_range().len() < layer_result.byte_range().len()
3737 {
3738 continue;
3739 }
3740 result = Some(layer_result);
3741 }
3742
3743 result
3744 }
3745
3746 /// Find the previous sibling syntax node at the given range.
3747 ///
3748 /// This function locates the syntax node that precedes the node containing
3749 /// the given range. It searches hierarchically by:
3750 /// 1. Finding the node that contains the given range
3751 /// 2. Looking for the previous sibling at the same tree level
3752 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3753 ///
3754 /// Returns `None` if there is no previous sibling at any ancestor level.
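///
/// # Example
///
/// A hedged sketch (not compiled as a doctest), assuming `snapshot: &BufferSnapshot` and a byte
/// `offset` inside some syntax node:
///
/// ```ignore
/// if let Some(prev) = snapshot.syntax_prev_sibling(offset..offset) {
///     println!("previous sibling kind: {}", prev.kind());
/// }
/// ```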
3755 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3756 &'a self,
3757 range: Range<T>,
3758 ) -> Option<tree_sitter::Node<'a>> {
3759 let range = range.start.to_offset(self)..range.end.to_offset(self);
3760 let mut result: Option<tree_sitter::Node<'a>> = None;
3761
3762 for layer in self
3763 .syntax
3764 .layers_for_range(range.clone(), &self.text, true)
3765 {
3766 let mut cursor = layer.node().walk();
3767
3768 // Find the node that contains the range
3769 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3770 continue;
3771 }
3772
3773 // Look for the previous sibling, moving up ancestor levels if needed
3774 loop {
3775 if cursor.goto_previous_sibling() {
3776 let layer_result = cursor.node();
3777
3778 if let Some(previous_result) = &result {
3779 if previous_result.byte_range().end < layer_result.byte_range().end {
3780 continue;
3781 }
3782 }
3783 result = Some(layer_result);
3784 break;
3785 }
3786
3787 // No sibling found at this level, try moving up to parent
3788 if !cursor.goto_parent() {
3789 break;
3790 }
3791 }
3792 }
3793
3794 result
3795 }
3796
3797 /// Find the next sibling syntax node at the given range.
3798 ///
3799 /// This function locates the syntax node that follows the node containing
3800 /// the given range. It searches hierarchically by:
3801 /// 1. Finding the node that contains the given range
3802 /// 2. Looking for the next sibling at the same tree level
3803 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3804 ///
3805 /// Returns `None` if there is no next sibling at any ancestor level.
3806 pub fn syntax_next_sibling<'a, T: ToOffset>(
3807 &'a self,
3808 range: Range<T>,
3809 ) -> Option<tree_sitter::Node<'a>> {
3810 let range = range.start.to_offset(self)..range.end.to_offset(self);
3811 let mut result: Option<tree_sitter::Node<'a>> = None;
3812
3813 for layer in self
3814 .syntax
3815 .layers_for_range(range.clone(), &self.text, true)
3816 {
3817 let mut cursor = layer.node().walk();
3818
3819 // Find the node that contains the range
3820 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3821 continue;
3822 }
3823
3824 // Look for the next sibling, moving up ancestor levels if needed
3825 loop {
3826 if cursor.goto_next_sibling() {
3827 let layer_result = cursor.node();
3828
3829 if let Some(previous_result) = &result {
3830 if previous_result.byte_range().start > layer_result.byte_range().start {
3831 continue;
3832 }
3833 }
3834 result = Some(layer_result);
3835 break;
3836 }
3837
3838 // No sibling found at this level, try moving up to parent
3839 if !cursor.goto_parent() {
3840 break;
3841 }
3842 }
3843 }
3844
3845 result
3846 }
3847
3848 /// Returns the root syntax node within the given row
3849 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3850 let start_offset = position.to_offset(self);
3851
3852 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3853
3854 let layer = self
3855 .syntax
3856 .layers_for_range(start_offset..start_offset, &self.text, true)
3857 .next()?;
3858
3859 let mut cursor = layer.node().walk();
3860
3861 // Descend to the first leaf that touches the start of the range.
3862 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3863 if cursor.node().end_byte() == start_offset {
3864 cursor.goto_next_sibling();
3865 }
3866 }
3867
3868 // Ascend to the root node within the same row.
3869 while cursor.goto_parent() {
3870 if cursor.node().start_position().row != row {
3871 break;
3872 }
3873 }
3874
3875 Some(cursor.node())
3876 }
3877
3878 /// Returns the outline for the buffer.
3879 ///
3880 /// This method allows passing an optional [`SyntaxTheme`] to
3881 /// syntax-highlight the returned symbols.
3882 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3883 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3884 }
3885
3886 /// Returns all the symbols that contain the given position.
3887 ///
3888 /// This method allows passing an optional [`SyntaxTheme`] to
3889 /// syntax-highlight the returned symbols.
3890 pub fn symbols_containing<T: ToOffset>(
3891 &self,
3892 position: T,
3893 theme: Option<&SyntaxTheme>,
3894 ) -> Vec<OutlineItem<Anchor>> {
3895 let position = position.to_offset(self);
3896 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3897 let end = self.clip_offset(position + 1, Bias::Right);
3898 let mut items = self.outline_items_containing(start..end, false, theme);
3899 let mut prev_depth = None;
3900 items.retain(|item| {
3901 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3902 prev_depth = Some(item.depth);
3903 result
3904 });
3905 items
3906 }
3907
3908 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3909 let range = range.to_offset(self);
3910 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3911 grammar.outline_config.as_ref().map(|c| &c.query)
3912 });
3913 let configs = matches
3914 .grammars()
3915 .iter()
3916 .map(|g| g.outline_config.as_ref().unwrap())
3917 .collect::<Vec<_>>();
3918
3919 while let Some(mat) = matches.peek() {
3920 let config = &configs[mat.grammar_index];
3921 let containing_item_node = maybe!({
3922 let item_node = mat.captures.iter().find_map(|cap| {
3923 if cap.index == config.item_capture_ix {
3924 Some(cap.node)
3925 } else {
3926 None
3927 }
3928 })?;
3929
3930 let item_byte_range = item_node.byte_range();
3931 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3932 None
3933 } else {
3934 Some(item_node)
3935 }
3936 });
3937
3938 if let Some(item_node) = containing_item_node {
3939 return Some(
3940 Point::from_ts_point(item_node.start_position())
3941 ..Point::from_ts_point(item_node.end_position()),
3942 );
3943 }
3944
3945 matches.advance();
3946 }
3947 None
3948 }
3949
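/// Returns the outline items whose ranges intersect the given range, with depths derived
/// from their containment relationships.
///
/// A hedged usage sketch (not compiled as a doctest), assuming `snapshot: &BufferSnapshot`:
///
/// ```ignore
/// // Print an indented outline of the whole buffer, without syntax highlighting.
/// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
///     println!("{}{}", "  ".repeat(item.depth), item.text);
/// }
/// ```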
3950 pub fn outline_items_containing<T: ToOffset>(
3951 &self,
3952 range: Range<T>,
3953 include_extra_context: bool,
3954 theme: Option<&SyntaxTheme>,
3955 ) -> Vec<OutlineItem<Anchor>> {
3956 self.outline_items_containing_internal(
3957 range,
3958 include_extra_context,
3959 theme,
3960 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3961 )
3962 }
3963
3964 pub fn outline_items_as_points_containing<T: ToOffset>(
3965 &self,
3966 range: Range<T>,
3967 include_extra_context: bool,
3968 theme: Option<&SyntaxTheme>,
3969 ) -> Vec<OutlineItem<Point>> {
3970 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3971 range
3972 })
3973 }
3974
3975 fn outline_items_containing_internal<T: ToOffset, U>(
3976 &self,
3977 range: Range<T>,
3978 include_extra_context: bool,
3979 theme: Option<&SyntaxTheme>,
3980 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3981 ) -> Vec<OutlineItem<U>> {
3982 let range = range.to_offset(self);
3983 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3984 grammar.outline_config.as_ref().map(|c| &c.query)
3985 });
3986
3987 let mut items = Vec::new();
3988 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3989 while let Some(mat) = matches.peek() {
3990 let config = matches.grammars()[mat.grammar_index]
3991 .outline_config
3992 .as_ref()
3993 .unwrap();
3994 if let Some(item) =
3995 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3996 {
3997 items.push(item);
3998 } else if let Some(capture) = mat
3999 .captures
4000 .iter()
4001 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4002 {
4003 let capture_range = capture.node.start_position()..capture.node.end_position();
4004 let mut capture_row_range =
4005 capture_range.start.row as u32..capture_range.end.row as u32;
4006 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4007 {
4008 capture_row_range.end -= 1;
4009 }
4010 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4011 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4012 last_row_range.end = capture_row_range.end;
4013 } else {
4014 annotation_row_ranges.push(capture_row_range);
4015 }
4016 } else {
4017 annotation_row_ranges.push(capture_row_range);
4018 }
4019 }
4020 matches.advance();
4021 }
4022
4023 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4024
4025 // Assign depths based on containment relationships and convert the ranges using `range_callback`.
4026 let mut item_ends_stack = Vec::<Point>::new();
4027 let mut anchor_items = Vec::new();
4028 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4029 for item in items {
4030 while let Some(last_end) = item_ends_stack.last().copied() {
4031 if last_end < item.range.end {
4032 item_ends_stack.pop();
4033 } else {
4034 break;
4035 }
4036 }
4037
4038 let mut annotation_row_range = None;
4039 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4040 let row_preceding_item = item.range.start.row.saturating_sub(1);
4041 if next_annotation_row_range.end < row_preceding_item {
4042 annotation_row_ranges.next();
4043 } else {
4044 if next_annotation_row_range.end == row_preceding_item {
4045 annotation_row_range = Some(next_annotation_row_range.clone());
4046 annotation_row_ranges.next();
4047 }
4048 break;
4049 }
4050 }
4051
4052 anchor_items.push(OutlineItem {
4053 depth: item_ends_stack.len(),
4054 range: range_callback(self, item.range.clone()),
4055 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4056 text: item.text,
4057 highlight_ranges: item.highlight_ranges,
4058 name_ranges: item.name_ranges,
4059 body_range: item.body_range.map(|r| range_callback(self, r)),
4060 annotation_range: annotation_row_range.map(|annotation_range| {
4061 let point_range = Point::new(annotation_range.start, 0)
4062 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4063 range_callback(self, point_range)
4064 }),
4065 });
4066 item_ends_stack.push(item.range.end);
4067 }
4068
4069 anchor_items
4070 }
4071
4072 fn next_outline_item(
4073 &self,
4074 config: &OutlineConfig,
4075 mat: &SyntaxMapMatch,
4076 range: &Range<usize>,
4077 include_extra_context: bool,
4078 theme: Option<&SyntaxTheme>,
4079 ) -> Option<OutlineItem<Point>> {
4080 let item_node = mat.captures.iter().find_map(|cap| {
4081 if cap.index == config.item_capture_ix {
4082 Some(cap.node)
4083 } else {
4084 None
4085 }
4086 })?;
4087
4088 let item_byte_range = item_node.byte_range();
4089 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4090 return None;
4091 }
4092 let item_point_range = Point::from_ts_point(item_node.start_position())
4093 ..Point::from_ts_point(item_node.end_position());
4094
4095 let mut open_point = None;
4096 let mut close_point = None;
4097
4098 let mut buffer_ranges = Vec::new();
4099 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4100 let mut range = node.start_byte()..node.end_byte();
4101 let start = node.start_position();
4102 if node.end_position().row > start.row {
4103 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4104 }
4105
4106 if !range.is_empty() {
4107 buffer_ranges.push((range, node_is_name));
4108 }
4109 };
4110
4111 for capture in mat.captures {
4112 if capture.index == config.name_capture_ix {
4113 add_to_buffer_ranges(capture.node, true);
4114 } else if Some(capture.index) == config.context_capture_ix
4115 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4116 {
4117 add_to_buffer_ranges(capture.node, false);
4118 } else {
4119 if Some(capture.index) == config.open_capture_ix {
4120 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4121 } else if Some(capture.index) == config.close_capture_ix {
4122 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4123 }
4124 }
4125 }
4126
4127 if buffer_ranges.is_empty() {
4128 return None;
4129 }
4130 let source_range_for_text =
4131 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4132
4133 let mut text = String::new();
4134 let mut highlight_ranges = Vec::new();
4135 let mut name_ranges = Vec::new();
4136 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4137 let mut last_buffer_range_end = 0;
4138 for (buffer_range, is_name) in buffer_ranges {
4139 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4140 if space_added {
4141 text.push(' ');
4142 }
4143 let before_append_len = text.len();
4144 let mut offset = buffer_range.start;
4145 chunks.seek(buffer_range.clone());
4146 for mut chunk in chunks.by_ref() {
4147 if chunk.text.len() > buffer_range.end - offset {
4148 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4149 offset = buffer_range.end;
4150 } else {
4151 offset += chunk.text.len();
4152 }
4153 let style = chunk
4154 .syntax_highlight_id
4155 .zip(theme)
4156 .and_then(|(highlight, theme)| highlight.style(theme));
4157 if let Some(style) = style {
4158 let start = text.len();
4159 let end = start + chunk.text.len();
4160 highlight_ranges.push((start..end, style));
4161 }
4162 text.push_str(chunk.text);
4163 if offset >= buffer_range.end {
4164 break;
4165 }
4166 }
4167 if is_name {
4168 let after_append_len = text.len();
4169 let start = if space_added && !name_ranges.is_empty() {
4170 before_append_len - 1
4171 } else {
4172 before_append_len
4173 };
4174 name_ranges.push(start..after_append_len);
4175 }
4176 last_buffer_range_end = buffer_range.end;
4177 }
4178
4179 Some(OutlineItem {
4180 depth: 0, // We'll calculate the depth later
4181 range: item_point_range,
4182 source_range_for_text: source_range_for_text.to_point(self),
4183 text,
4184 highlight_ranges,
4185 name_ranges,
4186 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4187 annotation_range: None,
4188 })
4189 }
4190
4191 pub fn function_body_fold_ranges<T: ToOffset>(
4192 &self,
4193 within: Range<T>,
4194 ) -> impl Iterator<Item = Range<usize>> + '_ {
4195 self.text_object_ranges(within, TreeSitterOptions::default())
4196 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4197 }
4198
4199 /// For each grammar in the language, runs the provided
4200 /// [`tree_sitter::Query`] against the given range.
4201 pub fn matches(
4202 &self,
4203 range: Range<usize>,
4204 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4205 ) -> SyntaxMapMatches<'_> {
4206 self.syntax.matches(range, self, query)
4207 }
4208
4209 /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
4210 /// Hence, it may return more bracket pairs than the given range contains.
4211 ///
4212 /// Chunks listed in `known_chunks` are skipped if they are still up to date.
4213 /// The resulting bracket match collections are not ordered.
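///
/// # Example
///
/// A hedged sketch (not compiled as a doctest), assuming `snapshot: &BufferSnapshot`; passing
/// `None` for `known_chunks` recomputes every applicable chunk:
///
/// ```ignore
/// for (row_range, brackets) in snapshot.fetch_bracket_ranges(0..snapshot.len(), None) {
///     println!("rows {:?}: {} bracket pairs", row_range, brackets.len());
/// }
/// ```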
4214 pub fn fetch_bracket_ranges(
4215 &self,
4216 range: Range<usize>,
4217 known_chunks: Option<(&Global, &HashSet<Range<BufferRow>>)>,
4218 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4219 let mut tree_sitter_data = self.latest_tree_sitter_data().clone();
4220
4221 let known_chunks = match known_chunks {
4222 Some((known_version, known_chunks)) => {
4223 if !tree_sitter_data
4224 .chunks
4225 .version()
4226 .changed_since(known_version)
4227 {
4228 known_chunks.clone()
4229 } else {
4230 HashSet::default()
4231 }
4232 }
4233 None => HashSet::default(),
4234 };
4235
4236 let mut new_bracket_matches = HashMap::default();
4237 let mut all_bracket_matches = HashMap::default();
4238
4239 for chunk in tree_sitter_data
4240 .chunks
4241 .applicable_chunks(&[self.anchor_before(range.start)..self.anchor_after(range.end)])
4242 {
4243 if known_chunks.contains(&chunk.row_range()) {
4244 continue;
4245 }
4246 let Some(chunk_range) = tree_sitter_data.chunks.chunk_range(chunk) else {
4247 continue;
4248 };
4249 let chunk_range = chunk_range.to_offset(&tree_sitter_data.chunks.snapshot);
4250
4251 let bracket_matches = match tree_sitter_data.brackets_by_chunks[chunk.id].take() {
4252 Some(cached_brackets) => cached_brackets,
4253 None => {
4254 let mut all_brackets = Vec::new();
4255 let mut opens = Vec::new();
4256 let mut color_pairs = Vec::new();
4257
4258 let mut matches =
4259 self.syntax
4260 .matches(chunk_range.clone(), &self.text, |grammar| {
4261 grammar.brackets_config.as_ref().map(|c| &c.query)
4262 });
4263 let configs = matches
4264 .grammars()
4265 .iter()
4266 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4267 .collect::<Vec<_>>();
4268
4269 while let Some(mat) = matches.peek() {
4270 let mut open = None;
4271 let mut close = None;
4272 let syntax_layer_depth = mat.depth;
4273 let config = configs[mat.grammar_index];
4274 let pattern = &config.patterns[mat.pattern_index];
4275 for capture in mat.captures {
4276 if capture.index == config.open_capture_ix {
4277 open = Some(capture.node.byte_range());
4278 } else if capture.index == config.close_capture_ix {
4279 close = Some(capture.node.byte_range());
4280 }
4281 }
4282
4283 matches.advance();
4284
4285 let Some((open_range, close_range)) = open.zip(close) else {
4286 continue;
4287 };
4288
4289 let bracket_range = open_range.start..=close_range.end;
4290 if !bracket_range.overlaps(&chunk_range) {
4291 continue;
4292 }
4293
4294 let index = all_brackets.len();
4295 all_brackets.push(BracketMatch {
4296 open_range: open_range.clone(),
4297 close_range: close_range.clone(),
4298 newline_only: pattern.newline_only,
4299 syntax_layer_depth,
4300 color_index: None,
4301 });
4302
4303 // Certain languages have "brackets" that are not really brackets, e.g. tags, and such
4304 // a bracket pair matches the entire tag with all of the text inside it.
4305 // For now, avoid coloring any pair in which both brackets are longer than a single character.
4306 // We still need to colorize `<Element/>` bracket pairs, so this check cannot be made stricter.
4307 let should_color = !pattern.rainbow_exclude
4308 && (open_range.len() == 1 || close_range.len() == 1);
4309 if should_color {
4310 opens.push(open_range.clone());
4311 color_pairs.push((open_range, close_range, index));
4312 }
4313 }
4314
4315 opens.sort_by_key(|r| (r.start, r.end));
4316 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4317 color_pairs.sort_by_key(|(_, close, _)| close.end);
4318
4319 let mut open_stack = Vec::new();
4320 let mut open_index = 0;
4321 for (open, close, index) in color_pairs {
4322 while open_index < opens.len() && opens[open_index].start < close.start {
4323 open_stack.push(opens[open_index].clone());
4324 open_index += 1;
4325 }
4326
4327 if open_stack.last() == Some(&open) {
4328 let depth_index = open_stack.len() - 1;
4329 all_brackets[index].color_index = Some(depth_index);
4330 open_stack.pop();
4331 }
4332 }
4333
4334 all_brackets.sort_by_key(|bracket_match| {
4335 (bracket_match.open_range.start, bracket_match.open_range.end)
4336 });
4337 new_bracket_matches.insert(chunk.id, all_brackets.clone());
4338 all_brackets
4339 }
4340 };
4341 all_bracket_matches.insert(chunk.row_range(), bracket_matches);
4342 }
4343
4344 let mut latest_tree_sitter_data = self.latest_tree_sitter_data();
4345 if latest_tree_sitter_data.chunks.version() == &self.version {
4346 for (chunk_id, new_matches) in new_bracket_matches {
4347 let old_chunks = &mut latest_tree_sitter_data.brackets_by_chunks[chunk_id];
4348 if old_chunks.is_none() {
4349 *old_chunks = Some(new_matches);
4350 }
4351 }
4352 }
4353
4354 all_bracket_matches
4355 }
4356
4357 fn latest_tree_sitter_data(&self) -> MutexGuard<'_, RawMutex, TreeSitterData> {
4358 let mut tree_sitter_data = self.tree_sitter_data.lock();
4359 if self
4360 .version
4361 .changed_since(tree_sitter_data.chunks.version())
4362 {
4363 *tree_sitter_data = TreeSitterData::new(self.text.clone());
4364 }
4365 tree_sitter_data
4366 }
4367
4368 pub fn all_bracket_ranges(
4369 &self,
4370 range: Range<usize>,
4371 ) -> impl Iterator<Item = BracketMatch<usize>> {
4372 self.fetch_bracket_ranges(range.clone(), None)
4373 .into_values()
4374 .flatten()
4375 .filter(move |bracket_match| {
4376 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4377 bracket_range.overlaps(&range)
4378 })
4379 }
4380
4381 /// Returns bracket range pairs overlapping or adjacent to `range`
4382 pub fn bracket_ranges<T: ToOffset>(
4383 &self,
4384 range: Range<T>,
4385 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4386 // Find bracket pairs that *inclusively* contain the given range.
4387 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4388 self.all_bracket_ranges(range)
4389 .filter(|pair| !pair.newline_only)
4390 }
4391
4392 pub fn debug_variables_query<T: ToOffset>(
4393 &self,
4394 range: Range<T>,
4395 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4396 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4397
4398 let mut matches = self.syntax.matches_with_options(
4399 range.clone(),
4400 &self.text,
4401 TreeSitterOptions::default(),
4402 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4403 );
4404
4405 let configs = matches
4406 .grammars()
4407 .iter()
4408 .map(|grammar| grammar.debug_variables_config.as_ref())
4409 .collect::<Vec<_>>();
4410
4411 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4412
4413 iter::from_fn(move || {
4414 loop {
4415 while let Some(capture) = captures.pop() {
4416 if capture.0.overlaps(&range) {
4417 return Some(capture);
4418 }
4419 }
4420
4421 let mat = matches.peek()?;
4422
4423 let Some(config) = configs[mat.grammar_index].as_ref() else {
4424 matches.advance();
4425 continue;
4426 };
4427
4428 for capture in mat.captures {
4429 let Some(ix) = config
4430 .objects_by_capture_ix
4431 .binary_search_by_key(&capture.index, |e| e.0)
4432 .ok()
4433 else {
4434 continue;
4435 };
4436 let text_object = config.objects_by_capture_ix[ix].1;
4437 let byte_range = capture.node.byte_range();
4438
4439 let mut found = false;
4440 for (range, existing) in captures.iter_mut() {
4441 if existing == &text_object {
4442 range.start = range.start.min(byte_range.start);
4443 range.end = range.end.max(byte_range.end);
4444 found = true;
4445 break;
4446 }
4447 }
4448
4449 if !found {
4450 captures.push((byte_range, text_object));
4451 }
4452 }
4453
4454 matches.advance();
4455 }
4456 })
4457 }
4458
4459 pub fn text_object_ranges<T: ToOffset>(
4460 &self,
4461 range: Range<T>,
4462 options: TreeSitterOptions,
4463 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4464 let range =
4465 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4466
4467 let mut matches =
4468 self.syntax
4469 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4470 grammar.text_object_config.as_ref().map(|c| &c.query)
4471 });
4472
4473 let configs = matches
4474 .grammars()
4475 .iter()
4476 .map(|grammar| grammar.text_object_config.as_ref())
4477 .collect::<Vec<_>>();
4478
4479 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4480
4481 iter::from_fn(move || {
4482 loop {
4483 while let Some(capture) = captures.pop() {
4484 if capture.0.overlaps(&range) {
4485 return Some(capture);
4486 }
4487 }
4488
4489 let mat = matches.peek()?;
4490
4491 let Some(config) = configs[mat.grammar_index].as_ref() else {
4492 matches.advance();
4493 continue;
4494 };
4495
4496 for capture in mat.captures {
4497 let Some(ix) = config
4498 .text_objects_by_capture_ix
4499 .binary_search_by_key(&capture.index, |e| e.0)
4500 .ok()
4501 else {
4502 continue;
4503 };
4504 let text_object = config.text_objects_by_capture_ix[ix].1;
4505 let byte_range = capture.node.byte_range();
4506
4507 let mut found = false;
4508 for (range, existing) in captures.iter_mut() {
4509 if existing == &text_object {
4510 range.start = range.start.min(byte_range.start);
4511 range.end = range.end.max(byte_range.end);
4512 found = true;
4513 break;
4514 }
4515 }
4516
4517 if !found {
4518 captures.push((byte_range, text_object));
4519 }
4520 }
4521
4522 matches.advance();
4523 }
4524 })
4525 }
4526
4527 /// Returns enclosing bracket ranges containing the given range
4528 pub fn enclosing_bracket_ranges<T: ToOffset>(
4529 &self,
4530 range: Range<T>,
4531 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4532 let range = range.start.to_offset(self)..range.end.to_offset(self);
4533
4534 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4535 let max_depth = result
4536 .iter()
4537 .map(|mat| mat.syntax_layer_depth)
4538 .max()
4539 .unwrap_or(0);
4540 result.into_iter().filter(move |pair| {
4541 pair.open_range.start <= range.start
4542 && pair.close_range.end >= range.end
4543 && pair.syntax_layer_depth == max_depth
4544 })
4545 }
4546
4547 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain it.
4548 ///
4549 /// A `range_filter` can optionally be passed to filter which bracket ranges are considered.
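///
/// # Example
///
/// A hedged sketch (not compiled as a doctest), assuming `snapshot: &BufferSnapshot` and a
/// cursor `offset` located inside some brackets:
///
/// ```ignore
/// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(offset..offset, None) {
///     // `open` and `close` are the byte ranges of the innermost surrounding bracket pair.
///     println!("open: {:?}, close: {:?}", open, close);
/// }
/// ```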
4550 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4551 &self,
4552 range: Range<T>,
4553 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4554 ) -> Option<(Range<usize>, Range<usize>)> {
4555 let range = range.start.to_offset(self)..range.end.to_offset(self);
4556
4557 // Get the ranges of the innermost pair of brackets.
4558 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4559
4560 for pair in self.enclosing_bracket_ranges(range) {
4561 if let Some(range_filter) = range_filter
4562 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4563 {
4564 continue;
4565 }
4566
4567 let len = pair.close_range.end - pair.open_range.start;
4568
4569 if let Some((existing_open, existing_close)) = &result {
4570 let existing_len = existing_close.end - existing_open.start;
4571 if len > existing_len {
4572 continue;
4573 }
4574 }
4575
4576 result = Some((pair.open_range, pair.close_range));
4577 }
4578
4579 result
4580 }
4581
4582 /// Returns anchor ranges for any matches of the redaction query.
4583 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4584 /// will be run on the relevant section of the buffer.
4585 pub fn redacted_ranges<T: ToOffset>(
4586 &self,
4587 range: Range<T>,
4588 ) -> impl Iterator<Item = Range<usize>> + '_ {
4589 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4590 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4591 grammar
4592 .redactions_config
4593 .as_ref()
4594 .map(|config| &config.query)
4595 });
4596
4597 let configs = syntax_matches
4598 .grammars()
4599 .iter()
4600 .map(|grammar| grammar.redactions_config.as_ref())
4601 .collect::<Vec<_>>();
4602
4603 iter::from_fn(move || {
4604 let redacted_range = syntax_matches
4605 .peek()
4606 .and_then(|mat| {
4607 configs[mat.grammar_index].and_then(|config| {
4608 mat.captures
4609 .iter()
4610 .find(|capture| capture.index == config.redaction_capture_ix)
4611 })
4612 })
4613 .map(|mat| mat.node.byte_range());
4614 syntax_matches.advance();
4615 redacted_range
4616 })
4617 }
4618
4619 pub fn injections_intersecting_range<T: ToOffset>(
4620 &self,
4621 range: Range<T>,
4622 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4623 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4624
4625 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4626 grammar
4627 .injection_config
4628 .as_ref()
4629 .map(|config| &config.query)
4630 });
4631
4632 let configs = syntax_matches
4633 .grammars()
4634 .iter()
4635 .map(|grammar| grammar.injection_config.as_ref())
4636 .collect::<Vec<_>>();
4637
4638 iter::from_fn(move || {
4639 let ranges = syntax_matches.peek().and_then(|mat| {
4640 let config = &configs[mat.grammar_index]?;
4641 let content_capture_range = mat.captures.iter().find_map(|capture| {
4642 if capture.index == config.content_capture_ix {
4643 Some(capture.node.byte_range())
4644 } else {
4645 None
4646 }
4647 })?;
4648 let language = self.language_at(content_capture_range.start)?;
4649 Some((content_capture_range, language))
4650 });
4651 syntax_matches.advance();
4652 ranges
4653 })
4654 }
4655
4656 pub fn runnable_ranges(
4657 &self,
4658 offset_range: Range<usize>,
4659 ) -> impl Iterator<Item = RunnableRange> + '_ {
4660 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4661 grammar.runnable_config.as_ref().map(|config| &config.query)
4662 });
4663
4664 let test_configs = syntax_matches
4665 .grammars()
4666 .iter()
4667 .map(|grammar| grammar.runnable_config.as_ref())
4668 .collect::<Vec<_>>();
4669
4670 iter::from_fn(move || {
4671 loop {
4672 let mat = syntax_matches.peek()?;
4673
4674 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4675 let mut run_range = None;
4676 let full_range = mat.captures.iter().fold(
4677 Range {
4678 start: usize::MAX,
4679 end: 0,
4680 },
4681 |mut acc, next| {
4682 let byte_range = next.node.byte_range();
4683 if acc.start > byte_range.start {
4684 acc.start = byte_range.start;
4685 }
4686 if acc.end < byte_range.end {
4687 acc.end = byte_range.end;
4688 }
4689 acc
4690 },
4691 );
4692 if full_range.start > full_range.end {
4693 // We did not find a full spanning range of this match.
4694 return None;
4695 }
4696 let extra_captures: SmallVec<[_; 1]> =
4697 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4698 test_configs
4699 .extra_captures
4700 .get(capture.index as usize)
4701 .cloned()
4702 .and_then(|tag_name| match tag_name {
4703 RunnableCapture::Named(name) => {
4704 Some((capture.node.byte_range(), name))
4705 }
4706 RunnableCapture::Run => {
4707 let _ = run_range.insert(capture.node.byte_range());
4708 None
4709 }
4710 })
4711 }));
4712 let run_range = run_range?;
4713 let tags = test_configs
4714 .query
4715 .property_settings(mat.pattern_index)
4716 .iter()
4717 .filter_map(|property| {
4718 if *property.key == *"tag" {
4719 property
4720 .value
4721 .as_ref()
4722 .map(|value| RunnableTag(value.to_string().into()))
4723 } else {
4724 None
4725 }
4726 })
4727 .collect();
4728 let extra_captures = extra_captures
4729 .into_iter()
4730 .map(|(range, name)| {
4731 (
4732 name.to_string(),
4733 self.text_for_range(range).collect::<String>(),
4734 )
4735 })
4736 .collect();
4737 // All tags should have the same range.
4738 Some(RunnableRange {
4739 run_range,
4740 full_range,
4741 runnable: Runnable {
4742 tags,
4743 language: mat.language,
4744 buffer: self.remote_id(),
4745 },
4746 extra_captures,
4747 buffer_id: self.remote_id(),
4748 })
4749 });
4750
4751 syntax_matches.advance();
4752 if test_range.is_some() {
4753 // It's fine to short-circuit when `.peek()?` returns None. We don't want to end this iterator just because
4754 // a match did not contain a run marker, so in that case we loop around to the next match.
4755 return test_range;
4756 }
4757 }
4758 })
4759 }
4760
4761 /// Returns selections for remote peers intersecting the given range.
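///
/// # Example
///
/// A hedged sketch (not compiled as a doctest), assuming `snapshot: &BufferSnapshot`:
///
/// ```ignore
/// let whole_buffer = snapshot.anchor_before(0)..snapshot.anchor_after(snapshot.len());
/// for (replica_id, _line_mode, _cursor_shape, selections) in
///     snapshot.selections_in_range(whole_buffer, true)
/// {
///     println!("replica {:?}: {} selections", replica_id, selections.count());
/// }
/// ```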
4762 #[allow(clippy::type_complexity)]
4763 pub fn selections_in_range(
4764 &self,
4765 range: Range<Anchor>,
4766 include_local: bool,
4767 ) -> impl Iterator<
4768 Item = (
4769 ReplicaId,
4770 bool,
4771 CursorShape,
4772 impl Iterator<Item = &Selection<Anchor>> + '_,
4773 ),
4774 > + '_ {
4775 self.remote_selections
4776 .iter()
4777 .filter(move |(replica_id, set)| {
4778 (include_local || **replica_id != self.text.replica_id())
4779 && !set.selections.is_empty()
4780 })
4781 .map(move |(replica_id, set)| {
4782 let start_ix = match set.selections.binary_search_by(|probe| {
4783 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4784 }) {
4785 Ok(ix) | Err(ix) => ix,
4786 };
4787 let end_ix = match set.selections.binary_search_by(|probe| {
4788 probe.start.cmp(&range.end, self).then(Ordering::Less)
4789 }) {
4790 Ok(ix) | Err(ix) => ix,
4791 };
4792
4793 (
4794 *replica_id,
4795 set.line_mode,
4796 set.cursor_shape,
4797 set.selections[start_ix..end_ix].iter(),
4798 )
4799 })
4800 }
4801
4802 /// Returns whether the buffer contains any diagnostics.
4803 pub fn has_diagnostics(&self) -> bool {
4804 !self.diagnostics.is_empty()
4805 }
4806
4807 /// Returns all the diagnostics intersecting the given range.
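///
/// # Example
///
/// A hedged sketch (not compiled as a doctest), assuming `snapshot: &BufferSnapshot`; the second
/// type parameter selects the coordinate type of the returned ranges (byte offsets here):
///
/// ```ignore
/// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
///     println!("{:?}: {:?}", entry.range, entry.diagnostic.severity);
/// }
/// ```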
4808 pub fn diagnostics_in_range<'a, T, O>(
4809 &'a self,
4810 search_range: Range<T>,
4811 reversed: bool,
4812 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4813 where
4814 T: 'a + Clone + ToOffset,
4815 O: 'a + FromAnchor,
4816 {
4817 let mut iterators: Vec<_> = self
4818 .diagnostics
4819 .iter()
4820 .map(|(_, collection)| {
4821 collection
4822 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4823 .peekable()
4824 })
4825 .collect();
4826
4827 std::iter::from_fn(move || {
4828 let (next_ix, _) = iterators
4829 .iter_mut()
4830 .enumerate()
4831 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4832 .min_by(|(_, a), (_, b)| {
4833 let cmp = a
4834 .range
4835 .start
4836 .cmp(&b.range.start, self)
4837 // when range is equal, sort by diagnostic severity
4838 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4839 // and stabilize order with group_id
4840 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4841 if reversed { cmp.reverse() } else { cmp }
4842 })?;
4843 iterators[next_ix]
4844 .next()
4845 .map(
4846 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4847 diagnostic,
4848 range: FromAnchor::from_anchor(&range.start, self)
4849 ..FromAnchor::from_anchor(&range.end, self),
4850 },
4851 )
4852 })
4853 }
4854
4855 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4856 /// should be used instead.
4857 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4858 &self.diagnostics
4859 }
4860
4861 /// Returns all the diagnostic groups associated with the given
4862 /// language server ID. If no language server ID is provided,
4863 /// all diagnostic groups are returned.
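///
/// # Example
///
/// A hedged sketch (not compiled as a doctest), assuming `snapshot: &BufferSnapshot`:
///
/// ```ignore
/// // Iterate over diagnostic groups from every language server.
/// for (server_id, group) in snapshot.diagnostic_groups(None) {
///     println!("{:?}: {} entries", server_id, group.entries.len());
/// }
/// ```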
4864 pub fn diagnostic_groups(
4865 &self,
4866 language_server_id: Option<LanguageServerId>,
4867 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4868 let mut groups = Vec::new();
4869
4870 if let Some(language_server_id) = language_server_id {
4871 if let Ok(ix) = self
4872 .diagnostics
4873 .binary_search_by_key(&language_server_id, |e| e.0)
4874 {
4875 self.diagnostics[ix]
4876 .1
4877 .groups(language_server_id, &mut groups, self);
4878 }
4879 } else {
4880 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4881 diagnostics.groups(*language_server_id, &mut groups, self);
4882 }
4883 }
4884
4885 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4886 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4887 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4888 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4889 });
4890
4891 groups
4892 }
4893
4894 /// Returns an iterator over the diagnostics for the given group.
4895 pub fn diagnostic_group<O>(
4896 &self,
4897 group_id: usize,
4898 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4899 where
4900 O: FromAnchor + 'static,
4901 {
4902 self.diagnostics
4903 .iter()
4904 .flat_map(move |(_, set)| set.group(group_id, self))
4905 }
4906
4907 /// An integer version number that accounts for all updates besides
4908 /// the buffer's text itself (which is versioned via a version vector).
4909 pub fn non_text_state_update_count(&self) -> usize {
4910 self.non_text_state_update_count
4911 }
4912
4913 /// An integer version that changes when the buffer's syntax changes.
4914 pub fn syntax_update_count(&self) -> usize {
4915 self.syntax.update_count()
4916 }
4917
4918 /// Returns a snapshot of underlying file.
4919 pub fn file(&self) -> Option<&Arc<dyn File>> {
4920 self.file.as_ref()
4921 }
4922
4923 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4924 if let Some(file) = self.file() {
4925 if file.path().file_name().is_none() || include_root {
4926 Some(file.full_path(cx).to_string_lossy().into_owned())
4927 } else {
4928 Some(file.path().display(file.path_style(cx)).to_string())
4929 }
4930 } else {
4931 None
4932 }
4933 }
4934
4935 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4936 let query_str = query.fuzzy_contents;
4937 if query_str.is_some_and(|query| query.is_empty()) {
4938 return BTreeMap::default();
4939 }
4940
4941 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4942 language,
4943 override_id: None,
4944 }));
4945
4946 let mut query_ix = 0;
4947 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4948 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4949
4950 let mut words = BTreeMap::default();
4951 let mut current_word_start_ix = None;
4952 let mut chunk_ix = query.range.start;
4953 for chunk in self.chunks(query.range, false) {
4954 for (i, c) in chunk.text.char_indices() {
4955 let ix = chunk_ix + i;
4956 if classifier.is_word(c) {
4957 if current_word_start_ix.is_none() {
4958 current_word_start_ix = Some(ix);
4959 }
4960
4961 if let Some(query_chars) = &query_chars
4962 && query_ix < query_len
4963 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4964 {
4965 query_ix += 1;
4966 }
4967 continue;
4968 } else if let Some(word_start) = current_word_start_ix.take()
4969 && query_ix == query_len
4970 {
4971 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4972 let mut word_text = self.text_for_range(word_start..ix).peekable();
4973 let first_char = word_text
4974 .peek()
4975 .and_then(|first_chunk| first_chunk.chars().next());
4976 // As a heuristic to reduce useless completions, skip "words" that start with a digit.
4977 if !query.skip_digits
4978 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4979 {
4980 words.insert(word_text.collect(), word_range);
4981 }
4982 }
4983 query_ix = 0;
4984 }
4985 chunk_ix += chunk.text.len();
4986 }
4987
4988 words
4989 }
4990}
4991
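/// Query parameters for `words_in_range`.
///
/// A hedged usage sketch (not compiled as a doctest), assuming `snapshot: &BufferSnapshot`:
///
/// ```ignore
/// // Collect words from the whole buffer that fuzzily contain "cfg", skipping digit-led tokens.
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```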
4992pub struct WordsQuery<'a> {
4993 /// Only returns words that contain all of the characters of the fuzzy string, in order.
4994 pub fuzzy_contents: Option<&'a str>,
4995 /// Skips words that start with a digit.
4996 pub skip_digits: bool,
4997 /// The buffer offset range in which to look for words.
4998 pub range: Range<usize>,
4999}
5000
5001fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5002 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5003}
5004
5005fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5006 let mut result = IndentSize::spaces(0);
5007 for c in text {
5008 let kind = match c {
5009 ' ' => IndentKind::Space,
5010 '\t' => IndentKind::Tab,
5011 _ => break,
5012 };
5013 if result.len == 0 {
5014 result.kind = kind;
5015 }
5016 result.len += 1;
5017 }
5018 result
5019}
5020
5021impl Clone for BufferSnapshot {
5022 fn clone(&self) -> Self {
5023 Self {
5024 text: self.text.clone(),
5025 syntax: self.syntax.clone(),
5026 file: self.file.clone(),
5027 remote_selections: self.remote_selections.clone(),
5028 diagnostics: self.diagnostics.clone(),
5029 language: self.language.clone(),
5030 tree_sitter_data: self.tree_sitter_data.clone(),
5031 non_text_state_update_count: self.non_text_state_update_count,
5032 }
5033 }
5034}
5035
5036impl Deref for BufferSnapshot {
5037 type Target = text::BufferSnapshot;
5038
5039 fn deref(&self) -> &Self::Target {
5040 &self.text
5041 }
5042}
5043
5044unsafe impl Send for BufferChunks<'_> {}
5045
5046impl<'a> BufferChunks<'a> {
5047 pub(crate) fn new(
5048 text: &'a Rope,
5049 range: Range<usize>,
5050 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5051 diagnostics: bool,
5052 buffer_snapshot: Option<&'a BufferSnapshot>,
5053 ) -> Self {
5054 let mut highlights = None;
5055 if let Some((captures, highlight_maps)) = syntax {
5056 highlights = Some(BufferChunkHighlights {
5057 captures,
5058 next_capture: None,
5059 stack: Default::default(),
5060 highlight_maps,
5061 })
5062 }
5063
5064 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5065 let chunks = text.chunks_in_range(range.clone());
5066
5067 let mut this = BufferChunks {
5068 range,
5069 buffer_snapshot,
5070 chunks,
5071 diagnostic_endpoints,
5072 error_depth: 0,
5073 warning_depth: 0,
5074 information_depth: 0,
5075 hint_depth: 0,
5076 unnecessary_depth: 0,
5077 underline: true,
5078 highlights,
5079 };
5080 this.initialize_diagnostic_endpoints();
5081 this
5082 }
5083
5084 /// Seeks to the given byte range in the buffer.
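///
/// A hedged sketch (not compiled as a doctest), assuming `snapshot: &BufferSnapshot` over a
/// buffer that is at least 20 bytes long:
///
/// ```ignore
/// let mut chunks = snapshot.chunks(0..snapshot.len(), true);
/// // Re-position the iterator over a sub-range without rebuilding it.
/// chunks.seek(10..20);
/// let text: String = chunks.map(|chunk| chunk.text).collect();
/// ```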
5085 pub fn seek(&mut self, range: Range<usize>) {
5086 let old_range = std::mem::replace(&mut self.range, range.clone());
5087 self.chunks.set_range(self.range.clone());
5088 if let Some(highlights) = self.highlights.as_mut() {
5089 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5090 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5091 highlights
5092 .stack
5093 .retain(|(end_offset, _)| *end_offset > range.start);
5094 if let Some(capture) = &highlights.next_capture
5095 && range.start >= capture.node.start_byte()
5096 {
5097 let next_capture_end = capture.node.end_byte();
5098 if range.start < next_capture_end {
5099 highlights.stack.push((
5100 next_capture_end,
5101 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5102 ));
5103 }
5104 highlights.next_capture.take();
5105 }
5106 } else if let Some(snapshot) = self.buffer_snapshot {
5107 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5108 *highlights = BufferChunkHighlights {
5109 captures,
5110 next_capture: None,
5111 stack: Default::default(),
5112 highlight_maps,
5113 };
5114 } else {
5115 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5116 // Seeking such BufferChunks is not supported.
5117 debug_assert!(
5118 false,
5119 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5120 );
5121 }
5122
5123 highlights.captures.set_byte_range(self.range.clone());
5124 self.initialize_diagnostic_endpoints();
5125 }
5126 }
5127
5128 fn initialize_diagnostic_endpoints(&mut self) {
5129 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5130 && let Some(buffer) = self.buffer_snapshot
5131 {
5132 let mut diagnostic_endpoints = Vec::new();
5133 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5134 diagnostic_endpoints.push(DiagnosticEndpoint {
5135 offset: entry.range.start,
5136 is_start: true,
5137 severity: entry.diagnostic.severity,
5138 is_unnecessary: entry.diagnostic.is_unnecessary,
5139 underline: entry.diagnostic.underline,
5140 });
5141 diagnostic_endpoints.push(DiagnosticEndpoint {
5142 offset: entry.range.end,
5143 is_start: false,
5144 severity: entry.diagnostic.severity,
5145 is_unnecessary: entry.diagnostic.is_unnecessary,
5146 underline: entry.diagnostic.underline,
5147 });
5148 }
5149 diagnostic_endpoints
5150 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5151 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5152 self.hint_depth = 0;
5153 self.error_depth = 0;
5154 self.warning_depth = 0;
5155 self.information_depth = 0;
5156 }
5157 }
5158
5159 /// The current byte offset in the buffer.
5160 pub fn offset(&self) -> usize {
5161 self.range.start
5162 }
5163
5164 pub fn range(&self) -> Range<usize> {
5165 self.range.clone()
5166 }
5167
5168 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5169 let depth = match endpoint.severity {
5170 DiagnosticSeverity::ERROR => &mut self.error_depth,
5171 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5172 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5173 DiagnosticSeverity::HINT => &mut self.hint_depth,
5174 _ => return,
5175 };
5176 if endpoint.is_start {
5177 *depth += 1;
5178 } else {
5179 *depth -= 1;
5180 }
5181
5182 if endpoint.is_unnecessary {
5183 if endpoint.is_start {
5184 self.unnecessary_depth += 1;
5185 } else {
5186 self.unnecessary_depth -= 1;
5187 }
5188 }
5189 }
5190
5191 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5192 if self.error_depth > 0 {
5193 Some(DiagnosticSeverity::ERROR)
5194 } else if self.warning_depth > 0 {
5195 Some(DiagnosticSeverity::WARNING)
5196 } else if self.information_depth > 0 {
5197 Some(DiagnosticSeverity::INFORMATION)
5198 } else if self.hint_depth > 0 {
5199 Some(DiagnosticSeverity::HINT)
5200 } else {
5201 None
5202 }
5203 }
5204
5205 fn current_code_is_unnecessary(&self) -> bool {
5206 self.unnecessary_depth > 0
5207 }
5208}
5209
5210impl<'a> Iterator for BufferChunks<'a> {
5211 type Item = Chunk<'a>;
5212
5213 fn next(&mut self) -> Option<Self::Item> {
5214 let mut next_capture_start = usize::MAX;
5215 let mut next_diagnostic_endpoint = usize::MAX;
5216
5217 if let Some(highlights) = self.highlights.as_mut() {
5218 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5219 if *parent_capture_end <= self.range.start {
5220 highlights.stack.pop();
5221 } else {
5222 break;
5223 }
5224 }
5225
5226 if highlights.next_capture.is_none() {
5227 highlights.next_capture = highlights.captures.next();
5228 }
5229
5230 while let Some(capture) = highlights.next_capture.as_ref() {
5231 if self.range.start < capture.node.start_byte() {
5232 next_capture_start = capture.node.start_byte();
5233 break;
5234 } else {
5235 let highlight_id =
5236 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5237 highlights
5238 .stack
5239 .push((capture.node.end_byte(), highlight_id));
5240 highlights.next_capture = highlights.captures.next();
5241 }
5242 }
5243 }
5244
5245 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5246 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5247 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5248 if endpoint.offset <= self.range.start {
5249 self.update_diagnostic_depths(endpoint);
5250 diagnostic_endpoints.next();
5251 self.underline = endpoint.underline;
5252 } else {
5253 next_diagnostic_endpoint = endpoint.offset;
5254 break;
5255 }
5256 }
5257 }
5258 self.diagnostic_endpoints = diagnostic_endpoints;
5259
5260 if let Some(ChunkBitmaps {
5261 text: chunk,
5262 chars: chars_map,
5263 tabs,
5264 }) = self.chunks.peek_with_bitmaps()
5265 {
5266 let chunk_start = self.range.start;
5267 let mut chunk_end = (self.chunks.offset() + chunk.len())
5268 .min(next_capture_start)
5269 .min(next_diagnostic_endpoint);
5270 let mut highlight_id = None;
5271 if let Some(highlights) = self.highlights.as_ref()
5272 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5273 {
5274 chunk_end = chunk_end.min(*parent_capture_end);
5275 highlight_id = Some(*parent_highlight_id);
5276 }
5277 let bit_start = chunk_start - self.chunks.offset();
5278 let bit_end = chunk_end - self.chunks.offset();
5279
5280 let slice = &chunk[bit_start..bit_end];
5281
5282 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5283 let tabs = (tabs >> bit_start) & mask;
5284 let chars = (chars_map >> bit_start) & mask;
5285
5286 self.range.start = chunk_end;
5287 if self.range.start == self.chunks.offset() + chunk.len() {
5288 self.chunks.next().unwrap();
5289 }
5290
5291 Some(Chunk {
5292 text: slice,
5293 syntax_highlight_id: highlight_id,
5294 underline: self.underline,
5295 diagnostic_severity: self.current_diagnostic_severity(),
5296 is_unnecessary: self.current_code_is_unnecessary(),
5297 tabs,
5298 chars,
5299 ..Chunk::default()
5300 })
5301 } else {
5302 None
5303 }
5304 }
5305}
5306
5307impl operation_queue::Operation for Operation {
5308 fn lamport_timestamp(&self) -> clock::Lamport {
5309 match self {
5310 Operation::Buffer(_) => {
5311 unreachable!("buffer operations should never be deferred at this layer")
5312 }
5313 Operation::UpdateDiagnostics {
5314 lamport_timestamp, ..
5315 }
5316 | Operation::UpdateSelections {
5317 lamport_timestamp, ..
5318 }
5319 | Operation::UpdateCompletionTriggers {
5320 lamport_timestamp, ..
5321 }
5322 | Operation::UpdateLineEnding {
5323 lamport_timestamp, ..
5324 } => *lamport_timestamp,
5325 }
5326 }
5327}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a single tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// Returns an iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// Returns the character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes this [`IndentSize`] and returns a new one that has been
    /// shrunk or enlarged by the given size in the given direction.
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

    /// Returns the visual width of this indent, counting each tab as `tab_size` columns.
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}
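
// A minimal sketch (not part of the original source) of how `with_delta` and
// `len_with_expanded_tabs` behave. It sticks to field-level assertions so it does
// not assume any particular trait derives on `IndentSize` or `IndentKind`.
#[cfg(test)]
mod indent_size_sketch {
    use super::*;

    #[test]
    fn with_delta_and_expanded_tabs() {
        // Growing an empty indent adopts the delta's kind and length.
        let grown = IndentSize::spaces(0).with_delta(Ordering::Greater, IndentSize::tab());
        assert!(matches!(grown.kind, IndentKind::Tab));
        assert_eq!(grown.len, 1);

        // Growing and shrinking only applies when the kinds match.
        let spaces = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(2));
        assert_eq!(spaces.len, 6);
        let unchanged = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::tab());
        assert_eq!(unchanged.len, 4);

        // A tab expands to the configured tab size; spaces are unaffected.
        let tab_size = NonZeroU32::new(4).unwrap();
        assert_eq!(IndentSize::tab().len_with_expanded_tabs(tab_size), 4);
        assert_eq!(IndentSize::spaces(3).len_with_expanded_tabs(tab_size), 3);
    }
}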

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

/// Coalesces an iterator of values into ranges of consecutive values, starting a
/// new range whenever a value is not one greater than the end of the current
/// range or the current range has reached `max_len`.
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}
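
// A minimal sketch (not part of the original source) of `contiguous_ranges`:
// consecutive values are grouped, and `max_len` caps how far each range grows.
#[cfg(test)]
mod contiguous_ranges_sketch {
    use super::*;

    #[test]
    fn groups_consecutive_values() {
        let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 7].into_iter(), 2).collect();
        assert_eq!(ranges, vec![1..3, 3..4, 5..7, 7..8]);
    }
}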

#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}
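
// A minimal sketch (not part of the original source) of scope-free classification:
// alphanumerics and '_' are words, whitespace is whitespace, and everything else
// is punctuation unless `ignore_punctuation` folds it into the word class.
#[cfg(test)]
mod char_classifier_sketch {
    use super::*;

    #[test]
    fn classifies_without_a_language_scope() {
        let classifier = CharClassifier::new(None);
        assert!(classifier.is_word('a'));
        assert!(classifier.is_word('_'));
        assert!(classifier.is_whitespace(' '));
        assert!(classifier.is_punctuation('.'));

        // With `ignore_punctuation`, punctuation is treated as part of a word.
        let relaxed = CharClassifier::new(None).ignore_punctuation(true);
        assert!(relaxed.is_word('.'));
    }
}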

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}
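
// A minimal sketch (not part of the original source) of the offsets produced by
// `trailing_whitespace_ranges`, assuming `Rope` can be built from a `&str` as it
// is elsewhere in this crate.
#[cfg(test)]
mod trailing_whitespace_sketch {
    use super::*;

    #[test]
    fn finds_line_end_whitespace() {
        // "x " has a trailing space at offset 1 and "y\t" a trailing tab at offset 4;
        // the final line "z" has no trailing whitespace.
        let rope = Rope::from("x \ny\t\nz");
        assert_eq!(trailing_whitespace_ranges(&rope), vec![1..2, 4..5]);
    }
}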