1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use fs::MTime;
29use futures::channel::oneshot;
30use gpui::{
31 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
32 Task, TaskLabel, TextStyle,
33};
34
35use lsp::{LanguageServerId, NumberOrString};
36use parking_lot::Mutex;
37use serde::{Deserialize, Serialize};
38use serde_json::Value;
39use settings::WorktreeId;
40use smallvec::SmallVec;
41use smol::future::yield_now;
42use std::{
43 any::Any,
44 borrow::Cow,
45 cell::Cell,
46 cmp::{self, Ordering, Reverse},
47 collections::{BTreeMap, BTreeSet},
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, Range},
53 path::PathBuf,
54 rc,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
65 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
66 ToPointUtf16, Transaction, TransactionId, Unclipped,
67};
68use theme::{ActiveTheme as _, SyntaxTheme};
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
/// Indicates whether a [`Buffer`] has permission to edit.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
113 pending_autoindent: Option<Task<()>>,
114 sync_parse_timeout: Duration,
115 syntax_map: Mutex<SyntaxMap>,
116 reparse: Option<Task<()>>,
117 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
118 non_text_state_update_count: usize,
119 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
120 remote_selections: TreeMap<ReplicaId, SelectionSet>,
121 diagnostics_timestamp: clock::Lamport,
122 completion_triggers: BTreeSet<String>,
123 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
124 completion_triggers_timestamp: clock::Lamport,
125 deferred_ops: OperationQueue<Operation>,
126 capability: Capability,
127 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
130 has_unsaved_edits: Cell<(clock::Global, bool)>,
131 change_bits: Vec<rc::Weak<Cell<bool>>>,
132 _subscriptions: Vec<gpui::Subscription>,
133 tree_sitter_data: Arc<TreeSitterData>,
134}
135
136#[derive(Debug)]
137pub struct TreeSitterData {
138 chunks: RowChunks,
139 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
140}
141
142const MAX_ROWS_IN_A_CHUNK: u32 = 50;
143
144impl TreeSitterData {
145 fn clear(&mut self, snapshot: text::BufferSnapshot) {
146 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
147 self.brackets_by_chunks.get_mut().clear();
148 self.brackets_by_chunks
149 .get_mut()
150 .resize(self.chunks.len(), None);
151 }
152
153 fn new(snapshot: text::BufferSnapshot) -> Self {
154 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
155 Self {
156 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
157 chunks,
158 }
159 }
160
161 fn version(&self) -> &clock::Global {
162 self.chunks.version()
163 }
164}
165
166#[derive(Copy, Clone, Debug, PartialEq, Eq)]
167pub enum ParseStatus {
168 Idle,
169 Parsing,
170}
171
172struct BufferBranchState {
173 base_buffer: Entity<Buffer>,
174 merged_operations: Vec<Lamport>,
175}
176
177/// An immutable, cheaply cloneable representation of a fixed
178/// state of a buffer.
179pub struct BufferSnapshot {
180 pub text: text::BufferSnapshot,
181 pub syntax: SyntaxSnapshot,
182 file: Option<Arc<dyn File>>,
183 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
184 remote_selections: TreeMap<ReplicaId, SelectionSet>,
185 language: Option<Arc<Language>>,
186 non_text_state_update_count: usize,
187 tree_sitter_data: Arc<TreeSitterData>,
188}
189
190/// The kind and amount of indentation in a particular line. For now,
191/// assumes that indentation is all the same character.
192#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
193pub struct IndentSize {
194 /// The number of bytes that comprise the indentation.
195 pub len: u32,
196 /// The kind of whitespace used for indentation.
197 pub kind: IndentKind,
198}
199
200/// A whitespace character that's used for indentation.
201#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
202pub enum IndentKind {
203 /// An ASCII space character.
204 #[default]
205 Space,
206 /// An ASCII tab character.
207 Tab,
208}
209
210/// The shape of a selection cursor.
211#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
212pub enum CursorShape {
213 /// A vertical bar
214 #[default]
215 Bar,
216 /// A block that surrounds the following character
217 Block,
218 /// An underline that runs along the following character
219 Underline,
220 /// A box drawn around the following character
221 Hollow,
222}
223
224impl From<settings::CursorShape> for CursorShape {
225 fn from(shape: settings::CursorShape) -> Self {
226 match shape {
227 settings::CursorShape::Bar => CursorShape::Bar,
228 settings::CursorShape::Block => CursorShape::Block,
229 settings::CursorShape::Underline => CursorShape::Underline,
230 settings::CursorShape::Hollow => CursorShape::Hollow,
231 }
232 }
233}
234
235#[derive(Clone, Debug)]
236struct SelectionSet {
237 line_mode: bool,
238 cursor_shape: CursorShape,
239 selections: Arc<[Selection<Anchor>]>,
240 lamport_timestamp: clock::Lamport,
241}
242
243/// A diagnostic associated with a certain range of a buffer.
244#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
245pub struct Diagnostic {
246 /// The name of the service that produced this diagnostic.
247 pub source: Option<String>,
248 /// A machine-readable code that identifies this diagnostic.
249 pub code: Option<NumberOrString>,
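    /// A URI with more information about this diagnostic's code (the LSP code description).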
250 pub code_description: Option<lsp::Uri>,
251 /// Whether this diagnostic is a hint, warning, or error.
252 pub severity: DiagnosticSeverity,
253 /// The human-readable message associated with this diagnostic.
254 pub message: String,
    /// The human-readable message, in Markdown format.
256 pub markdown: Option<String>,
257 /// An id that identifies the group to which this diagnostic belongs.
258 ///
259 /// When a language server produces a diagnostic with
260 /// one or more associated diagnostics, those diagnostics are all
261 /// assigned a single group ID.
262 pub group_id: usize,
263 /// Whether this diagnostic is the primary diagnostic for its group.
264 ///
265 /// In a given group, the primary diagnostic is the top-level diagnostic
266 /// returned by the language server. The non-primary diagnostics are the
267 /// associated diagnostics.
268 pub is_primary: bool,
269 /// Whether this diagnostic is considered to originate from an analysis of
270 /// files on disk, as opposed to any unsaved buffer contents. This is a
271 /// property of a given diagnostic source, and is configured for a given
272 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
273 /// for the language server.
274 pub is_disk_based: bool,
275 /// Whether this diagnostic marks unnecessary code.
276 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups by their source.
278 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the
    /// language server when code actions are requested for this diagnostic.
280 pub data: Option<Value>,
281 /// Whether to underline the corresponding text range in the editor.
282 pub underline: bool,
283}
284
285#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
286pub enum DiagnosticSourceKind {
287 Pulled,
288 Pushed,
289 Other,
290}
291
292/// An operation used to synchronize this buffer with its other replicas.
293#[derive(Clone, Debug, PartialEq)]
294pub enum Operation {
295 /// A text operation.
296 Buffer(text::Operation),
297
298 /// An update to the buffer's diagnostics.
299 UpdateDiagnostics {
300 /// The id of the language server that produced the new diagnostics.
301 server_id: LanguageServerId,
302 /// The diagnostics.
303 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
304 /// The buffer's lamport timestamp.
305 lamport_timestamp: clock::Lamport,
306 },
307
308 /// An update to the most recent selections in this buffer.
309 UpdateSelections {
310 /// The selections.
311 selections: Arc<[Selection<Anchor>]>,
312 /// The buffer's lamport timestamp.
313 lamport_timestamp: clock::Lamport,
314 /// Whether the selections are in 'line mode'.
315 line_mode: bool,
316 /// The [`CursorShape`] associated with these selections.
317 cursor_shape: CursorShape,
318 },
319
320 /// An update to the characters that should trigger autocompletion
321 /// for this buffer.
322 UpdateCompletionTriggers {
323 /// The characters that trigger autocompletion.
324 triggers: Vec<String>,
325 /// The buffer's lamport timestamp.
326 lamport_timestamp: clock::Lamport,
327 /// The language server ID.
328 server_id: LanguageServerId,
329 },
330
331 /// An update to the line ending type of this buffer.
332 UpdateLineEnding {
333 /// The line ending type.
334 line_ending: LineEnding,
335 /// The buffer's lamport timestamp.
336 lamport_timestamp: clock::Lamport,
337 },
338}
339
340/// An event that occurs in a buffer.
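///
/// A minimal sketch of observing these events (illustrative; assumes a gpui
/// `Context` named `cx` and an existing `buffer: Entity<Buffer>`):
///
/// ```ignore
/// cx.subscribe(&buffer, |_this, _buffer, event, _cx| match event {
///     BufferEvent::Edited => { /* react to edits */ }
///     BufferEvent::Saved => { /* react to saves */ }
///     _ => {}
/// })
/// .detach();
/// ```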
341#[derive(Clone, Debug, PartialEq)]
342pub enum BufferEvent {
343 /// The buffer was changed in a way that must be
344 /// propagated to its other replicas.
345 Operation {
346 operation: Operation,
347 is_local: bool,
348 },
349 /// The buffer was edited.
350 Edited,
351 /// The buffer's `dirty` bit changed.
352 DirtyChanged,
353 /// The buffer was saved.
354 Saved,
355 /// The buffer's file was changed on disk.
356 FileHandleChanged,
357 /// The buffer was reloaded.
358 Reloaded,
    /// The buffer needs to be reloaded.
360 ReloadNeeded,
361 /// The buffer's language was changed.
    /// The boolean is `true` if the buffer now has a language but previously had
    /// none (or only plain text).
363 LanguageChanged(bool),
364 /// The buffer's syntax trees were updated.
365 Reparsed,
366 /// The buffer's diagnostics were updated.
367 DiagnosticsUpdated,
368 /// The buffer gained or lost editing capabilities.
369 CapabilityChanged,
370}
371
372/// The file associated with a buffer.
373pub trait File: Send + Sync + Any {
374 /// Returns the [`LocalFile`] associated with this file, if the
375 /// file is local.
376 fn as_local(&self) -> Option<&dyn LocalFile>;
377
378 /// Returns whether this file is local.
379 fn is_local(&self) -> bool {
380 self.as_local().is_some()
381 }
382
383 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
384 /// only available in some states, such as modification time.
385 fn disk_state(&self) -> DiskState;
386
387 /// Returns the path of this file relative to the worktree's root directory.
388 fn path(&self) -> &Arc<RelPath>;
389
390 /// Returns the path of this file relative to the worktree's parent directory (this means it
391 /// includes the name of the worktree's root folder).
392 fn full_path(&self, cx: &App) -> PathBuf;
393
394 /// Returns the path style of this file.
395 fn path_style(&self, cx: &App) -> PathStyle;
396
397 /// Returns the last component of this handle's absolute path. If this handle refers to the root
398 /// of its worktree, then this method will return the name of the worktree itself.
399 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
400
401 /// Returns the id of the worktree to which this file belongs.
402 ///
403 /// This is needed for looking up project-specific settings.
404 fn worktree_id(&self, cx: &App) -> WorktreeId;
405
406 /// Converts this file into a protobuf message.
407 fn to_proto(&self, cx: &App) -> rpc::proto::File;
408
    /// Returns whether Zed considers this to be a private file.
410 fn is_private(&self) -> bool;
411}
412
/// The file's storage status: whether it's stored (`Present`) and, if so, when it was last
/// modified. When the file is not stored, it is either `New` or `Deleted`; the UI
/// distinguishes these two states. For example, the buffer tab does not display a deletion
/// indicator for new files.
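///
/// A minimal sketch of branching on the three states (illustrative only):
///
/// ```ignore
/// fn describe(state: DiskState) -> &'static str {
///     match state {
///         DiskState::New => "not yet saved",
///         DiskState::Present { .. } => "saved on disk",
///         DiskState::Deleted => "deleted from disk",
///     }
/// }
/// ```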
417#[derive(Copy, Clone, Debug, PartialEq)]
418pub enum DiskState {
419 /// File created in Zed that has not been saved.
420 New,
421 /// File present on the filesystem.
422 Present { mtime: MTime },
423 /// Deleted file that was previously present.
424 Deleted,
425}
426
427impl DiskState {
428 /// Returns the file's last known modification time on disk.
429 pub fn mtime(self) -> Option<MTime> {
430 match self {
431 DiskState::New => None,
432 DiskState::Present { mtime } => Some(mtime),
433 DiskState::Deleted => None,
434 }
435 }
436
437 pub fn exists(&self) -> bool {
438 match self {
439 DiskState::New => false,
440 DiskState::Present { .. } => true,
441 DiskState::Deleted => false,
442 }
443 }
444}
445
446/// The file associated with a buffer, in the case where the file is on the local disk.
447pub trait LocalFile: File {
    /// Returns the absolute path of this file.
449 fn abs_path(&self, cx: &App) -> PathBuf;
450
451 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
452 fn load(&self, cx: &App) -> Task<Result<String>>;
453
454 /// Loads the file's contents from disk.
455 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
456}
457
458/// The auto-indent behavior associated with an editing operation.
459/// For some editing operations, each affected line of text has its
460/// indentation recomputed. For other operations, the entire block
461/// of edited text is adjusted uniformly.
462#[derive(Clone, Debug)]
463pub enum AutoindentMode {
464 /// Indent each line of inserted text.
465 EachLine,
466 /// Apply the same indentation adjustment to all of the lines
467 /// in a given insertion.
468 Block {
469 /// The original indentation column of the first line of each
470 /// insertion, if it has been copied.
471 ///
472 /// Knowing this makes it possible to preserve the relative indentation
473 /// of every line in the insertion from when it was copied.
474 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is then auto-indented to column `b`, then every other line of the insertion
        /// will have its indentation adjusted by `b - a`. For example, if a line was
        /// copied at column 8 while the first line was copied at column 4, and the
        /// first line is auto-indented to column 2, that line ends up at column 6.
478 original_indent_columns: Vec<Option<u32>>,
479 },
480}
481
482#[derive(Clone)]
483struct AutoindentRequest {
484 before_edit: BufferSnapshot,
485 entries: Vec<AutoindentRequestEntry>,
486 is_block_mode: bool,
487 ignore_empty_lines: bool,
488}
489
490#[derive(Debug, Clone)]
491struct AutoindentRequestEntry {
492 /// A range of the buffer whose indentation should be adjusted.
493 range: Range<Anchor>,
494 /// Whether or not these lines should be considered brand new, for the
495 /// purpose of auto-indent. When text is not new, its indentation will
496 /// only be adjusted if the suggested indentation level has *changed*
497 /// since the edit was made.
498 first_line_is_new: bool,
499 indent_size: IndentSize,
500 original_indent_column: Option<u32>,
501}
502
503#[derive(Debug)]
504struct IndentSuggestion {
505 basis_row: u32,
506 delta: Ordering,
507 within_error: bool,
508}
509
510struct BufferChunkHighlights<'a> {
511 captures: SyntaxMapCaptures<'a>,
512 next_capture: Option<SyntaxMapCapture<'a>>,
513 stack: Vec<(usize, HighlightId)>,
514 highlight_maps: Vec<HighlightMap>,
515}
516
517/// An iterator that yields chunks of a buffer's text, along with their
518/// syntax highlights and diagnostic status.
519pub struct BufferChunks<'a> {
520 buffer_snapshot: Option<&'a BufferSnapshot>,
521 range: Range<usize>,
522 chunks: text::Chunks<'a>,
523 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
524 error_depth: usize,
525 warning_depth: usize,
526 information_depth: usize,
527 hint_depth: usize,
528 unnecessary_depth: usize,
529 underline: bool,
530 highlights: Option<BufferChunkHighlights<'a>>,
531}
532
533/// A chunk of a buffer's text, along with its syntax highlight and
534/// diagnostic status.
535#[derive(Clone, Debug, Default)]
536pub struct Chunk<'a> {
537 /// The text of the chunk.
538 pub text: &'a str,
539 /// The syntax highlighting style of the chunk.
540 pub syntax_highlight_id: Option<HighlightId>,
541 /// The highlight style that has been applied to this chunk in
542 /// the editor.
543 pub highlight_style: Option<HighlightStyle>,
    /// The severity of the diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters in this chunk are tabs.
    pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
    pub chars: u128,
550 /// Whether this chunk of text is marked as unnecessary.
551 pub is_unnecessary: bool,
552 /// Whether this chunk of text was originally a tab character.
553 pub is_tab: bool,
554 /// Whether this chunk of text was originally an inlay.
555 pub is_inlay: bool,
556 /// Whether to underline the corresponding text range in the editor.
557 pub underline: bool,
558}
559
560/// A set of edits to a given version of a buffer, computed asynchronously.
561#[derive(Debug)]
562pub struct Diff {
563 pub base_version: clock::Global,
564 pub line_ending: LineEnding,
565 pub edits: Vec<(Range<usize>, Arc<str>)>,
566}
567
568#[derive(Debug, Clone, Copy)]
569pub(crate) struct DiagnosticEndpoint {
570 offset: usize,
571 is_start: bool,
572 underline: bool,
573 severity: DiagnosticSeverity,
574 is_unnecessary: bool,
575}
576
577/// A class of characters, used for characterizing a run of text.
578#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
579pub enum CharKind {
580 /// Whitespace.
581 Whitespace,
582 /// Punctuation.
583 Punctuation,
584 /// Word.
585 Word,
586}
587
588/// Context for character classification within a specific scope.
589#[derive(Copy, Clone, Eq, PartialEq, Debug)]
590pub enum CharScopeContext {
591 /// Character classification for completion queries.
592 ///
593 /// This context treats certain characters as word constituents that would
594 /// normally be considered punctuation, such as '-' in Tailwind classes
595 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
596 Completion,
597 /// Character classification for linked edits.
598 ///
599 /// This context handles characters that should be treated as part of
600 /// identifiers during linked editing operations, such as '.' in JSX
601 /// component names like `<Animated.View>`.
602 LinkedEdit,
603}
604
/// A runnable is a set of data about a region that can be resolved into a task.
606pub struct Runnable {
607 pub tags: SmallVec<[RunnableTag; 1]>,
608 pub language: Arc<Language>,
609 pub buffer: BufferId,
610}
611
612#[derive(Default, Clone, Debug)]
613pub struct HighlightedText {
614 pub text: SharedString,
615 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
616}
617
618#[derive(Default, Debug)]
619struct HighlightedTextBuilder {
620 pub text: String,
621 highlights: Vec<(Range<usize>, HighlightStyle)>,
622}
623
624impl HighlightedText {
625 pub fn from_buffer_range<T: ToOffset>(
626 range: Range<T>,
627 snapshot: &text::BufferSnapshot,
628 syntax_snapshot: &SyntaxSnapshot,
629 override_style: Option<HighlightStyle>,
630 syntax_theme: &SyntaxTheme,
631 ) -> Self {
632 let mut highlighted_text = HighlightedTextBuilder::default();
633 highlighted_text.add_text_from_buffer_range(
634 range,
635 snapshot,
636 syntax_snapshot,
637 override_style,
638 syntax_theme,
639 );
640 highlighted_text.build()
641 }
642
643 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
644 gpui::StyledText::new(self.text.clone())
645 .with_default_highlights(default_style, self.highlights.iter().cloned())
646 }
647
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// begins within that whitespace, along with a boolean indicating whether more
    /// lines follow.
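    ///
    /// A minimal sketch (illustrative; an empty highlight list is assumed):
    ///
    /// ```ignore
    /// let highlighted = HighlightedText {
    ///     text: "    let x = 1;\nlet y = 2;".into(),
    ///     highlights: Vec::new(),
    /// };
    /// let (preview, has_more) = highlighted.first_line_preview();
    /// assert_eq!(&*preview.text, "let x = 1;");
    /// assert!(has_more);
    /// ```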
650 pub fn first_line_preview(self) -> (Self, bool) {
651 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
652 let first_line = &self.text[..newline_ix];
653
654 // Trim leading whitespace, unless an edit starts prior to it.
655 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
656 if let Some((first_highlight_range, _)) = self.highlights.first() {
657 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
658 }
659
660 let preview_text = &first_line[preview_start_ix..];
661 let preview_highlights = self
662 .highlights
663 .into_iter()
664 .skip_while(|(range, _)| range.end <= preview_start_ix)
665 .take_while(|(range, _)| range.start < newline_ix)
666 .filter_map(|(mut range, highlight)| {
667 range.start = range.start.saturating_sub(preview_start_ix);
668 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
669 if range.is_empty() {
670 None
671 } else {
672 Some((range, highlight))
673 }
674 });
675
676 let preview = Self {
677 text: SharedString::new(preview_text),
678 highlights: preview_highlights.collect(),
679 };
680
681 (preview, self.text.len() > newline_ix)
682 }
683}
684
685impl HighlightedTextBuilder {
686 pub fn build(self) -> HighlightedText {
687 HighlightedText {
688 text: self.text.into(),
689 highlights: self.highlights,
690 }
691 }
692
693 pub fn add_text_from_buffer_range<T: ToOffset>(
694 &mut self,
695 range: Range<T>,
696 snapshot: &text::BufferSnapshot,
697 syntax_snapshot: &SyntaxSnapshot,
698 override_style: Option<HighlightStyle>,
699 syntax_theme: &SyntaxTheme,
700 ) {
701 let range = range.to_offset(snapshot);
702 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
703 let start = self.text.len();
704 self.text.push_str(chunk.text);
705 let end = self.text.len();
706
707 if let Some(highlight_style) = chunk
708 .syntax_highlight_id
709 .and_then(|id| id.style(syntax_theme))
710 {
711 let highlight_style = override_style.map_or(highlight_style, |override_style| {
712 highlight_style.highlight(override_style)
713 });
714 self.highlights.push((start..end, highlight_style));
715 } else if let Some(override_style) = override_style {
716 self.highlights.push((start..end, override_style));
717 }
718 }
719 }
720
721 fn highlighted_chunks<'a>(
722 range: Range<usize>,
723 snapshot: &'a text::BufferSnapshot,
724 syntax_snapshot: &'a SyntaxSnapshot,
725 ) -> BufferChunks<'a> {
726 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
727 grammar
728 .highlights_config
729 .as_ref()
730 .map(|config| &config.query)
731 });
732
733 let highlight_maps = captures
734 .grammars()
735 .iter()
736 .map(|grammar| grammar.highlight_map())
737 .collect();
738
739 BufferChunks::new(
740 snapshot.as_rope(),
741 range,
742 Some((captures, highlight_maps)),
743 false,
744 None,
745 )
746 }
747}
748
749#[derive(Clone)]
750pub struct EditPreview {
751 old_snapshot: text::BufferSnapshot,
752 applied_edits_snapshot: text::BufferSnapshot,
753 syntax_snapshot: SyntaxSnapshot,
754}
755
756impl EditPreview {
757 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
758 let (first, _) = edits.first()?;
759 let (last, _) = edits.last()?;
760
761 let start = first.start.to_point(&self.old_snapshot);
762 let old_end = last.end.to_point(&self.old_snapshot);
763 let new_end = last
764 .end
765 .bias_right(&self.old_snapshot)
766 .to_point(&self.applied_edits_snapshot);
767
768 let start = Point::new(start.row.saturating_sub(3), 0);
769 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
770 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
771
772 Some(unified_diff(
773 &self
774 .old_snapshot
775 .text_for_range(start..old_end)
776 .collect::<String>(),
777 &self
778 .applied_edits_snapshot
779 .text_for_range(start..new_end)
780 .collect::<String>(),
781 ))
782 }
783
784 pub fn highlight_edits(
785 &self,
786 current_snapshot: &BufferSnapshot,
787 edits: &[(Range<Anchor>, impl AsRef<str>)],
788 include_deletions: bool,
789 cx: &App,
790 ) -> HighlightedText {
791 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
792 return HighlightedText::default();
793 };
794
795 let mut highlighted_text = HighlightedTextBuilder::default();
796
797 let visible_range_in_preview_snapshot =
798 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
799 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
800
801 let insertion_highlight_style = HighlightStyle {
802 background_color: Some(cx.theme().status().created_background),
803 ..Default::default()
804 };
805 let deletion_highlight_style = HighlightStyle {
806 background_color: Some(cx.theme().status().deleted_background),
807 ..Default::default()
808 };
809 let syntax_theme = cx.theme().syntax();
810
811 for (range, edit_text) in edits {
812 let edit_new_end_in_preview_snapshot = range
813 .end
814 .bias_right(&self.old_snapshot)
815 .to_offset(&self.applied_edits_snapshot);
816 let edit_start_in_preview_snapshot =
817 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
818
819 let unchanged_range_in_preview_snapshot =
820 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
821 if !unchanged_range_in_preview_snapshot.is_empty() {
822 highlighted_text.add_text_from_buffer_range(
823 unchanged_range_in_preview_snapshot,
824 &self.applied_edits_snapshot,
825 &self.syntax_snapshot,
826 None,
827 syntax_theme,
828 );
829 }
830
831 let range_in_current_snapshot = range.to_offset(current_snapshot);
832 if include_deletions && !range_in_current_snapshot.is_empty() {
833 highlighted_text.add_text_from_buffer_range(
834 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
837 Some(deletion_highlight_style),
838 syntax_theme,
839 );
840 }
841
842 if !edit_text.as_ref().is_empty() {
843 highlighted_text.add_text_from_buffer_range(
844 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
845 &self.applied_edits_snapshot,
846 &self.syntax_snapshot,
847 Some(insertion_highlight_style),
848 syntax_theme,
849 );
850 }
851
852 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
853 }
854
855 highlighted_text.add_text_from_buffer_range(
856 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
857 &self.applied_edits_snapshot,
858 &self.syntax_snapshot,
859 None,
860 syntax_theme,
861 );
862
863 highlighted_text.build()
864 }
865
866 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
867 cx.new(|cx| {
868 let mut buffer = Buffer::local_normalized(
869 self.applied_edits_snapshot.as_rope().clone(),
870 self.applied_edits_snapshot.line_ending(),
871 cx,
872 );
873 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
874 buffer
875 })
876 }
877
878 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
879 let (first, _) = edits.first()?;
880 let (last, _) = edits.last()?;
881
882 let start = first
883 .start
884 .bias_left(&self.old_snapshot)
885 .to_point(&self.applied_edits_snapshot);
886 let end = last
887 .end
888 .bias_right(&self.old_snapshot)
889 .to_point(&self.applied_edits_snapshot);
890
891 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
892 let range = Point::new(start.row, 0)
893 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
894
895 Some(range)
896 }
897}
898
899#[derive(Clone, Debug, PartialEq, Eq)]
900pub struct BracketMatch<T> {
901 pub open_range: Range<T>,
902 pub close_range: Range<T>,
903 pub newline_only: bool,
904 pub syntax_layer_depth: usize,
905 pub color_index: Option<usize>,
906}
907
908impl<T> BracketMatch<T> {
909 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
910 (self.open_range, self.close_range)
911 }
912}
913
914impl Buffer {
915 /// Create a new buffer with the given base text.
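    ///
    /// A minimal usage sketch (illustrative; assumes a gpui `App` context named `cx`):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```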
916 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
917 Self::build(
918 TextBuffer::new(
919 ReplicaId::LOCAL,
920 cx.entity_id().as_non_zero_u64().into(),
921 base_text.into(),
922 ),
923 None,
924 Capability::ReadWrite,
925 )
926 }
927
928 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
929 pub fn local_normalized(
930 base_text_normalized: Rope,
931 line_ending: LineEnding,
932 cx: &Context<Self>,
933 ) -> Self {
934 Self::build(
935 TextBuffer::new_normalized(
936 ReplicaId::LOCAL,
937 cx.entity_id().as_non_zero_u64().into(),
938 line_ending,
939 base_text_normalized,
940 ),
941 None,
942 Capability::ReadWrite,
943 )
944 }
945
946 /// Create a new buffer that is a replica of a remote buffer.
947 pub fn remote(
948 remote_id: BufferId,
949 replica_id: ReplicaId,
950 capability: Capability,
951 base_text: impl Into<String>,
952 ) -> Self {
953 Self::build(
954 TextBuffer::new(replica_id, remote_id, base_text.into()),
955 None,
956 capability,
957 )
958 }
959
960 /// Create a new buffer that is a replica of a remote buffer, populating its
961 /// state from the given protobuf message.
962 pub fn from_proto(
963 replica_id: ReplicaId,
964 capability: Capability,
965 message: proto::BufferState,
966 file: Option<Arc<dyn File>>,
967 ) -> Result<Self> {
968 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
969 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
970 let mut this = Self::build(buffer, file, capability);
971 this.text.set_line_ending(proto::deserialize_line_ending(
972 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
973 ));
974 this.saved_version = proto::deserialize_version(&message.saved_version);
975 this.saved_mtime = message.saved_mtime.map(|time| time.into());
976 Ok(this)
977 }
978
979 /// Serialize the buffer's state to a protobuf message.
980 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
981 proto::BufferState {
982 id: self.remote_id().into(),
983 file: self.file.as_ref().map(|f| f.to_proto(cx)),
984 base_text: self.base_text().to_string(),
985 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
986 saved_version: proto::serialize_version(&self.saved_version),
987 saved_mtime: self.saved_mtime.map(|time| time.into()),
988 }
989 }
990
991 /// Serialize as protobufs all of the changes to the buffer since the given version.
992 pub fn serialize_ops(
993 &self,
994 since: Option<clock::Global>,
995 cx: &App,
996 ) -> Task<Vec<proto::Operation>> {
997 let mut operations = Vec::new();
998 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
999
1000 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1001 proto::serialize_operation(&Operation::UpdateSelections {
1002 selections: set.selections.clone(),
1003 lamport_timestamp: set.lamport_timestamp,
1004 line_mode: set.line_mode,
1005 cursor_shape: set.cursor_shape,
1006 })
1007 }));
1008
1009 for (server_id, diagnostics) in &self.diagnostics {
1010 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1011 lamport_timestamp: self.diagnostics_timestamp,
1012 server_id: *server_id,
1013 diagnostics: diagnostics.iter().cloned().collect(),
1014 }));
1015 }
1016
1017 for (server_id, completions) in &self.completion_triggers_per_language_server {
1018 operations.push(proto::serialize_operation(
1019 &Operation::UpdateCompletionTriggers {
1020 triggers: completions.iter().cloned().collect(),
1021 lamport_timestamp: self.completion_triggers_timestamp,
1022 server_id: *server_id,
1023 },
1024 ));
1025 }
1026
1027 let text_operations = self.text.operations().clone();
1028 cx.background_spawn(async move {
1029 let since = since.unwrap_or_default();
1030 operations.extend(
1031 text_operations
1032 .iter()
1033 .filter(|(_, op)| !since.observed(op.timestamp()))
1034 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1035 );
1036 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1037 operations
1038 })
1039 }
1040
1041 /// Assign a language to the buffer, returning the buffer.
1042 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1043 self.set_language_async(Some(language), cx);
1044 self
1045 }
1046
1047 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
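    ///
    /// A minimal sketch (illustrative; `rust_language` is an assumed `Arc<Language>`):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("let x = 1;", cx).with_language(rust_language, cx));
    /// ```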
1048 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1049 self.set_language(Some(language), cx);
1050 self
1051 }
1052
1053 /// Returns the [`Capability`] of this buffer.
1054 pub fn capability(&self) -> Capability {
1055 self.capability
1056 }
1057
1058 /// Whether this buffer can only be read.
1059 pub fn read_only(&self) -> bool {
1060 self.capability == Capability::ReadOnly
1061 }
1062
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1064 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1065 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1066 let snapshot = buffer.snapshot();
1067 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1068 let tree_sitter_data = TreeSitterData::new(snapshot);
1069 Self {
1070 saved_mtime,
1071 tree_sitter_data: Arc::new(tree_sitter_data),
1072 saved_version: buffer.version(),
1073 preview_version: buffer.version(),
1074 reload_task: None,
1075 transaction_depth: 0,
1076 was_dirty_before_starting_transaction: None,
1077 has_unsaved_edits: Cell::new((buffer.version(), false)),
1078 text: buffer,
1079 branch_state: None,
1080 file,
1081 capability,
1082 syntax_map,
1083 reparse: None,
1084 non_text_state_update_count: 0,
1085 sync_parse_timeout: Duration::from_millis(1),
1086 parse_status: watch::channel(ParseStatus::Idle),
1087 autoindent_requests: Default::default(),
1088 wait_for_autoindent_txs: Default::default(),
1089 pending_autoindent: Default::default(),
1090 language: None,
1091 remote_selections: Default::default(),
1092 diagnostics: Default::default(),
1093 diagnostics_timestamp: Lamport::MIN,
1094 completion_triggers: Default::default(),
1095 completion_triggers_per_language_server: Default::default(),
1096 completion_triggers_timestamp: Lamport::MIN,
1097 deferred_ops: OperationQueue::new(),
1098 has_conflict: false,
1099 change_bits: Default::default(),
1100 _subscriptions: Vec::new(),
1101 }
1102 }
1103
1104 pub fn build_snapshot(
1105 text: Rope,
1106 language: Option<Arc<Language>>,
1107 language_registry: Option<Arc<LanguageRegistry>>,
1108 cx: &mut App,
1109 ) -> impl Future<Output = BufferSnapshot> + use<> {
1110 let entity_id = cx.reserve_entity::<Self>().entity_id();
1111 let buffer_id = entity_id.as_non_zero_u64().into();
1112 async move {
1113 let text =
1114 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1115 .snapshot();
1116 let mut syntax = SyntaxMap::new(&text).snapshot();
1117 if let Some(language) = language.clone() {
1118 let language_registry = language_registry.clone();
1119 syntax.reparse(&text, language_registry, language);
1120 }
1121 let tree_sitter_data = TreeSitterData::new(text.clone());
1122 BufferSnapshot {
1123 text,
1124 syntax,
1125 file: None,
1126 diagnostics: Default::default(),
1127 remote_selections: Default::default(),
1128 tree_sitter_data: Arc::new(tree_sitter_data),
1129 language,
1130 non_text_state_update_count: 0,
1131 }
1132 }
1133 }
1134
1135 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1136 let entity_id = cx.reserve_entity::<Self>().entity_id();
1137 let buffer_id = entity_id.as_non_zero_u64().into();
1138 let text = TextBuffer::new_normalized(
1139 ReplicaId::LOCAL,
1140 buffer_id,
1141 Default::default(),
1142 Rope::new(),
1143 )
1144 .snapshot();
1145 let syntax = SyntaxMap::new(&text).snapshot();
1146 let tree_sitter_data = TreeSitterData::new(text.clone());
1147 BufferSnapshot {
1148 text,
1149 syntax,
1150 tree_sitter_data: Arc::new(tree_sitter_data),
1151 file: None,
1152 diagnostics: Default::default(),
1153 remote_selections: Default::default(),
1154 language: None,
1155 non_text_state_update_count: 0,
1156 }
1157 }
1158
1159 #[cfg(any(test, feature = "test-support"))]
1160 pub fn build_snapshot_sync(
1161 text: Rope,
1162 language: Option<Arc<Language>>,
1163 language_registry: Option<Arc<LanguageRegistry>>,
1164 cx: &mut App,
1165 ) -> BufferSnapshot {
1166 let entity_id = cx.reserve_entity::<Self>().entity_id();
1167 let buffer_id = entity_id.as_non_zero_u64().into();
1168 let text =
1169 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1170 .snapshot();
1171 let mut syntax = SyntaxMap::new(&text).snapshot();
1172 if let Some(language) = language.clone() {
1173 syntax.reparse(&text, language_registry, language);
1174 }
1175 let tree_sitter_data = TreeSitterData::new(text.clone());
1176 BufferSnapshot {
1177 text,
1178 syntax,
1179 tree_sitter_data: Arc::new(tree_sitter_data),
1180 file: None,
1181 diagnostics: Default::default(),
1182 remote_selections: Default::default(),
1183 language,
1184 non_text_state_update_count: 0,
1185 }
1186 }
1187
1188 /// Retrieve a snapshot of the buffer's current state. This is computationally
1189 /// cheap, and allows reading from the buffer on a background thread.
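    ///
    /// A minimal sketch of off-thread reading (illustrative; assumes a gpui `App`
    /// context named `cx` and an existing `buffer: Entity<Buffer>`):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     let last_row = snapshot.max_point().row;
    ///     // ... use `last_row` without blocking the main thread ...
    /// })
    /// .detach();
    /// ```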
1190 pub fn snapshot(&self) -> BufferSnapshot {
1191 let text = self.text.snapshot();
1192 let mut syntax_map = self.syntax_map.lock();
1193 syntax_map.interpolate(&text);
1194 let syntax = syntax_map.snapshot();
1195
1196 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1197 Arc::new(TreeSitterData::new(text.clone()))
1198 } else {
1199 self.tree_sitter_data.clone()
1200 };
1201
1202 BufferSnapshot {
1203 text,
1204 syntax,
1205 tree_sitter_data,
1206 file: self.file.clone(),
1207 remote_selections: self.remote_selections.clone(),
1208 diagnostics: self.diagnostics.clone(),
1209 language: self.language.clone(),
1210 non_text_state_update_count: self.non_text_state_update_count,
1211 }
1212 }
1213
1214 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1215 let this = cx.entity();
1216 cx.new(|cx| {
1217 let mut branch = Self {
1218 branch_state: Some(BufferBranchState {
1219 base_buffer: this.clone(),
1220 merged_operations: Default::default(),
1221 }),
1222 language: self.language.clone(),
1223 has_conflict: self.has_conflict,
1224 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1225 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1226 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1227 };
1228 if let Some(language_registry) = self.language_registry() {
1229 branch.set_language_registry(language_registry);
1230 }
1231
1232 // Reparse the branch buffer so that we get syntax highlighting immediately.
1233 branch.reparse(cx, true);
1234
1235 branch
1236 })
1237 }
1238
1239 pub fn preview_edits(
1240 &self,
1241 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1242 cx: &App,
1243 ) -> Task<EditPreview> {
1244 let registry = self.language_registry();
1245 let language = self.language().cloned();
1246 let old_snapshot = self.text.snapshot();
1247 let mut branch_buffer = self.text.branch();
1248 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1249 cx.background_spawn(async move {
1250 if !edits.is_empty() {
1251 if let Some(language) = language.clone() {
1252 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1253 }
1254
1255 branch_buffer.edit(edits.iter().cloned());
1256 let snapshot = branch_buffer.snapshot();
1257 syntax_snapshot.interpolate(&snapshot);
1258
1259 if let Some(language) = language {
1260 syntax_snapshot.reparse(&snapshot, registry, language);
1261 }
1262 }
1263 EditPreview {
1264 old_snapshot,
1265 applied_edits_snapshot: branch_buffer.snapshot(),
1266 syntax_snapshot,
1267 }
1268 })
1269 }
1270
1271 /// Applies all of the changes in this buffer that intersect any of the
1272 /// given `ranges` to its base buffer.
1273 ///
1274 /// If `ranges` is empty, then all changes will be applied. This buffer must
1275 /// be a branch buffer to call this method.
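    ///
    /// A minimal sketch (illustrative; assumes a gpui context `cx` and an existing
    /// `buffer: Entity<Buffer>`):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// new header\n")], None, cx);
    ///     // An empty range list merges every change back into the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```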
1276 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1277 let Some(base_buffer) = self.base_buffer() else {
1278 debug_panic!("not a branch buffer");
1279 return;
1280 };
1281
1282 let mut ranges = if ranges.is_empty() {
1283 &[0..usize::MAX]
1284 } else {
1285 ranges.as_slice()
1286 }
1287 .iter()
1288 .peekable();
1289
1290 let mut edits = Vec::new();
1291 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1292 let mut is_included = false;
1293 while let Some(range) = ranges.peek() {
1294 if range.end < edit.new.start {
1295 ranges.next().unwrap();
1296 } else {
1297 if range.start <= edit.new.end {
1298 is_included = true;
1299 }
1300 break;
1301 }
1302 }
1303
1304 if is_included {
1305 edits.push((
1306 edit.old.clone(),
1307 self.text_for_range(edit.new.clone()).collect::<String>(),
1308 ));
1309 }
1310 }
1311
1312 let operation = base_buffer.update(cx, |base_buffer, cx| {
1313 // cx.emit(BufferEvent::DiffBaseChanged);
1314 base_buffer.edit(edits, None, cx)
1315 });
1316
1317 if let Some(operation) = operation
1318 && let Some(BufferBranchState {
1319 merged_operations, ..
1320 }) = &mut self.branch_state
1321 {
1322 merged_operations.push(operation);
1323 }
1324 }
1325
1326 fn on_base_buffer_event(
1327 &mut self,
1328 _: Entity<Buffer>,
1329 event: &BufferEvent,
1330 cx: &mut Context<Self>,
1331 ) {
1332 let BufferEvent::Operation { operation, .. } = event else {
1333 return;
1334 };
1335 let Some(BufferBranchState {
1336 merged_operations, ..
1337 }) = &mut self.branch_state
1338 else {
1339 return;
1340 };
1341
1342 let mut operation_to_undo = None;
1343 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1344 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1345 {
1346 merged_operations.remove(ix);
1347 operation_to_undo = Some(operation.timestamp);
1348 }
1349
1350 self.apply_ops([operation.clone()], cx);
1351
1352 if let Some(timestamp) = operation_to_undo {
1353 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1354 self.undo_operations(counts, cx);
1355 }
1356 }
1357
1358 #[cfg(test)]
1359 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1360 &self.text
1361 }
1362
1363 /// Retrieve a snapshot of the buffer's raw text, without any
1364 /// language-related state like the syntax tree or diagnostics.
1365 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1366 self.text.snapshot()
1367 }
1368
1369 /// The file associated with the buffer, if any.
1370 pub fn file(&self) -> Option<&Arc<dyn File>> {
1371 self.file.as_ref()
1372 }
1373
1374 /// The version of the buffer that was last saved or reloaded from disk.
1375 pub fn saved_version(&self) -> &clock::Global {
1376 &self.saved_version
1377 }
1378
1379 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1380 pub fn saved_mtime(&self) -> Option<MTime> {
1381 self.saved_mtime
1382 }
1383
1384 /// Assign a language to the buffer.
1385 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1386 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1387 }
1388
1389 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1390 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1391 self.set_language_(language, true, cx);
1392 }
1393
1394 fn set_language_(
1395 &mut self,
1396 language: Option<Arc<Language>>,
1397 may_block: bool,
1398 cx: &mut Context<Self>,
1399 ) {
1400 self.non_text_state_update_count += 1;
1401 self.syntax_map.lock().clear(&self.text);
1402 let old_language = std::mem::replace(&mut self.language, language);
1403 self.was_changed();
1404 self.reparse(cx, may_block);
1405 let has_fresh_language =
1406 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1407 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1408 }
1409
1410 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1411 /// other languages if parts of the buffer are written in different languages.
1412 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1413 self.syntax_map
1414 .lock()
1415 .set_language_registry(language_registry);
1416 }
1417
1418 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1419 self.syntax_map.lock().language_registry()
1420 }
1421
1422 /// Assign the line ending type to the buffer.
1423 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1424 self.text.set_line_ending(line_ending);
1425
1426 let lamport_timestamp = self.text.lamport_clock.tick();
1427 self.send_operation(
1428 Operation::UpdateLineEnding {
1429 line_ending,
1430 lamport_timestamp,
1431 },
1432 true,
1433 cx,
1434 );
1435 }
1436
1437 /// Assign the buffer a new [`Capability`].
1438 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1439 if self.capability != capability {
1440 self.capability = capability;
1441 cx.emit(BufferEvent::CapabilityChanged)
1442 }
1443 }
1444
1445 /// This method is called to signal that the buffer has been saved.
1446 pub fn did_save(
1447 &mut self,
1448 version: clock::Global,
1449 mtime: Option<MTime>,
1450 cx: &mut Context<Self>,
1451 ) {
1452 self.saved_version = version.clone();
1453 self.has_unsaved_edits.set((version, false));
1454 self.has_conflict = false;
1455 self.saved_mtime = mtime;
1456 self.was_changed();
1457 cx.emit(BufferEvent::Saved);
1458 cx.notify();
1459 }
1460
1461 /// Reloads the contents of the buffer from disk.
1462 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1463 let (tx, rx) = futures::channel::oneshot::channel();
1464 let prev_version = self.text.version();
1465 self.reload_task = Some(cx.spawn(async move |this, cx| {
1466 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1467 let file = this.file.as_ref()?.as_local()?;
1468
1469 Some((file.disk_state().mtime(), file.load(cx)))
1470 })?
1471 else {
1472 return Ok(());
1473 };
1474
1475 let new_text = new_text.await?;
1476 let diff = this
1477 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1478 .await;
1479 this.update(cx, |this, cx| {
1480 if this.version() == diff.base_version {
1481 this.finalize_last_transaction();
1482 this.apply_diff(diff, cx);
1483 tx.send(this.finalize_last_transaction().cloned()).ok();
1484 this.has_conflict = false;
1485 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1486 } else {
1487 if !diff.edits.is_empty()
1488 || this
1489 .edits_since::<usize>(&diff.base_version)
1490 .next()
1491 .is_some()
1492 {
1493 this.has_conflict = true;
1494 }
1495
1496 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1497 }
1498
1499 this.reload_task.take();
1500 })
1501 }));
1502 rx
1503 }
1504
1505 /// This method is called to signal that the buffer has been reloaded.
1506 pub fn did_reload(
1507 &mut self,
1508 version: clock::Global,
1509 line_ending: LineEnding,
1510 mtime: Option<MTime>,
1511 cx: &mut Context<Self>,
1512 ) {
1513 self.saved_version = version;
1514 self.has_unsaved_edits
1515 .set((self.saved_version.clone(), false));
1516 self.text.set_line_ending(line_ending);
1517 self.saved_mtime = mtime;
1518 cx.emit(BufferEvent::Reloaded);
1519 cx.notify();
1520 }
1521
1522 /// Updates the [`File`] backing this buffer. This should be called when
1523 /// the file has changed or has been deleted.
1524 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1525 let was_dirty = self.is_dirty();
1526 let mut file_changed = false;
1527
1528 if let Some(old_file) = self.file.as_ref() {
1529 if new_file.path() != old_file.path() {
1530 file_changed = true;
1531 }
1532
1533 let old_state = old_file.disk_state();
1534 let new_state = new_file.disk_state();
1535 if old_state != new_state {
1536 file_changed = true;
1537 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1538 cx.emit(BufferEvent::ReloadNeeded)
1539 }
1540 }
1541 } else {
1542 file_changed = true;
1543 };
1544
1545 self.file = Some(new_file);
1546 if file_changed {
1547 self.was_changed();
1548 self.non_text_state_update_count += 1;
1549 if was_dirty != self.is_dirty() {
1550 cx.emit(BufferEvent::DirtyChanged);
1551 }
1552 cx.emit(BufferEvent::FileHandleChanged);
1553 cx.notify();
1554 }
1555 }
1556
1557 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1558 Some(self.branch_state.as_ref()?.base_buffer.clone())
1559 }
1560
1561 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1562 pub fn language(&self) -> Option<&Arc<Language>> {
1563 self.language.as_ref()
1564 }
1565
1566 /// Returns the [`Language`] at the given location.
1567 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1568 let offset = position.to_offset(self);
1569 let mut is_first = true;
1570 let start_anchor = self.anchor_before(offset);
1571 let end_anchor = self.anchor_after(offset);
1572 self.syntax_map
1573 .lock()
1574 .layers_for_range(offset..offset, &self.text, false)
1575 .filter(|layer| {
1576 if is_first {
1577 is_first = false;
1578 return true;
1579 }
1580
1581 layer
1582 .included_sub_ranges
1583 .map(|sub_ranges| {
1584 sub_ranges.iter().any(|sub_range| {
1585 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1586 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1587 !is_before_start && !is_after_end
1588 })
1589 })
1590 .unwrap_or(true)
1591 })
1592 .last()
1593 .map(|info| info.language.clone())
1594 .or_else(|| self.language.clone())
1595 }
1596
1597 /// Returns each [`Language`] for the active syntax layers at the given location.
1598 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1599 let offset = position.to_offset(self);
1600 let mut languages: Vec<Arc<Language>> = self
1601 .syntax_map
1602 .lock()
1603 .layers_for_range(offset..offset, &self.text, false)
1604 .map(|info| info.language.clone())
1605 .collect();
1606
1607 if languages.is_empty()
1608 && let Some(buffer_language) = self.language()
1609 {
1610 languages.push(buffer_language.clone());
1611 }
1612
1613 languages
1614 }
1615
1616 /// An integer version number that accounts for all updates besides
1617 /// the buffer's text itself (which is versioned via a version vector).
1618 pub fn non_text_state_update_count(&self) -> usize {
1619 self.non_text_state_update_count
1620 }
1621
1622 /// Whether the buffer is being parsed in the background.
1623 #[cfg(any(test, feature = "test-support"))]
1624 pub fn is_parsing(&self) -> bool {
1625 self.reparse.is_some()
1626 }
1627
1628 /// Indicates whether the buffer contains any regions that may be
1629 /// written in a language that hasn't been loaded yet.
1630 pub fn contains_unknown_injections(&self) -> bool {
1631 self.syntax_map.lock().contains_unknown_injections()
1632 }
1633
1634 #[cfg(any(test, feature = "test-support"))]
1635 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1636 self.sync_parse_timeout = timeout;
1637 }
1638
1639 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1640 match Arc::get_mut(&mut self.tree_sitter_data) {
1641 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1642 None => {
1643 let tree_sitter_data = TreeSitterData::new(snapshot);
1644 self.tree_sitter_data = Arc::new(tree_sitter_data)
1645 }
1646 }
1647 }
1648
1649 /// Called after an edit to synchronize the buffer's main parse tree with
1650 /// the buffer's new underlying state.
1651 ///
1652 /// Locks the syntax map and interpolates the edits since the last reparse
1653 /// into the foreground syntax tree.
1654 ///
1655 /// Then takes a stable snapshot of the syntax map before unlocking it.
1656 /// The snapshot with the interpolated edits is sent to a background thread,
1657 /// where we ask Tree-sitter to perform an incremental parse.
1658 ///
    /// Meanwhile, in the foreground, if `may_block` is true we block the main
    /// thread for up to `sync_parse_timeout` (1ms by default) waiting for the
    /// background parse to complete, and proceed synchronously if it finishes
    /// within that window.
    ///
    /// If we time out waiting on the parse, we return with the interpolated tree
    /// still in the foreground and spawn a second task that waits for the parse
    /// to complete. When the background parse finishes, we call back into the
    /// main thread and install the new parse state.
    ///
    /// If the buffer or grammar changed since the background parse started, we
    /// initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
1672 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1673 if self.text.version() != *self.tree_sitter_data.version() {
1674 self.invalidate_tree_sitter_data(self.text.snapshot());
1675 }
1676 if self.reparse.is_some() {
1677 return;
1678 }
1679 let language = if let Some(language) = self.language.clone() {
1680 language
1681 } else {
1682 return;
1683 };
1684
1685 let text = self.text_snapshot();
1686 let parsed_version = self.version();
1687
1688 let mut syntax_map = self.syntax_map.lock();
1689 syntax_map.interpolate(&text);
1690 let language_registry = syntax_map.language_registry();
1691 let mut syntax_snapshot = syntax_map.snapshot();
1692 drop(syntax_map);
1693
1694 let parse_task = cx.background_spawn({
1695 let language = language.clone();
1696 let language_registry = language_registry.clone();
1697 async move {
1698 syntax_snapshot.reparse(&text, language_registry, language);
1699 syntax_snapshot
1700 }
1701 });
1702
1703 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1704 if may_block {
1705 match cx
1706 .background_executor()
1707 .block_with_timeout(self.sync_parse_timeout, parse_task)
1708 {
1709 Ok(new_syntax_snapshot) => {
1710 self.did_finish_parsing(new_syntax_snapshot, cx);
1711 self.reparse = None;
1712 }
1713 Err(parse_task) => {
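// The background parse didn't finish within `sync_parse_timeout`: keep the
// interpolated tree in the foreground for now and apply the parsed snapshot
// once the background task completes.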
1714 self.reparse = Some(cx.spawn(async move |this, cx| {
1715 let new_syntax_map = cx.background_spawn(parse_task).await;
1716 this.update(cx, move |this, cx| {
1717 let grammar_changed = || {
1718 this.language.as_ref().is_none_or(|current_language| {
1719 !Arc::ptr_eq(&language, current_language)
1720 })
1721 };
1722 let language_registry_changed = || {
1723 new_syntax_map.contains_unknown_injections()
1724 && language_registry.is_some_and(|registry| {
1725 registry.version()
1726 != new_syntax_map.language_registry_version()
1727 })
1728 };
1729 let parse_again = this.version.changed_since(&parsed_version)
1730 || language_registry_changed()
1731 || grammar_changed();
1732 this.did_finish_parsing(new_syntax_map, cx);
1733 this.reparse = None;
1734 if parse_again {
1735 this.reparse(cx, false);
1736 }
1737 })
1738 .ok();
1739 }));
1740 }
1741 }
1742 } else {
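// `may_block` is false: always finish the parse on the background executor and
// apply the result back on the main thread when it completes.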
1743 self.reparse = Some(cx.spawn(async move |this, cx| {
1744 let new_syntax_map = cx.background_spawn(parse_task).await;
1745 this.update(cx, move |this, cx| {
1746 let grammar_changed = || {
1747 this.language.as_ref().is_none_or(|current_language| {
1748 !Arc::ptr_eq(&language, current_language)
1749 })
1750 };
1751 let language_registry_changed = || {
1752 new_syntax_map.contains_unknown_injections()
1753 && language_registry.is_some_and(|registry| {
1754 registry.version() != new_syntax_map.language_registry_version()
1755 })
1756 };
1757 let parse_again = this.version.changed_since(&parsed_version)
1758 || language_registry_changed()
1759 || grammar_changed();
1760 this.did_finish_parsing(new_syntax_map, cx);
1761 this.reparse = None;
1762 if parse_again {
1763 this.reparse(cx, false);
1764 }
1765 })
1766 .ok();
1767 }));
1768 }
1769 }
1770
1771 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1772 self.was_changed();
1773 self.non_text_state_update_count += 1;
1774 self.syntax_map.lock().did_parse(syntax_snapshot);
1775 self.request_autoindent(cx);
1776 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1777 self.invalidate_tree_sitter_data(self.text.snapshot());
1778 cx.emit(BufferEvent::Reparsed);
1779 cx.notify();
1780 }
1781
1782 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1783 self.parse_status.1.clone()
1784 }
1785
1786 /// Waits until the buffer is no longer parsing.
1787 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1788 let mut parse_status = self.parse_status();
1789 async move {
1790 while *parse_status.borrow() != ParseStatus::Idle {
1791 if parse_status.changed().await.is_err() {
1792 break;
1793 }
1794 }
1795 }
1796 }
1797
1798 /// Assign to the buffer a set of diagnostics created by a given language server.
1799 pub fn update_diagnostics(
1800 &mut self,
1801 server_id: LanguageServerId,
1802 diagnostics: DiagnosticSet,
1803 cx: &mut Context<Self>,
1804 ) {
1805 let lamport_timestamp = self.text.lamport_clock.tick();
1806 let op = Operation::UpdateDiagnostics {
1807 server_id,
1808 diagnostics: diagnostics.iter().cloned().collect(),
1809 lamport_timestamp,
1810 };
1811
1812 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1813 self.send_operation(op, true, cx);
1814 }
1815
1816 pub fn buffer_diagnostics(
1817 &self,
1818 for_server: Option<LanguageServerId>,
1819 ) -> Vec<&DiagnosticEntry<Anchor>> {
1820 match for_server {
1821 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1822 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1823 Err(_) => Vec::new(),
1824 },
1825 None => self
1826 .diagnostics
1827 .iter()
1828 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1829 .collect(),
1830 }
1831 }
1832
1833 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1834 if let Some(indent_sizes) = self.compute_autoindents() {
1835 let indent_sizes = cx.background_spawn(indent_sizes);
1836 match cx
1837 .background_executor()
1838 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1839 {
1840 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1841 Err(indent_sizes) => {
1842 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1843 let indent_sizes = indent_sizes.await;
1844 this.update(cx, |this, cx| {
1845 this.apply_autoindents(indent_sizes, cx);
1846 })
1847 .ok();
1848 }));
1849 }
1850 }
1851 } else {
1852 self.autoindent_requests.clear();
1853 for tx in self.wait_for_autoindent_txs.drain(..) {
1854 tx.send(()).ok();
1855 }
1856 }
1857 }
1858
1859 fn compute_autoindents(
1860 &self,
1861 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1862 let max_rows_between_yields = 100;
1863 let snapshot = self.snapshot();
1864 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1865 return None;
1866 }
1867
1868 let autoindent_requests = self.autoindent_requests.clone();
1869 Some(async move {
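// Maps each edited row to its computed indent plus the request's
// `ignore_empty_lines` flag; rows whose flag is set and whose line is empty
// are filtered out before the result is returned.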
1870 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1871 for request in autoindent_requests {
1872 // Resolve each edited range to its row in the current buffer and in the
1873 // buffer before this batch of edits.
1874 let mut row_ranges = Vec::new();
1875 let mut old_to_new_rows = BTreeMap::new();
1876 let mut language_indent_sizes_by_new_row = Vec::new();
1877 for entry in &request.entries {
1878 let position = entry.range.start;
1879 let new_row = position.to_point(&snapshot).row;
1880 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1881 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1882
1883 if !entry.first_line_is_new {
1884 let old_row = position.to_point(&request.before_edit).row;
1885 old_to_new_rows.insert(old_row, new_row);
1886 }
1887 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1888 }
1889
1890 // Build a map containing the suggested indentation for each of the edited lines
1891 // with respect to the state of the buffer before these edits. This map is keyed
1892 // by the rows for these lines in the current state of the buffer.
1893 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1894 let old_edited_ranges =
1895 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1896 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1897 let mut language_indent_size = IndentSize::default();
1898 for old_edited_range in old_edited_ranges {
1899 let suggestions = request
1900 .before_edit
1901 .suggest_autoindents(old_edited_range.clone())
1902 .into_iter()
1903 .flatten();
1904 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1905 if let Some(suggestion) = suggestion {
1906 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1907
1908 // Find the indent size based on the language for this row.
1909 while let Some((row, size)) = language_indent_sizes.peek() {
1910 if *row > new_row {
1911 break;
1912 }
1913 language_indent_size = *size;
1914 language_indent_sizes.next();
1915 }
1916
1917 let suggested_indent = old_to_new_rows
1918 .get(&suggestion.basis_row)
1919 .and_then(|from_row| {
1920 Some(old_suggestions.get(from_row).copied()?.0)
1921 })
1922 .unwrap_or_else(|| {
1923 request
1924 .before_edit
1925 .indent_size_for_line(suggestion.basis_row)
1926 })
1927 .with_delta(suggestion.delta, language_indent_size);
1928 old_suggestions
1929 .insert(new_row, (suggested_indent, suggestion.within_error));
1930 }
1931 }
1932 yield_now().await;
1933 }
1934
1935 // Compute new suggestions for each line, but only include them in the result
1936 // if they differ from the old suggestion for that line.
1937 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1938 let mut language_indent_size = IndentSize::default();
1939 for (row_range, original_indent_column) in row_ranges {
1940 let new_edited_row_range = if request.is_block_mode {
1941 row_range.start..row_range.start + 1
1942 } else {
1943 row_range.clone()
1944 };
1945
1946 let suggestions = snapshot
1947 .suggest_autoindents(new_edited_row_range.clone())
1948 .into_iter()
1949 .flatten();
1950 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1951 if let Some(suggestion) = suggestion {
1952 // Find the indent size based on the language for this row.
1953 while let Some((row, size)) = language_indent_sizes.peek() {
1954 if *row > new_row {
1955 break;
1956 }
1957 language_indent_size = *size;
1958 language_indent_sizes.next();
1959 }
1960
1961 let suggested_indent = indent_sizes
1962 .get(&suggestion.basis_row)
1963 .copied()
1964 .map(|e| e.0)
1965 .unwrap_or_else(|| {
1966 snapshot.indent_size_for_line(suggestion.basis_row)
1967 })
1968 .with_delta(suggestion.delta, language_indent_size);
1969
1970 if old_suggestions.get(&new_row).is_none_or(
1971 |(old_indentation, was_within_error)| {
1972 suggested_indent != *old_indentation
1973 && (!suggestion.within_error || *was_within_error)
1974 },
1975 ) {
1976 indent_sizes.insert(
1977 new_row,
1978 (suggested_indent, request.ignore_empty_lines),
1979 );
1980 }
1981 }
1982 }
1983
1984 if let (true, Some(original_indent_column)) =
1985 (request.is_block_mode, original_indent_column)
1986 {
1987 let new_indent =
1988 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1989 *indent
1990 } else {
1991 snapshot.indent_size_for_line(row_range.start)
1992 };
1993 let delta = new_indent.len as i64 - original_indent_column as i64;
1994 if delta != 0 {
1995 for row in row_range.skip(1) {
1996 indent_sizes.entry(row).or_insert_with(|| {
1997 let mut size = snapshot.indent_size_for_line(row);
1998 if size.kind == new_indent.kind {
1999 match delta.cmp(&0) {
2000 Ordering::Greater => size.len += delta as u32,
2001 Ordering::Less => {
2002 size.len = size.len.saturating_sub(-delta as u32)
2003 }
2004 Ordering::Equal => {}
2005 }
2006 }
2007 (size, request.ignore_empty_lines)
2008 });
2009 }
2010 }
2011 }
2012
2013 yield_now().await;
2014 }
2015 }
2016
2017 indent_sizes
2018 .into_iter()
2019 .filter_map(|(row, (indent, ignore_empty_lines))| {
2020 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2021 None
2022 } else {
2023 Some((row, indent))
2024 }
2025 })
2026 .collect()
2027 })
2028 }
2029
2030 fn apply_autoindents(
2031 &mut self,
2032 indent_sizes: BTreeMap<u32, IndentSize>,
2033 cx: &mut Context<Self>,
2034 ) {
2035 self.autoindent_requests.clear();
2036 for tx in self.wait_for_autoindent_txs.drain(..) {
2037 tx.send(()).ok();
2038 }
2039
2040 let edits: Vec<_> = indent_sizes
2041 .into_iter()
2042 .filter_map(|(row, indent_size)| {
2043 let current_size = indent_size_for_line(self, row);
2044 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2045 })
2046 .collect();
2047
2048 let preserve_preview = self.preserve_preview();
2049 self.edit(edits, None, cx);
2050 if preserve_preview {
2051 self.refresh_preview();
2052 }
2053 }
2054
2055 /// Create a minimal edit that will cause the given row to be indented
2056 /// with the given size. After applying this edit, the length of the line
2057 /// will always be at least `new_size.len`.
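///
/// A rough sketch of the expected behavior (illustrative only, not compiled
/// as a doctest):
///
/// ```ignore
/// // Growing a 2-space indent to 4 spaces on row 3 inserts two spaces at column 0.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(
///     edit,
///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
/// );
/// ```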
2058 pub fn edit_for_indent_size_adjustment(
2059 row: u32,
2060 current_size: IndentSize,
2061 new_size: IndentSize,
2062 ) -> Option<(Range<Point>, String)> {
2063 if new_size.kind == current_size.kind {
2064 match new_size.len.cmp(&current_size.len) {
2065 Ordering::Greater => {
2066 let point = Point::new(row, 0);
2067 Some((
2068 point..point,
2069 iter::repeat(new_size.char())
2070 .take((new_size.len - current_size.len) as usize)
2071 .collect::<String>(),
2072 ))
2073 }
2074
2075 Ordering::Less => Some((
2076 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2077 String::new(),
2078 )),
2079
2080 Ordering::Equal => None,
2081 }
2082 } else {
2083 Some((
2084 Point::new(row, 0)..Point::new(row, current_size.len),
2085 iter::repeat(new_size.char())
2086 .take(new_size.len as usize)
2087 .collect::<String>(),
2088 ))
2089 }
2090 }
2091
2092 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2093 /// and the given new text.
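///
/// A minimal usage sketch (illustrative; the exact `spawn` signature depends on
/// the calling context, and `buffer` is assumed to be an `Entity<Buffer>`):
///
/// ```ignore
/// let task = buffer.read(cx).diff(new_text, cx);
/// cx.spawn(async move |cx| {
///     let diff = task.await;
///     buffer
///         .update(cx, |buffer, cx| buffer.apply_diff(diff, cx))
///         .ok();
/// })
/// .detach();
/// ```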
2094 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2095 let old_text = self.as_rope().clone();
2096 let base_version = self.version();
2097 cx.background_executor()
2098 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2099 let old_text = old_text.to_string();
2100 let line_ending = LineEnding::detect(&new_text);
2101 LineEnding::normalize(&mut new_text);
2102 let edits = text_diff(&old_text, &new_text);
2103 Diff {
2104 base_version,
2105 line_ending,
2106 edits,
2107 }
2108 })
2109 }
2110
2111 /// Spawns a background task that searches the buffer for any whitespace
2112 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2113 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2114 let old_text = self.as_rope().clone();
2115 let line_ending = self.line_ending();
2116 let base_version = self.version();
2117 cx.background_spawn(async move {
2118 let ranges = trailing_whitespace_ranges(&old_text);
2119 let empty = Arc::<str>::from("");
2120 Diff {
2121 base_version,
2122 line_ending,
2123 edits: ranges
2124 .into_iter()
2125 .map(|range| (range, empty.clone()))
2126 .collect(),
2127 }
2128 })
2129 }
2130
2131 /// Ensures that the buffer ends with a single newline character, and
2132 /// no other trailing whitespace. Does nothing if the buffer is empty.
2133 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2134 let len = self.len();
2135 if len == 0 {
2136 return;
2137 }
2138 let mut offset = len;
2139 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2140 let non_whitespace_len = chunk
2141 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2142 .len();
2143 offset -= chunk.len();
2144 offset += non_whitespace_len;
2145 if non_whitespace_len != 0 {
2146 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2147 return;
2148 }
2149 break;
2150 }
2151 }
2152 self.edit([(offset..len, "\n")], None, cx);
2153 }
2154
2155 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2156 /// calculated, the diff is adjusted to account for those changes, and any hunks that
2157 /// conflict with those changes are discarded.
2158 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2159 let snapshot = self.snapshot();
2160 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
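// `delta` accumulates the net length change of edits that occurred strictly
// before the current hunk, so that each hunk's range can be shifted into the
// buffer's current coordinate space.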
2161 let mut delta = 0;
2162 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2163 while let Some(edit_since) = edits_since.peek() {
2164 // If the edit occurs after a diff hunk, then it does not
2165 // affect that hunk.
2166 if edit_since.old.start > range.end {
2167 break;
2168 }
2169 // If the edit precedes the diff hunk, then adjust the hunk
2170 // to reflect the edit.
2171 else if edit_since.old.end < range.start {
2172 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2173 edits_since.next();
2174 }
2175 // If the edit intersects a diff hunk, then discard that hunk.
2176 else {
2177 return None;
2178 }
2179 }
2180
2181 let start = (range.start as i64 + delta) as usize;
2182 let end = (range.end as i64 + delta) as usize;
2183 Some((start..end, new_text))
2184 });
2185
2186 self.start_transaction();
2187 self.text.set_line_ending(diff.line_ending);
2188 self.edit(adjusted_edits, None, cx);
2189 self.end_transaction(cx)
2190 }
2191
2192 pub fn has_unsaved_edits(&self) -> bool {
2193 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2194
2195 if last_version == self.version {
2196 self.has_unsaved_edits
2197 .set((last_version, has_unsaved_edits));
2198 return has_unsaved_edits;
2199 }
2200
2201 let has_edits = self.has_edits_since(&self.saved_version);
2202 self.has_unsaved_edits
2203 .set((self.version.clone(), has_edits));
2204 has_edits
2205 }
2206
2207 /// Checks if the buffer has unsaved changes.
2208 pub fn is_dirty(&self) -> bool {
2209 if self.capability == Capability::ReadOnly {
2210 return false;
2211 }
2212 if self.has_conflict {
2213 return true;
2214 }
2215 match self.file.as_ref().map(|f| f.disk_state()) {
2216 Some(DiskState::New) | Some(DiskState::Deleted) => {
2217 !self.is_empty() && self.has_unsaved_edits()
2218 }
2219 _ => self.has_unsaved_edits(),
2220 }
2221 }
2222
2223 /// Marks the buffer as having a conflict regardless of current buffer state.
2224 pub fn set_conflict(&mut self) {
2225 self.has_conflict = true;
2226 }
2227
2228 /// Checks if the buffer and its file have both changed since the buffer
2229 /// was last saved or reloaded.
2230 pub fn has_conflict(&self) -> bool {
2231 if self.has_conflict {
2232 return true;
2233 }
2234 let Some(file) = self.file.as_ref() else {
2235 return false;
2236 };
2237 match file.disk_state() {
2238 DiskState::New => false,
2239 DiskState::Present { mtime } => match self.saved_mtime {
2240 Some(saved_mtime) => {
2241 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2242 }
2243 None => true,
2244 },
2245 DiskState::Deleted => false,
2246 }
2247 }
2248
2249 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2250 pub fn subscribe(&mut self) -> Subscription<usize> {
2251 self.text.subscribe()
2252 }
2253
2254 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2255 ///
2256 /// This allows downstream code to check if the buffer's text has changed without
2257 /// waiting for an effect cycle, which would be required if using events.
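///
/// A minimal sketch of the mechanism (illustrative only):
///
/// ```ignore
/// use std::{cell::Cell, rc::Rc};
///
/// let changed = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&changed));
/// buffer.edit([(0..0, "x")], None, cx);
/// assert!(changed.get());
/// ```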
2258 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2259 if let Err(ix) = self
2260 .change_bits
2261 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2262 {
2263 self.change_bits.insert(ix, bit);
2264 }
2265 }
2266
2267 /// Set the change bit for all "listeners".
2268 fn was_changed(&mut self) {
2269 self.change_bits.retain(|change_bit| {
2270 change_bit
2271 .upgrade()
2272 .inspect(|bit| {
2273 _ = bit.replace(true);
2274 })
2275 .is_some()
2276 });
2277 }
2278
2279 /// Starts a transaction, if one is not already in-progress. When undoing or
2280 /// redoing edits, all of the edits performed within a transaction are undone
2281 /// or redone together.
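///
/// A minimal sketch of transaction grouping (illustrative only):
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "fn ")], None, cx);
/// buffer.edit([(3..3, "main")], None, cx);
/// buffer.end_transaction(cx);
/// buffer.undo(cx); // reverts both edits together
/// ```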
2282 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2283 self.start_transaction_at(Instant::now())
2284 }
2285
2286 /// Starts a transaction, providing the current time. Subsequent transactions
2287 /// that occur within a short period of time will be grouped together. This
2288 /// is controlled by the buffer's undo grouping duration.
2289 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2290 self.transaction_depth += 1;
2291 if self.was_dirty_before_starting_transaction.is_none() {
2292 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2293 }
2294 self.text.start_transaction_at(now)
2295 }
2296
2297 /// Terminates the current transaction, if this is the outermost transaction.
2298 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2299 self.end_transaction_at(Instant::now(), cx)
2300 }
2301
2302 /// Terminates the current transaction, providing the current time. Subsequent transactions
2303 /// that occur within a short period of time will be grouped together. This
2304 /// is controlled by the buffer's undo grouping duration.
2305 pub fn end_transaction_at(
2306 &mut self,
2307 now: Instant,
2308 cx: &mut Context<Self>,
2309 ) -> Option<TransactionId> {
2310 assert!(self.transaction_depth > 0);
2311 self.transaction_depth -= 1;
2312 let was_dirty = if self.transaction_depth == 0 {
2313 self.was_dirty_before_starting_transaction.take().unwrap()
2314 } else {
2315 false
2316 };
2317 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2318 self.did_edit(&start_version, was_dirty, cx);
2319 Some(transaction_id)
2320 } else {
2321 None
2322 }
2323 }
2324
2325 /// Manually add a transaction to the buffer's undo history.
2326 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2327 self.text.push_transaction(transaction, now);
2328 }
2329
2330 /// Differs from `push_transaction` in that it does not clear the redo
2331 /// stack. Intended to be used to create a parent transaction to merge
2332 /// potential child transactions into.
2333 ///
2334 /// The caller is responsible for removing it from the undo history using
2335 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2336 /// are merged into this transaction, the caller is responsible for ensuring
2337 /// the redo stack is cleared. The easiest way to ensure this is to create
2338 /// transactions with the usual `start_transaction` and `end_transaction`
2339 /// methods and merge the resulting transactions into the transaction created
2340 /// by this method.
2341 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2342 self.text.push_empty_transaction(now)
2343 }
2344
2345 /// Prevent the last transaction from being grouped with any subsequent transactions,
2346 /// even if they occur within the buffer's undo grouping duration.
2347 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2348 self.text.finalize_last_transaction()
2349 }
2350
2351 /// Manually group all changes since a given transaction.
2352 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2353 self.text.group_until_transaction(transaction_id);
2354 }
2355
2356 /// Manually remove a transaction from the buffer's undo history
2357 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2358 self.text.forget_transaction(transaction_id)
2359 }
2360
2361 /// Retrieve a transaction from the buffer's undo history
2362 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2363 self.text.get_transaction(transaction_id)
2364 }
2365
2366 /// Manually merge two transactions in the buffer's undo history.
2367 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2368 self.text.merge_transactions(transaction, destination);
2369 }
2370
2371 /// Waits for the buffer to receive operations with the given timestamps.
2372 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2373 &mut self,
2374 edit_ids: It,
2375 ) -> impl Future<Output = Result<()>> + use<It> {
2376 self.text.wait_for_edits(edit_ids)
2377 }
2378
2379 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2380 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2381 &mut self,
2382 anchors: It,
2383 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2384 self.text.wait_for_anchors(anchors)
2385 }
2386
2387 /// Waits for the buffer to receive operations up to the given version.
2388 pub fn wait_for_version(
2389 &mut self,
2390 version: clock::Global,
2391 ) -> impl Future<Output = Result<()>> + use<> {
2392 self.text.wait_for_version(version)
2393 }
2394
2395 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2396 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2397 pub fn give_up_waiting(&mut self) {
2398 self.text.give_up_waiting();
2399 }
2400
2401 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2402 let mut rx = None;
2403 if !self.autoindent_requests.is_empty() {
2404 let channel = oneshot::channel();
2405 self.wait_for_autoindent_txs.push(channel.0);
2406 rx = Some(channel.1);
2407 }
2408 rx
2409 }
2410
2411 /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2412 pub fn set_active_selections(
2413 &mut self,
2414 selections: Arc<[Selection<Anchor>]>,
2415 line_mode: bool,
2416 cursor_shape: CursorShape,
2417 cx: &mut Context<Self>,
2418 ) {
2419 let lamport_timestamp = self.text.lamport_clock.tick();
2420 self.remote_selections.insert(
2421 self.text.replica_id(),
2422 SelectionSet {
2423 selections: selections.clone(),
2424 lamport_timestamp,
2425 line_mode,
2426 cursor_shape,
2427 },
2428 );
2429 self.send_operation(
2430 Operation::UpdateSelections {
2431 selections,
2432 line_mode,
2433 lamport_timestamp,
2434 cursor_shape,
2435 },
2436 true,
2437 cx,
2438 );
2439 self.non_text_state_update_count += 1;
2440 cx.notify();
2441 }
2442
2443 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2444 /// this replica.
2445 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2446 if self
2447 .remote_selections
2448 .get(&self.text.replica_id())
2449 .is_none_or(|set| !set.selections.is_empty())
2450 {
2451 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2452 }
2453 }
2454
2455 pub fn set_agent_selections(
2456 &mut self,
2457 selections: Arc<[Selection<Anchor>]>,
2458 line_mode: bool,
2459 cursor_shape: CursorShape,
2460 cx: &mut Context<Self>,
2461 ) {
2462 let lamport_timestamp = self.text.lamport_clock.tick();
2463 self.remote_selections.insert(
2464 ReplicaId::AGENT,
2465 SelectionSet {
2466 selections,
2467 lamport_timestamp,
2468 line_mode,
2469 cursor_shape,
2470 },
2471 );
2472 self.non_text_state_update_count += 1;
2473 cx.notify();
2474 }
2475
2476 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2477 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2478 }
2479
2480 /// Replaces the buffer's entire text.
2481 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2482 where
2483 T: Into<Arc<str>>,
2484 {
2485 self.autoindent_requests.clear();
2486 self.edit([(0..self.len(), text)], None, cx)
2487 }
2488
2489 /// Appends the given text to the end of the buffer.
2490 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2491 where
2492 T: Into<Arc<str>>,
2493 {
2494 self.edit([(self.len()..self.len(), text)], None, cx)
2495 }
2496
2497 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2498 /// delete, and a string of text to insert at that location.
2499 ///
2500 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2501 /// request for the edited ranges, which will be processed when the buffer finishes
2502 /// parsing.
2503 ///
2504 /// Parsing takes place at the end of a transaction, and may run synchronously
2505 /// or asynchronously, depending on the changes.
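///
/// A minimal sketch (illustrative only): delete the first three bytes and append
/// a line, auto-indenting each edited line.
///
/// ```ignore
/// buffer.edit(
///     [(0..3, ""), (buffer.len()..buffer.len(), "\nfoo()")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```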
2506 pub fn edit<I, S, T>(
2507 &mut self,
2508 edits_iter: I,
2509 autoindent_mode: Option<AutoindentMode>,
2510 cx: &mut Context<Self>,
2511 ) -> Option<clock::Lamport>
2512 where
2513 I: IntoIterator<Item = (Range<S>, T)>,
2514 S: ToOffset,
2515 T: Into<Arc<str>>,
2516 {
2517 // Skip invalid edits and coalesce contiguous ones.
2518 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2519
2520 for (range, new_text) in edits_iter {
2521 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2522
2523 if range.start > range.end {
2524 mem::swap(&mut range.start, &mut range.end);
2525 }
2526 let new_text = new_text.into();
2527 if !new_text.is_empty() || !range.is_empty() {
2528 if let Some((prev_range, prev_text)) = edits.last_mut()
2529 && prev_range.end >= range.start
2530 {
2531 prev_range.end = cmp::max(prev_range.end, range.end);
2532 *prev_text = format!("{prev_text}{new_text}").into();
2533 } else {
2534 edits.push((range, new_text));
2535 }
2536 }
2537 }
2538 if edits.is_empty() {
2539 return None;
2540 }
2541
2542 self.start_transaction();
2543 self.pending_autoindent.take();
2544 let autoindent_request = autoindent_mode
2545 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2546
2547 let edit_operation = self.text.edit(edits.iter().cloned());
2548 let edit_id = edit_operation.timestamp();
2549
2550 if let Some((before_edit, mode)) = autoindent_request {
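// `delta` tracks the cumulative length change of the edits applied so far,
// so each original range can be mapped to its position in the edited buffer.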
2551 let mut delta = 0isize;
2552 let mut previous_setting = None;
2553 let entries: Vec<_> = edits
2554 .into_iter()
2555 .enumerate()
2556 .zip(&edit_operation.as_edit().unwrap().new_text)
2557 .filter(|((_, (range, _)), _)| {
2558 let language = before_edit.language_at(range.start);
2559 let language_id = language.map(|l| l.id());
2560 if let Some((cached_language_id, auto_indent)) = previous_setting
2561 && cached_language_id == language_id
2562 {
2563 auto_indent
2564 } else {
2565 // The auto-indent setting is not present in editorconfigs, hence
2566 // we can avoid passing the file here.
2567 let auto_indent =
2568 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2569 previous_setting = Some((language_id, auto_indent));
2570 auto_indent
2571 }
2572 })
2573 .map(|((ix, (range, _)), new_text)| {
2574 let new_text_length = new_text.len();
2575 let old_start = range.start.to_point(&before_edit);
2576 let new_start = (delta + range.start as isize) as usize;
2577 let range_len = range.end - range.start;
2578 delta += new_text_length as isize - range_len as isize;
2579
2580 // Decide what range of the insertion to auto-indent, and whether
2581 // the first line of the insertion should be considered a newly-inserted line
2582 // or an edit to an existing line.
2583 let mut range_of_insertion_to_indent = 0..new_text_length;
2584 let mut first_line_is_new = true;
2585
2586 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2587 let old_line_end = before_edit.line_len(old_start.row);
2588
2589 if old_start.column > old_line_start {
2590 first_line_is_new = false;
2591 }
2592
2593 if !new_text.contains('\n')
2594 && (old_start.column + (range_len as u32) < old_line_end
2595 || old_line_end == old_line_start)
2596 {
2597 first_line_is_new = false;
2598 }
2599
2600 // When inserting text starting with a newline, avoid auto-indenting the
2601 // previous line.
2602 if new_text.starts_with('\n') {
2603 range_of_insertion_to_indent.start += 1;
2604 first_line_is_new = true;
2605 }
2606
2607 let mut original_indent_column = None;
2608 if let AutoindentMode::Block {
2609 original_indent_columns,
2610 } = &mode
2611 {
2612 original_indent_column = Some(if new_text.starts_with('\n') {
2613 indent_size_for_text(
2614 new_text[range_of_insertion_to_indent.clone()].chars(),
2615 )
2616 .len
2617 } else {
2618 original_indent_columns
2619 .get(ix)
2620 .copied()
2621 .flatten()
2622 .unwrap_or_else(|| {
2623 indent_size_for_text(
2624 new_text[range_of_insertion_to_indent.clone()].chars(),
2625 )
2626 .len
2627 })
2628 });
2629
2630 // Avoid auto-indenting the line after the edit.
2631 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2632 range_of_insertion_to_indent.end -= 1;
2633 }
2634 }
2635
2636 AutoindentRequestEntry {
2637 first_line_is_new,
2638 original_indent_column,
2639 indent_size: before_edit.language_indent_size_at(range.start, cx),
2640 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2641 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2642 }
2643 })
2644 .collect();
2645
2646 if !entries.is_empty() {
2647 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2648 before_edit,
2649 entries,
2650 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2651 ignore_empty_lines: false,
2652 }));
2653 }
2654 }
2655
2656 self.end_transaction(cx);
2657 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2658 Some(edit_id)
2659 }
2660
2661 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2662 self.was_changed();
2663
2664 if self.edits_since::<usize>(old_version).next().is_none() {
2665 return;
2666 }
2667
2668 self.reparse(cx, true);
2669 cx.emit(BufferEvent::Edited);
2670 if was_dirty != self.is_dirty() {
2671 cx.emit(BufferEvent::DirtyChanged);
2672 }
2673 cx.notify();
2674 }
2675
2676 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2677 where
2678 I: IntoIterator<Item = Range<T>>,
2679 T: ToOffset + Copy,
2680 {
2681 let before_edit = self.snapshot();
2682 let entries = ranges
2683 .into_iter()
2684 .map(|range| AutoindentRequestEntry {
2685 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2686 first_line_is_new: true,
2687 indent_size: before_edit.language_indent_size_at(range.start, cx),
2688 original_indent_column: None,
2689 })
2690 .collect();
2691 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2692 before_edit,
2693 entries,
2694 is_block_mode: false,
2695 ignore_empty_lines: true,
2696 }));
2697 self.request_autoindent(cx);
2698 }
2699
2700 /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2701 /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2702 pub fn insert_empty_line(
2703 &mut self,
2704 position: impl ToPoint,
2705 space_above: bool,
2706 space_below: bool,
2707 cx: &mut Context<Self>,
2708 ) -> Point {
2709 let mut position = position.to_point(self);
2710
2711 self.start_transaction();
2712
2713 self.edit(
2714 [(position..position, "\n")],
2715 Some(AutoindentMode::EachLine),
2716 cx,
2717 );
2718
2719 if position.column > 0 {
2720 position += Point::new(1, 0);
2721 }
2722
2723 if !self.is_line_blank(position.row) {
2724 self.edit(
2725 [(position..position, "\n")],
2726 Some(AutoindentMode::EachLine),
2727 cx,
2728 );
2729 }
2730
2731 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2732 self.edit(
2733 [(position..position, "\n")],
2734 Some(AutoindentMode::EachLine),
2735 cx,
2736 );
2737 position.row += 1;
2738 }
2739
2740 if space_below
2741 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2742 {
2743 self.edit(
2744 [(position..position, "\n")],
2745 Some(AutoindentMode::EachLine),
2746 cx,
2747 );
2748 }
2749
2750 self.end_transaction(cx);
2751
2752 position
2753 }
2754
2755 /// Applies the given remote operations to the buffer.
2756 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2757 self.pending_autoindent.take();
2758 let was_dirty = self.is_dirty();
2759 let old_version = self.version.clone();
2760 let mut deferred_ops = Vec::new();
2761 let buffer_ops = ops
2762 .into_iter()
2763 .filter_map(|op| match op {
2764 Operation::Buffer(op) => Some(op),
2765 _ => {
2766 if self.can_apply_op(&op) {
2767 self.apply_op(op, cx);
2768 } else {
2769 deferred_ops.push(op);
2770 }
2771 None
2772 }
2773 })
2774 .collect::<Vec<_>>();
2775 for operation in buffer_ops.iter() {
2776 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2777 }
2778 self.text.apply_ops(buffer_ops);
2779 self.deferred_ops.insert(deferred_ops);
2780 self.flush_deferred_ops(cx);
2781 self.did_edit(&old_version, was_dirty, cx);
2782 // Notify independently of whether the buffer was edited as the operations could include a
2783 // selection update.
2784 cx.notify();
2785 }
2786
2787 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2788 let mut deferred_ops = Vec::new();
2789 for op in self.deferred_ops.drain().iter().cloned() {
2790 if self.can_apply_op(&op) {
2791 self.apply_op(op, cx);
2792 } else {
2793 deferred_ops.push(op);
2794 }
2795 }
2796 self.deferred_ops.insert(deferred_ops);
2797 }
2798
2799 pub fn has_deferred_ops(&self) -> bool {
2800 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2801 }
2802
2803 fn can_apply_op(&self, operation: &Operation) -> bool {
2804 match operation {
2805 Operation::Buffer(_) => {
2806 unreachable!("buffer operations should never be applied at this layer")
2807 }
2808 Operation::UpdateDiagnostics {
2809 diagnostics: diagnostic_set,
2810 ..
2811 } => diagnostic_set.iter().all(|diagnostic| {
2812 self.text.can_resolve(&diagnostic.range.start)
2813 && self.text.can_resolve(&diagnostic.range.end)
2814 }),
2815 Operation::UpdateSelections { selections, .. } => selections
2816 .iter()
2817 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2818 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2819 }
2820 }
2821
2822 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2823 match operation {
2824 Operation::Buffer(_) => {
2825 unreachable!("buffer operations should never be applied at this layer")
2826 }
2827 Operation::UpdateDiagnostics {
2828 server_id,
2829 diagnostics: diagnostic_set,
2830 lamport_timestamp,
2831 } => {
2832 let snapshot = self.snapshot();
2833 self.apply_diagnostic_update(
2834 server_id,
2835 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2836 lamport_timestamp,
2837 cx,
2838 );
2839 }
2840 Operation::UpdateSelections {
2841 selections,
2842 lamport_timestamp,
2843 line_mode,
2844 cursor_shape,
2845 } => {
2846 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2847 && set.lamport_timestamp > lamport_timestamp
2848 {
2849 return;
2850 }
2851
2852 self.remote_selections.insert(
2853 lamport_timestamp.replica_id,
2854 SelectionSet {
2855 selections,
2856 lamport_timestamp,
2857 line_mode,
2858 cursor_shape,
2859 },
2860 );
2861 self.text.lamport_clock.observe(lamport_timestamp);
2862 self.non_text_state_update_count += 1;
2863 }
2864 Operation::UpdateCompletionTriggers {
2865 triggers,
2866 lamport_timestamp,
2867 server_id,
2868 } => {
2869 if triggers.is_empty() {
2870 self.completion_triggers_per_language_server
2871 .remove(&server_id);
2872 self.completion_triggers = self
2873 .completion_triggers_per_language_server
2874 .values()
2875 .flat_map(|triggers| triggers.iter().cloned())
2876 .collect();
2877 } else {
2878 self.completion_triggers_per_language_server
2879 .insert(server_id, triggers.iter().cloned().collect());
2880 self.completion_triggers.extend(triggers);
2881 }
2882 self.text.lamport_clock.observe(lamport_timestamp);
2883 }
2884 Operation::UpdateLineEnding {
2885 line_ending,
2886 lamport_timestamp,
2887 } => {
2888 self.text.set_line_ending(line_ending);
2889 self.text.lamport_clock.observe(lamport_timestamp);
2890 }
2891 }
2892 }
2893
2894 fn apply_diagnostic_update(
2895 &mut self,
2896 server_id: LanguageServerId,
2897 diagnostics: DiagnosticSet,
2898 lamport_timestamp: clock::Lamport,
2899 cx: &mut Context<Self>,
2900 ) {
2901 if lamport_timestamp > self.diagnostics_timestamp {
2902 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2903 if diagnostics.is_empty() {
2904 if let Ok(ix) = ix {
2905 self.diagnostics.remove(ix);
2906 }
2907 } else {
2908 match ix {
2909 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2910 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2911 };
2912 }
2913 self.diagnostics_timestamp = lamport_timestamp;
2914 self.non_text_state_update_count += 1;
2915 self.text.lamport_clock.observe(lamport_timestamp);
2916 cx.notify();
2917 cx.emit(BufferEvent::DiagnosticsUpdated);
2918 }
2919 }
2920
2921 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2922 self.was_changed();
2923 cx.emit(BufferEvent::Operation {
2924 operation,
2925 is_local,
2926 });
2927 }
2928
2929 /// Removes the selections for a given peer.
2930 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2931 self.remote_selections.remove(&replica_id);
2932 cx.notify();
2933 }
2934
2935 /// Undoes the most recent transaction.
2936 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2937 let was_dirty = self.is_dirty();
2938 let old_version = self.version.clone();
2939
2940 if let Some((transaction_id, operation)) = self.text.undo() {
2941 self.send_operation(Operation::Buffer(operation), true, cx);
2942 self.did_edit(&old_version, was_dirty, cx);
2943 Some(transaction_id)
2944 } else {
2945 None
2946 }
2947 }
2948
2949 /// Manually undoes a specific transaction in the buffer's undo history.
2950 pub fn undo_transaction(
2951 &mut self,
2952 transaction_id: TransactionId,
2953 cx: &mut Context<Self>,
2954 ) -> bool {
2955 let was_dirty = self.is_dirty();
2956 let old_version = self.version.clone();
2957 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2958 self.send_operation(Operation::Buffer(operation), true, cx);
2959 self.did_edit(&old_version, was_dirty, cx);
2960 true
2961 } else {
2962 false
2963 }
2964 }
2965
2966 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2967 pub fn undo_to_transaction(
2968 &mut self,
2969 transaction_id: TransactionId,
2970 cx: &mut Context<Self>,
2971 ) -> bool {
2972 let was_dirty = self.is_dirty();
2973 let old_version = self.version.clone();
2974
2975 let operations = self.text.undo_to_transaction(transaction_id);
2976 let undone = !operations.is_empty();
2977 for operation in operations {
2978 self.send_operation(Operation::Buffer(operation), true, cx);
2979 }
2980 if undone {
2981 self.did_edit(&old_version, was_dirty, cx)
2982 }
2983 undone
2984 }
2985
2986 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2987 let was_dirty = self.is_dirty();
2988 let operation = self.text.undo_operations(counts);
2989 let old_version = self.version.clone();
2990 self.send_operation(Operation::Buffer(operation), true, cx);
2991 self.did_edit(&old_version, was_dirty, cx);
2992 }
2993
2994 /// Redoes the most recently undone transaction.
2995 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2996 let was_dirty = self.is_dirty();
2997 let old_version = self.version.clone();
2998
2999 if let Some((transaction_id, operation)) = self.text.redo() {
3000 self.send_operation(Operation::Buffer(operation), true, cx);
3001 self.did_edit(&old_version, was_dirty, cx);
3002 Some(transaction_id)
3003 } else {
3004 None
3005 }
3006 }
3007
3008 /// Manually redoes all changes until a given transaction in the buffer's redo history.
3009 pub fn redo_to_transaction(
3010 &mut self,
3011 transaction_id: TransactionId,
3012 cx: &mut Context<Self>,
3013 ) -> bool {
3014 let was_dirty = self.is_dirty();
3015 let old_version = self.version.clone();
3016
3017 let operations = self.text.redo_to_transaction(transaction_id);
3018 let redone = !operations.is_empty();
3019 for operation in operations {
3020 self.send_operation(Operation::Buffer(operation), true, cx);
3021 }
3022 if redone {
3023 self.did_edit(&old_version, was_dirty, cx)
3024 }
3025 redone
3026 }
3027
3028 /// Override current completion triggers with the user-provided completion triggers.
3029 pub fn set_completion_triggers(
3030 &mut self,
3031 server_id: LanguageServerId,
3032 triggers: BTreeSet<String>,
3033 cx: &mut Context<Self>,
3034 ) {
3035 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3036 if triggers.is_empty() {
3037 self.completion_triggers_per_language_server
3038 .remove(&server_id);
3039 self.completion_triggers = self
3040 .completion_triggers_per_language_server
3041 .values()
3042 .flat_map(|triggers| triggers.iter().cloned())
3043 .collect();
3044 } else {
3045 self.completion_triggers_per_language_server
3046 .insert(server_id, triggers.clone());
3047 self.completion_triggers.extend(triggers.iter().cloned());
3048 }
3049 self.send_operation(
3050 Operation::UpdateCompletionTriggers {
3051 triggers: triggers.into_iter().collect(),
3052 lamport_timestamp: self.completion_triggers_timestamp,
3053 server_id,
3054 },
3055 true,
3056 cx,
3057 );
3058 cx.notify();
3059 }
3060
3061 /// Returns a list of strings which trigger a completion menu for this language.
3062 /// Usually this is driven by the language server, which returns a list of trigger characters for completions.
3063 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3064 &self.completion_triggers
3065 }
3066
3067 /// Call this directly after performing edits to prevent the preview tab
3068 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3069 /// to return false until there are additional edits.
3070 pub fn refresh_preview(&mut self) {
3071 self.preview_version = self.version.clone();
3072 }
3073
3074 /// Whether we should preserve the preview status of a tab containing this buffer.
3075 pub fn preserve_preview(&self) -> bool {
3076 !self.has_edits_since(&self.preview_version)
3077 }
3078}
3079
3080#[doc(hidden)]
3081#[cfg(any(test, feature = "test-support"))]
3082impl Buffer {
3083 pub fn edit_via_marked_text(
3084 &mut self,
3085 marked_string: &str,
3086 autoindent_mode: Option<AutoindentMode>,
3087 cx: &mut Context<Self>,
3088 ) {
3089 let edits = self.edits_for_marked_text(marked_string);
3090 self.edit(edits, autoindent_mode, cx);
3091 }
3092
3093 pub fn set_group_interval(&mut self, group_interval: Duration) {
3094 self.text.set_group_interval(group_interval);
3095 }
3096
3097 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3098 where
3099 T: rand::Rng,
3100 {
3101 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3102 let mut last_end = None;
3103 for _ in 0..old_range_count {
3104 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3105 break;
3106 }
3107
3108 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3109 let mut range = self.random_byte_range(new_start, rng);
3110 if rng.random_bool(0.2) {
3111 mem::swap(&mut range.start, &mut range.end);
3112 }
3113 last_end = Some(range.end);
3114
3115 let new_text_len = rng.random_range(0..10);
3116 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3117 new_text = new_text.to_uppercase();
3118
3119 edits.push((range, new_text));
3120 }
3121 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3122 self.edit(edits, None, cx);
3123 }
3124
3125 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3126 let was_dirty = self.is_dirty();
3127 let old_version = self.version.clone();
3128
3129 let ops = self.text.randomly_undo_redo(rng);
3130 if !ops.is_empty() {
3131 for op in ops {
3132 self.send_operation(Operation::Buffer(op), true, cx);
3133 self.did_edit(&old_version, was_dirty, cx);
3134 }
3135 }
3136 }
3137}
3138
3139impl EventEmitter<BufferEvent> for Buffer {}
3140
3141impl Deref for Buffer {
3142 type Target = TextBuffer;
3143
3144 fn deref(&self) -> &Self::Target {
3145 &self.text
3146 }
3147}
3148
3149impl BufferSnapshot {
3150 /// Returns [`IndentSize`] for a given line that respects user settings and
3151 /// language preferences.
3152 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3153 indent_size_for_line(self, row)
3154 }
3155
3156 /// Returns [`IndentSize`] for a given position that respects user settings
3157 /// and language preferences.
3158 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3159 let settings = language_settings(
3160 self.language_at(position).map(|l| l.name()),
3161 self.file(),
3162 cx,
3163 );
3164 if settings.hard_tabs {
3165 IndentSize::tab()
3166 } else {
3167 IndentSize::spaces(settings.tab_size.get())
3168 }
3169 }
3170
3171 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3172 /// is passed in as `single_indent_size`.
3173 pub fn suggested_indents(
3174 &self,
3175 rows: impl Iterator<Item = u32>,
3176 single_indent_size: IndentSize,
3177 ) -> BTreeMap<u32, IndentSize> {
3178 let mut result = BTreeMap::new();
3179
3180 for row_range in contiguous_ranges(rows, 10) {
3181 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3182 Some(suggestions) => suggestions,
3183 _ => break,
3184 };
3185
3186 for (row, suggestion) in row_range.zip(suggestions) {
3187 let indent_size = if let Some(suggestion) = suggestion {
3188 result
3189 .get(&suggestion.basis_row)
3190 .copied()
3191 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3192 .with_delta(suggestion.delta, single_indent_size)
3193 } else {
3194 self.indent_size_for_line(row)
3195 };
3196
3197 result.insert(row, indent_size);
3198 }
3199 }
3200
3201 result
3202 }
3203
3204 fn suggest_autoindents(
3205 &self,
3206 row_range: Range<u32>,
3207 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3208 let config = &self.language.as_ref()?.config;
3209 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3210
3211 #[derive(Debug, Clone)]
3212 struct StartPosition {
3213 start: Point,
3214 suffix: SharedString,
3215 }
3216
3217 // Find the suggested indentation ranges based on the syntax tree.
3218 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3219 let end = Point::new(row_range.end, 0);
3220 let range = (start..end).to_offset(&self.text);
3221 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3222 Some(&grammar.indents_config.as_ref()?.query)
3223 });
3224 let indent_configs = matches
3225 .grammars()
3226 .iter()
3227 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3228 .collect::<Vec<_>>();
3229
3230 let mut indent_ranges = Vec::<Range<Point>>::new();
3231 let mut start_positions = Vec::<StartPosition>::new();
3232 let mut outdent_positions = Vec::<Point>::new();
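// Each match can contribute an indent range (via the indent/start/end
// captures), an outdent position, or a suffixed start position that is used
// by the regex-based decrease-indent rules below.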
3233 while let Some(mat) = matches.peek() {
3234 let mut start: Option<Point> = None;
3235 let mut end: Option<Point> = None;
3236
3237 let config = indent_configs[mat.grammar_index];
3238 for capture in mat.captures {
3239 if capture.index == config.indent_capture_ix {
3240 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3241 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3242 } else if Some(capture.index) == config.start_capture_ix {
3243 start = Some(Point::from_ts_point(capture.node.end_position()));
3244 } else if Some(capture.index) == config.end_capture_ix {
3245 end = Some(Point::from_ts_point(capture.node.start_position()));
3246 } else if Some(capture.index) == config.outdent_capture_ix {
3247 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3248 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3249 start_positions.push(StartPosition {
3250 start: Point::from_ts_point(capture.node.start_position()),
3251 suffix: suffix.clone(),
3252 });
3253 }
3254 }
3255
3256 matches.advance();
3257 if let Some((start, end)) = start.zip(end) {
3258 if start.row == end.row {
3259 continue;
3260 }
3261 let range = start..end;
3262 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3263 Err(ix) => indent_ranges.insert(ix, range),
3264 Ok(ix) => {
3265 let prev_range = &mut indent_ranges[ix];
3266 prev_range.end = prev_range.end.max(range.end);
3267 }
3268 }
3269 }
3270 }
3271
3272 let mut error_ranges = Vec::<Range<Point>>::new();
3273 let mut matches = self
3274 .syntax
3275 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3276 while let Some(mat) = matches.peek() {
3277 let node = mat.captures[0].node;
3278 let start = Point::from_ts_point(node.start_position());
3279 let end = Point::from_ts_point(node.end_position());
3280 let range = start..end;
3281 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3282 Ok(ix) | Err(ix) => ix,
3283 };
3284 let mut end_ix = ix;
3285 while let Some(existing_range) = error_ranges.get(end_ix) {
3286 if existing_range.end < end {
3287 end_ix += 1;
3288 } else {
3289 break;
3290 }
3291 }
3292 error_ranges.splice(ix..end_ix, [range]);
3293 matches.advance();
3294 }
3295
3296 outdent_positions.sort();
3297 for outdent_position in outdent_positions {
3298 // find the innermost indent range containing this outdent_position
3299 // set its end to the outdent position
3300 if let Some(range_to_truncate) = indent_ranges
3301 .iter_mut()
3302 .filter(|indent_range| indent_range.contains(&outdent_position))
3303 .next_back()
3304 {
3305 range_to_truncate.end = outdent_position;
3306 }
3307 }
3308
3309 start_positions.sort_by_key(|b| b.start);
3310
3311 // Find the suggested indentation increases and decreases based on regexes.
3312 let mut regex_outdent_map = HashMap::default();
3313 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3314 let mut start_positions_iter = start_positions.iter().peekable();
3315
3316 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3317 self.for_each_line(
3318 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3319 ..Point::new(row_range.end, 0),
3320 |row, line| {
3321 if config
3322 .decrease_indent_pattern
3323 .as_ref()
3324 .is_some_and(|regex| regex.is_match(line))
3325 {
3326 indent_change_rows.push((row, Ordering::Less));
3327 }
3328 if config
3329 .increase_indent_pattern
3330 .as_ref()
3331 .is_some_and(|regex| regex.is_match(line))
3332 {
3333 indent_change_rows.push((row + 1, Ordering::Greater));
3334 }
3335 while let Some(pos) = start_positions_iter.peek() {
3336 if pos.start.row < row {
3337 let pos = start_positions_iter.next().unwrap();
3338 last_seen_suffix
3339 .entry(pos.suffix.to_string())
3340 .or_default()
3341 .push(pos.start);
3342 } else {
3343 break;
3344 }
3345 }
3346 for rule in &config.decrease_indent_patterns {
3347 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3348 let row_start_column = self.indent_size_for_line(row).len;
3349 let basis_row = rule
3350 .valid_after
3351 .iter()
3352 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3353 .flatten()
3354 .filter(|start_point| start_point.column <= row_start_column)
3355 .max_by_key(|start_point| start_point.row);
3356 if let Some(outdent_to_row) = basis_row {
3357 regex_outdent_map.insert(row, outdent_to_row.row);
3358 }
3359 break;
3360 }
3361 }
3362 },
3363 );
3364
3365 let mut indent_changes = indent_change_rows.into_iter().peekable();
3366 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3367 prev_non_blank_row.unwrap_or(0)
3368 } else {
3369 row_range.start.saturating_sub(1)
3370 };
3371
3372 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
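// For each requested row, combine the syntax-derived indent ranges, the
// regex-driven indent changes, and the error ranges into a single suggestion
// expressed relative to `prev_row` (or to an outdent basis row).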
3373 Some(row_range.map(move |row| {
3374 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3375
3376 let mut indent_from_prev_row = false;
3377 let mut outdent_from_prev_row = false;
3378 let mut outdent_to_row = u32::MAX;
3379 let mut from_regex = false;
3380
3381 while let Some((indent_row, delta)) = indent_changes.peek() {
3382 match indent_row.cmp(&row) {
3383 Ordering::Equal => match delta {
3384 Ordering::Less => {
3385 from_regex = true;
3386 outdent_from_prev_row = true
3387 }
3388 Ordering::Greater => {
3389 indent_from_prev_row = true;
3390 from_regex = true
3391 }
3392 _ => {}
3393 },
3394
3395 Ordering::Greater => break,
3396 Ordering::Less => {}
3397 }
3398
3399 indent_changes.next();
3400 }
3401
3402 for range in &indent_ranges {
3403 if range.start.row >= row {
3404 break;
3405 }
3406 if range.start.row == prev_row && range.end > row_start {
3407 indent_from_prev_row = true;
3408 }
3409 if range.end > prev_row_start && range.end <= row_start {
3410 outdent_to_row = outdent_to_row.min(range.start.row);
3411 }
3412 }
3413
3414 if let Some(basis_row) = regex_outdent_map.get(&row) {
3415 indent_from_prev_row = false;
3416 outdent_to_row = *basis_row;
3417 from_regex = true;
3418 }
3419
3420 let within_error = error_ranges
3421 .iter()
3422 .any(|e| e.start.row < row && e.end > row_start);
3423
3424 let suggestion = if outdent_to_row == prev_row
3425 || (outdent_from_prev_row && indent_from_prev_row)
3426 {
3427 Some(IndentSuggestion {
3428 basis_row: prev_row,
3429 delta: Ordering::Equal,
3430 within_error: within_error && !from_regex,
3431 })
3432 } else if indent_from_prev_row {
3433 Some(IndentSuggestion {
3434 basis_row: prev_row,
3435 delta: Ordering::Greater,
3436 within_error: within_error && !from_regex,
3437 })
3438 } else if outdent_to_row < prev_row {
3439 Some(IndentSuggestion {
3440 basis_row: outdent_to_row,
3441 delta: Ordering::Equal,
3442 within_error: within_error && !from_regex,
3443 })
3444 } else if outdent_from_prev_row {
3445 Some(IndentSuggestion {
3446 basis_row: prev_row,
3447 delta: Ordering::Less,
3448 within_error: within_error && !from_regex,
3449 })
3450 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3451 {
3452 Some(IndentSuggestion {
3453 basis_row: prev_row,
3454 delta: Ordering::Equal,
3455 within_error: within_error && !from_regex,
3456 })
3457 } else {
3458 None
3459 };
3460
3461 prev_row = row;
3462 prev_row_start = row_start;
3463 suggestion
3464 }))
3465 }
3466
3467 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3468 while row > 0 {
3469 row -= 1;
3470 if !self.is_line_blank(row) {
3471 return Some(row);
3472 }
3473 }
3474 None
3475 }
3476
3477 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3478 let captures = self.syntax.captures(range, &self.text, |grammar| {
3479 grammar
3480 .highlights_config
3481 .as_ref()
3482 .map(|config| &config.query)
3483 });
3484 let highlight_maps = captures
3485 .grammars()
3486 .iter()
3487 .map(|grammar| grammar.highlight_map())
3488 .collect();
3489 (captures, highlight_maps)
3490 }
3491
    /// Iterates over chunks of text in the given range of the buffer. Text is chunked
    /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). Chunks are
    /// additionally split so that each chunk has a single syntax highlighting style and
    /// diagnostic status.
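    ///
    /// A minimal usage sketch (illustrative only, not a doctest; assumes `snapshot` is a
    /// [`BufferSnapshot`]):
    /// ```ignore
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk has a uniform highlight id and diagnostic severity.
    ///     let _highlight = chunk.syntax_highlight_id;
    ///     let _severity = chunk.diagnostic_severity;
    ///     print!("{}", chunk.text);
    /// }
    /// ```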
3496 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3497 let range = range.start.to_offset(self)..range.end.to_offset(self);
3498
3499 let mut syntax = None;
3500 if language_aware {
3501 syntax = Some(self.get_highlights(range.clone()));
3502 }
3503 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3504 let diagnostics = language_aware;
3505 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3506 }
3507
3508 pub fn highlighted_text_for_range<T: ToOffset>(
3509 &self,
3510 range: Range<T>,
3511 override_style: Option<HighlightStyle>,
3512 syntax_theme: &SyntaxTheme,
3513 ) -> HighlightedText {
3514 HighlightedText::from_buffer_range(
3515 range,
3516 &self.text,
3517 &self.syntax,
3518 override_style,
3519 syntax_theme,
3520 )
3521 }
3522
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// A callback is used so that a single string buffer can be reused, avoiding an allocation per line.
3525 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3526 let mut line = String::new();
3527 let mut row = range.start.row;
3528 for chunk in self
3529 .as_rope()
3530 .chunks_in_range(range.to_offset(self))
3531 .chain(["\n"])
3532 {
3533 for (newline_ix, text) in chunk.split('\n').enumerate() {
3534 if newline_ix > 0 {
3535 callback(row, &line);
3536 row += 1;
3537 line.clear();
3538 }
3539 line.push_str(text);
3540 }
3541 }
3542 }
3543
3544 /// Iterates over every [`SyntaxLayer`] in the buffer.
3545 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3546 self.syntax_layers_for_range(0..self.len(), true)
3547 }
3548
3549 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3550 let offset = position.to_offset(self);
3551 self.syntax_layers_for_range(offset..offset, false)
3552 .filter(|l| {
3553 if let Some(ranges) = l.included_sub_ranges {
3554 ranges.iter().any(|range| {
3555 let start = range.start.to_offset(self);
3556 start <= offset && {
3557 let end = range.end.to_offset(self);
3558 offset < end
3559 }
3560 })
3561 } else {
3562 l.node().start_byte() <= offset && l.node().end_byte() > offset
3563 }
3564 })
3565 .last()
3566 }
3567
3568 pub fn syntax_layers_for_range<D: ToOffset>(
3569 &self,
3570 range: Range<D>,
3571 include_hidden: bool,
3572 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3573 self.syntax
3574 .layers_for_range(range, &self.text, include_hidden)
3575 }
3576
3577 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3578 &self,
3579 range: Range<D>,
3580 ) -> Option<SyntaxLayer<'_>> {
3581 let range = range.to_offset(self);
3582 self.syntax
3583 .layers_for_range(range, &self.text, false)
3584 .max_by(|a, b| {
3585 if a.depth != b.depth {
3586 a.depth.cmp(&b.depth)
3587 } else if a.offset.0 != b.offset.0 {
3588 a.offset.0.cmp(&b.offset.0)
3589 } else {
3590 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3591 }
3592 })
3593 }
3594
3595 /// Returns the main [`Language`].
3596 pub fn language(&self) -> Option<&Arc<Language>> {
3597 self.language.as_ref()
3598 }
3599
3600 /// Returns the [`Language`] at the given location.
3601 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3602 self.syntax_layer_at(position)
3603 .map(|info| info.language)
3604 .or(self.language.as_ref())
3605 }
3606
3607 /// Returns the settings for the language at the given location.
3608 pub fn settings_at<'a, D: ToOffset>(
3609 &'a self,
3610 position: D,
3611 cx: &'a App,
3612 ) -> Cow<'a, LanguageSettings> {
3613 language_settings(
3614 self.language_at(position).map(|l| l.name()),
3615 self.file.as_ref(),
3616 cx,
3617 )
3618 }
3619
3620 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3621 CharClassifier::new(self.language_scope_at(point))
3622 }
3623
3624 /// Returns the [`LanguageScope`] at the given location.
3625 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3626 let offset = position.to_offset(self);
3627 let mut scope = None;
3628 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3629
3630 // Use the layer that has the smallest node intersecting the given point.
3631 for layer in self
3632 .syntax
3633 .layers_for_range(offset..offset, &self.text, false)
3634 {
3635 let mut cursor = layer.node().walk();
3636
3637 let mut range = None;
3638 loop {
3639 let child_range = cursor.node().byte_range();
3640 if !child_range.contains(&offset) {
3641 break;
3642 }
3643
3644 range = Some(child_range);
3645 if cursor.goto_first_child_for_byte(offset).is_none() {
3646 break;
3647 }
3648 }
3649
3650 if let Some(range) = range
3651 && smallest_range_and_depth.as_ref().is_none_or(
3652 |(smallest_range, smallest_range_depth)| {
3653 if layer.depth > *smallest_range_depth {
3654 true
3655 } else if layer.depth == *smallest_range_depth {
3656 range.len() < smallest_range.len()
3657 } else {
3658 false
3659 }
3660 },
3661 )
3662 {
3663 smallest_range_and_depth = Some((range, layer.depth));
3664 scope = Some(LanguageScope {
3665 language: layer.language.clone(),
3666 override_id: layer.override_id(offset, &self.text),
3667 });
3668 }
3669 }
3670
3671 scope.or_else(|| {
3672 self.language.clone().map(|language| LanguageScope {
3673 language,
3674 override_id: None,
3675 })
3676 })
3677 }
3678
3679 /// Returns a tuple of the range and character kind of the word
3680 /// surrounding the given position.
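    ///
    /// A minimal usage sketch (illustrative only, not a doctest; `cursor_offset` is an assumed
    /// buffer offset):
    /// ```ignore
    /// let (word_range, kind) = snapshot.surrounding_word(cursor_offset, None);
    /// let word: String = snapshot.text_for_range(word_range).collect();
    /// ```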
3681 pub fn surrounding_word<T: ToOffset>(
3682 &self,
3683 start: T,
3684 scope_context: Option<CharScopeContext>,
3685 ) -> (Range<usize>, Option<CharKind>) {
3686 let mut start = start.to_offset(self);
3687 let mut end = start;
3688 let mut next_chars = self.chars_at(start).take(128).peekable();
3689 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3690
3691 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3692 let word_kind = cmp::max(
3693 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3694 next_chars.peek().copied().map(|c| classifier.kind(c)),
3695 );
3696
3697 for ch in prev_chars {
3698 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3699 start -= ch.len_utf8();
3700 } else {
3701 break;
3702 }
3703 }
3704
3705 for ch in next_chars {
3706 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3707 end += ch.len_utf8();
3708 } else {
3709 break;
3710 }
3711 }
3712
3713 (start..end, word_kind)
3714 }
3715
    /// Moves the [`tree_sitter::TreeCursor`] to the smallest descendant or ancestor syntax node
    /// enclosing the given range. When `require_larger` is true, the node found must be larger
    /// than the query range.
    ///
    /// Returns `true` if such a node was found, and `false` otherwise. In the `false` case the
    /// cursor will have been moved to the root of the tree.
3721 fn goto_node_enclosing_range(
3722 cursor: &mut tree_sitter::TreeCursor,
3723 query_range: &Range<usize>,
3724 require_larger: bool,
3725 ) -> bool {
3726 let mut ascending = false;
3727 loop {
3728 let mut range = cursor.node().byte_range();
3729 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3732 if range.start > query_range.start {
3733 cursor.goto_previous_sibling();
3734 range = cursor.node().byte_range();
3735 }
3736 } else {
3737 // When the query range is non-empty and the current node ends exactly at the start,
3738 // move to the next sibling to find a node that extends beyond the start.
3739 if range.end == query_range.start {
3740 cursor.goto_next_sibling();
3741 range = cursor.node().byte_range();
3742 }
3743 }
3744
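            // If the current node does not enclose the query range (or is not strictly larger
            // when required), ascend to the parent. The first enclosing node reached while
            // ascending is the answer.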
3745 let encloses = range.contains_inclusive(query_range)
3746 && (!require_larger || range.len() > query_range.len());
3747 if !encloses {
3748 ascending = true;
3749 if !cursor.goto_parent() {
3750 return false;
3751 }
3752 continue;
3753 } else if ascending {
3754 return true;
3755 }
3756
3757 // Descend into the current node.
3758 if cursor
3759 .goto_first_child_for_byte(query_range.start)
3760 .is_none()
3761 {
3762 return true;
3763 }
3764 }
3765 }
3766
3767 pub fn syntax_ancestor<'a, T: ToOffset>(
3768 &'a self,
3769 range: Range<T>,
3770 ) -> Option<tree_sitter::Node<'a>> {
3771 let range = range.start.to_offset(self)..range.end.to_offset(self);
3772 let mut result: Option<tree_sitter::Node<'a>> = None;
3773 for layer in self
3774 .syntax
3775 .layers_for_range(range.clone(), &self.text, true)
3776 {
3777 let mut cursor = layer.node().walk();
3778
3779 // Find the node that both contains the range and is larger than it.
3780 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3781 continue;
3782 }
3783
3784 let left_node = cursor.node();
3785 let mut layer_result = left_node;
3786
3787 // For an empty range, try to find another node immediately to the right of the range.
3788 if left_node.end_byte() == range.start {
3789 let mut right_node = None;
3790 while !cursor.goto_next_sibling() {
3791 if !cursor.goto_parent() {
3792 break;
3793 }
3794 }
3795
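                // Descend along nodes that start exactly at the range position to find the
                // deepest candidate on the right-hand side.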
3796 while cursor.node().start_byte() == range.start {
3797 right_node = Some(cursor.node());
3798 if !cursor.goto_first_child() {
3799 break;
3800 }
3801 }
3802
3803 // If there is a candidate node on both sides of the (empty) range, then
3804 // decide between the two by favoring a named node over an anonymous token.
3805 // If both nodes are the same in that regard, favor the right one.
3806 if let Some(right_node) = right_node
3807 && (right_node.is_named() || !left_node.is_named())
3808 {
3809 layer_result = right_node;
3810 }
3811 }
3812
3813 if let Some(previous_result) = &result
3814 && previous_result.byte_range().len() < layer_result.byte_range().len()
3815 {
3816 continue;
3817 }
3818 result = Some(layer_result);
3819 }
3820
3821 result
3822 }
3823
3824 /// Find the previous sibling syntax node at the given range.
3825 ///
3826 /// This function locates the syntax node that precedes the node containing
3827 /// the given range. It searches hierarchically by:
3828 /// 1. Finding the node that contains the given range
3829 /// 2. Looking for the previous sibling at the same tree level
3830 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3831 ///
3832 /// Returns `None` if there is no previous sibling at any ancestor level.
3833 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3834 &'a self,
3835 range: Range<T>,
3836 ) -> Option<tree_sitter::Node<'a>> {
3837 let range = range.start.to_offset(self)..range.end.to_offset(self);
3838 let mut result: Option<tree_sitter::Node<'a>> = None;
3839
3840 for layer in self
3841 .syntax
3842 .layers_for_range(range.clone(), &self.text, true)
3843 {
3844 let mut cursor = layer.node().walk();
3845
3846 // Find the node that contains the range
3847 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3848 continue;
3849 }
3850
3851 // Look for the previous sibling, moving up ancestor levels if needed
3852 loop {
3853 if cursor.goto_previous_sibling() {
3854 let layer_result = cursor.node();
3855
3856 if let Some(previous_result) = &result {
3857 if previous_result.byte_range().end < layer_result.byte_range().end {
3858 continue;
3859 }
3860 }
3861 result = Some(layer_result);
3862 break;
3863 }
3864
3865 // No sibling found at this level, try moving up to parent
3866 if !cursor.goto_parent() {
3867 break;
3868 }
3869 }
3870 }
3871
3872 result
3873 }
3874
3875 /// Find the next sibling syntax node at the given range.
3876 ///
3877 /// This function locates the syntax node that follows the node containing
3878 /// the given range. It searches hierarchically by:
3879 /// 1. Finding the node that contains the given range
3880 /// 2. Looking for the next sibling at the same tree level
3881 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3882 ///
3883 /// Returns `None` if there is no next sibling at any ancestor level.
3884 pub fn syntax_next_sibling<'a, T: ToOffset>(
3885 &'a self,
3886 range: Range<T>,
3887 ) -> Option<tree_sitter::Node<'a>> {
3888 let range = range.start.to_offset(self)..range.end.to_offset(self);
3889 let mut result: Option<tree_sitter::Node<'a>> = None;
3890
3891 for layer in self
3892 .syntax
3893 .layers_for_range(range.clone(), &self.text, true)
3894 {
3895 let mut cursor = layer.node().walk();
3896
3897 // Find the node that contains the range
3898 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3899 continue;
3900 }
3901
3902 // Look for the next sibling, moving up ancestor levels if needed
3903 loop {
3904 if cursor.goto_next_sibling() {
3905 let layer_result = cursor.node();
3906
3907 if let Some(previous_result) = &result {
3908 if previous_result.byte_range().start > layer_result.byte_range().start {
3909 continue;
3910 }
3911 }
3912 result = Some(layer_result);
3913 break;
3914 }
3915
3916 // No sibling found at this level, try moving up to parent
3917 if !cursor.goto_parent() {
3918 break;
3919 }
3920 }
3921 }
3922
3923 result
3924 }
3925
    /// Returns the root syntax node within the given row.
3927 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3928 let start_offset = position.to_offset(self);
3929
3930 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3931
3932 let layer = self
3933 .syntax
3934 .layers_for_range(start_offset..start_offset, &self.text, true)
3935 .next()?;
3936
3937 let mut cursor = layer.node().walk();
3938
3939 // Descend to the first leaf that touches the start of the range.
3940 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3941 if cursor.node().end_byte() == start_offset {
3942 cursor.goto_next_sibling();
3943 }
3944 }
3945
3946 // Ascend to the root node within the same row.
3947 while cursor.goto_parent() {
3948 if cursor.node().start_position().row != row {
3949 break;
3950 }
3951 }
3952
3953 Some(cursor.node())
3954 }
3955
3956 /// Returns the outline for the buffer.
3957 ///
3958 /// This method allows passing an optional [`SyntaxTheme`] to
3959 /// syntax-highlight the returned symbols.
3960 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3961 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3962 }
3963
3964 /// Returns all the symbols that contain the given position.
3965 ///
3966 /// This method allows passing an optional [`SyntaxTheme`] to
3967 /// syntax-highlight the returned symbols.
3968 pub fn symbols_containing<T: ToOffset>(
3969 &self,
3970 position: T,
3971 theme: Option<&SyntaxTheme>,
3972 ) -> Vec<OutlineItem<Anchor>> {
3973 let position = position.to_offset(self);
3974 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3975 let end = self.clip_offset(position + 1, Bias::Right);
3976 let mut items = self.outline_items_containing(start..end, false, theme);
3977 let mut prev_depth = None;
3978 items.retain(|item| {
3979 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3980 prev_depth = Some(item.depth);
3981 result
3982 });
3983 items
3984 }
3985
3986 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3987 let range = range.to_offset(self);
3988 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3989 grammar.outline_config.as_ref().map(|c| &c.query)
3990 });
3991 let configs = matches
3992 .grammars()
3993 .iter()
3994 .map(|g| g.outline_config.as_ref().unwrap())
3995 .collect::<Vec<_>>();
3996
3997 while let Some(mat) = matches.peek() {
3998 let config = &configs[mat.grammar_index];
3999 let containing_item_node = maybe!({
4000 let item_node = mat.captures.iter().find_map(|cap| {
4001 if cap.index == config.item_capture_ix {
4002 Some(cap.node)
4003 } else {
4004 None
4005 }
4006 })?;
4007
4008 let item_byte_range = item_node.byte_range();
4009 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4010 None
4011 } else {
4012 Some(item_node)
4013 }
4014 });
4015
4016 if let Some(item_node) = containing_item_node {
4017 return Some(
4018 Point::from_ts_point(item_node.start_position())
4019 ..Point::from_ts_point(item_node.end_position()),
4020 );
4021 }
4022
4023 matches.advance();
4024 }
4025 None
4026 }
4027
4028 pub fn outline_items_containing<T: ToOffset>(
4029 &self,
4030 range: Range<T>,
4031 include_extra_context: bool,
4032 theme: Option<&SyntaxTheme>,
4033 ) -> Vec<OutlineItem<Anchor>> {
4034 self.outline_items_containing_internal(
4035 range,
4036 include_extra_context,
4037 theme,
4038 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4039 )
4040 }
4041
4042 pub fn outline_items_as_points_containing<T: ToOffset>(
4043 &self,
4044 range: Range<T>,
4045 include_extra_context: bool,
4046 theme: Option<&SyntaxTheme>,
4047 ) -> Vec<OutlineItem<Point>> {
4048 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4049 range
4050 })
4051 }
4052
4053 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4054 &self,
4055 range: Range<T>,
4056 include_extra_context: bool,
4057 theme: Option<&SyntaxTheme>,
4058 ) -> Vec<OutlineItem<usize>> {
4059 self.outline_items_containing_internal(
4060 range,
4061 include_extra_context,
4062 theme,
4063 |buffer, range| range.to_offset(buffer),
4064 )
4065 }
4066
4067 fn outline_items_containing_internal<T: ToOffset, U>(
4068 &self,
4069 range: Range<T>,
4070 include_extra_context: bool,
4071 theme: Option<&SyntaxTheme>,
4072 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4073 ) -> Vec<OutlineItem<U>> {
4074 let range = range.to_offset(self);
4075 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4076 grammar.outline_config.as_ref().map(|c| &c.query)
4077 });
4078
4079 let mut items = Vec::new();
4080 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4081 while let Some(mat) = matches.peek() {
4082 let config = matches.grammars()[mat.grammar_index]
4083 .outline_config
4084 .as_ref()
4085 .unwrap();
4086 if let Some(item) =
4087 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4088 {
4089 items.push(item);
4090 } else if let Some(capture) = mat
4091 .captures
4092 .iter()
4093 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4094 {
4095 let capture_range = capture.node.start_position()..capture.node.end_position();
4096 let mut capture_row_range =
4097 capture_range.start.row as u32..capture_range.end.row as u32;
4098 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4099 {
4100 capture_row_range.end -= 1;
4101 }
4102 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4103 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4104 last_row_range.end = capture_row_range.end;
4105 } else {
4106 annotation_row_ranges.push(capture_row_range);
4107 }
4108 } else {
4109 annotation_row_ranges.push(capture_row_range);
4110 }
4111 }
4112 matches.advance();
4113 }
4114
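        // Sort by start position ascending and end position descending, so that each item
        // precedes the items it contains.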
4115 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4116
        // Assign depths based on containment relationships and convert the ranges via
        // `range_callback`.
4118 let mut item_ends_stack = Vec::<Point>::new();
4119 let mut anchor_items = Vec::new();
4120 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4121 for item in items {
4122 while let Some(last_end) = item_ends_stack.last().copied() {
4123 if last_end < item.range.end {
4124 item_ends_stack.pop();
4125 } else {
4126 break;
4127 }
4128 }
4129
4130 let mut annotation_row_range = None;
4131 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4132 let row_preceding_item = item.range.start.row.saturating_sub(1);
4133 if next_annotation_row_range.end < row_preceding_item {
4134 annotation_row_ranges.next();
4135 } else {
4136 if next_annotation_row_range.end == row_preceding_item {
4137 annotation_row_range = Some(next_annotation_row_range.clone());
4138 annotation_row_ranges.next();
4139 }
4140 break;
4141 }
4142 }
4143
4144 anchor_items.push(OutlineItem {
4145 depth: item_ends_stack.len(),
4146 range: range_callback(self, item.range.clone()),
4147 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4148 text: item.text,
4149 highlight_ranges: item.highlight_ranges,
4150 name_ranges: item.name_ranges,
4151 body_range: item.body_range.map(|r| range_callback(self, r)),
4152 annotation_range: annotation_row_range.map(|annotation_range| {
4153 let point_range = Point::new(annotation_range.start, 0)
4154 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4155 range_callback(self, point_range)
4156 }),
4157 });
4158 item_ends_stack.push(item.range.end);
4159 }
4160
4161 anchor_items
4162 }
4163
4164 fn next_outline_item(
4165 &self,
4166 config: &OutlineConfig,
4167 mat: &SyntaxMapMatch,
4168 range: &Range<usize>,
4169 include_extra_context: bool,
4170 theme: Option<&SyntaxTheme>,
4171 ) -> Option<OutlineItem<Point>> {
4172 let item_node = mat.captures.iter().find_map(|cap| {
4173 if cap.index == config.item_capture_ix {
4174 Some(cap.node)
4175 } else {
4176 None
4177 }
4178 })?;
4179
4180 let item_byte_range = item_node.byte_range();
4181 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4182 return None;
4183 }
4184 let item_point_range = Point::from_ts_point(item_node.start_position())
4185 ..Point::from_ts_point(item_node.end_position());
4186
4187 let mut open_point = None;
4188 let mut close_point = None;
4189
4190 let mut buffer_ranges = Vec::new();
4191 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4192 let mut range = node.start_byte()..node.end_byte();
4193 let start = node.start_position();
4194 if node.end_position().row > start.row {
4195 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4196 }
4197
4198 if !range.is_empty() {
4199 buffer_ranges.push((range, node_is_name));
4200 }
4201 };
4202
4203 for capture in mat.captures {
4204 if capture.index == config.name_capture_ix {
4205 add_to_buffer_ranges(capture.node, true);
4206 } else if Some(capture.index) == config.context_capture_ix
4207 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4208 {
4209 add_to_buffer_ranges(capture.node, false);
4210 } else {
4211 if Some(capture.index) == config.open_capture_ix {
4212 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4213 } else if Some(capture.index) == config.close_capture_ix {
4214 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4215 }
4216 }
4217 }
4218
4219 if buffer_ranges.is_empty() {
4220 return None;
4221 }
4222 let source_range_for_text =
4223 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4224
4225 let mut text = String::new();
4226 let mut highlight_ranges = Vec::new();
4227 let mut name_ranges = Vec::new();
4228 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4229 let mut last_buffer_range_end = 0;
4230 for (buffer_range, is_name) in buffer_ranges {
4231 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4232 if space_added {
4233 text.push(' ');
4234 }
4235 let before_append_len = text.len();
4236 let mut offset = buffer_range.start;
4237 chunks.seek(buffer_range.clone());
4238 for mut chunk in chunks.by_ref() {
4239 if chunk.text.len() > buffer_range.end - offset {
4240 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4241 offset = buffer_range.end;
4242 } else {
4243 offset += chunk.text.len();
4244 }
4245 let style = chunk
4246 .syntax_highlight_id
4247 .zip(theme)
4248 .and_then(|(highlight, theme)| highlight.style(theme));
4249 if let Some(style) = style {
4250 let start = text.len();
4251 let end = start + chunk.text.len();
4252 highlight_ranges.push((start..end, style));
4253 }
4254 text.push_str(chunk.text);
4255 if offset >= buffer_range.end {
4256 break;
4257 }
4258 }
4259 if is_name {
4260 let after_append_len = text.len();
4261 let start = if space_added && !name_ranges.is_empty() {
4262 before_append_len - 1
4263 } else {
4264 before_append_len
4265 };
4266 name_ranges.push(start..after_append_len);
4267 }
4268 last_buffer_range_end = buffer_range.end;
4269 }
4270
4271 Some(OutlineItem {
4272 depth: 0, // We'll calculate the depth later
4273 range: item_point_range,
4274 source_range_for_text: source_range_for_text.to_point(self),
4275 text,
4276 highlight_ranges,
4277 name_ranges,
4278 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4279 annotation_range: None,
4280 })
4281 }
4282
4283 pub fn function_body_fold_ranges<T: ToOffset>(
4284 &self,
4285 within: Range<T>,
4286 ) -> impl Iterator<Item = Range<usize>> + '_ {
4287 self.text_object_ranges(within, TreeSitterOptions::default())
4288 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4289 }
4290
4291 /// For each grammar in the language, runs the provided
4292 /// [`tree_sitter::Query`] against the given range.
4293 pub fn matches(
4294 &self,
4295 range: Range<usize>,
4296 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4297 ) -> SyntaxMapMatches<'_> {
4298 self.syntax.matches(range, self, query)
4299 }
4300
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that
    /// intersect with those chunks. Hence, it may return more bracket pairs than the range itself
    /// contains.
    ///
    /// Chunks listed in `known_chunks` are skipped.
    /// The resulting bracket match collections are not ordered.
4306 pub fn fetch_bracket_ranges(
4307 &self,
4308 range: Range<usize>,
4309 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4310 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4311 let mut all_bracket_matches = HashMap::default();
4312
4313 for chunk in self
4314 .tree_sitter_data
4315 .chunks
4316 .applicable_chunks(&[self.anchor_before(range.start)..self.anchor_after(range.end)])
4317 {
4318 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4319 continue;
4320 }
4321 let Some(chunk_range) = self.tree_sitter_data.chunks.chunk_range(chunk) else {
4322 continue;
4323 };
4324 let chunk_range = chunk_range.to_offset(&self);
4325
4326 if let Some(cached_brackets) =
4327 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4328 {
4329 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4330 continue;
4331 }
4332
4333 let mut all_brackets = Vec::new();
4334 let mut opens = Vec::new();
4335 let mut color_pairs = Vec::new();
4336
4337 let mut matches = self
4338 .syntax
4339 .matches(chunk_range.clone(), &self.text, |grammar| {
4340 grammar.brackets_config.as_ref().map(|c| &c.query)
4341 });
4342 let configs = matches
4343 .grammars()
4344 .iter()
4345 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4346 .collect::<Vec<_>>();
4347
4348 while let Some(mat) = matches.peek() {
4349 let mut open = None;
4350 let mut close = None;
4351 let syntax_layer_depth = mat.depth;
4352 let config = configs[mat.grammar_index];
4353 let pattern = &config.patterns[mat.pattern_index];
4354 for capture in mat.captures {
4355 if capture.index == config.open_capture_ix {
4356 open = Some(capture.node.byte_range());
4357 } else if capture.index == config.close_capture_ix {
4358 close = Some(capture.node.byte_range());
4359 }
4360 }
4361
4362 matches.advance();
4363
4364 let Some((open_range, close_range)) = open.zip(close) else {
4365 continue;
4366 };
4367
4368 let bracket_range = open_range.start..=close_range.end;
4369 if !bracket_range.overlaps(&chunk_range) {
4370 continue;
4371 }
4372
4373 let index = all_brackets.len();
4374 all_brackets.push(BracketMatch {
4375 open_range: open_range.clone(),
4376 close_range: close_range.clone(),
4377 newline_only: pattern.newline_only,
4378 syntax_layer_depth,
4379 color_index: None,
4380 });
4381
                // Certain languages have "brackets" that are not really brackets, e.g. tags, and
                // such a bracket pair will match the entire tag with all of the text inside.
                // For now, avoid colorizing any pair in which both brackets are longer than a
                // single character. We still need to colorize `<Element/>` bracket pairs, so this
                // check cannot be made stricter.
4386 let should_color =
4387 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4388 if should_color {
4389 opens.push(open_range.clone());
4390 color_pairs.push((open_range, close_range, index));
4391 }
4392 }
4393
4394 opens.sort_by_key(|r| (r.start, r.end));
4395 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4396 color_pairs.sort_by_key(|(_, close, _)| close.end);
4397
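            // Assign rainbow color depths: walk pairs in order of their closing position,
            // pushing earlier opening brackets onto a stack; when a pair's own opening bracket
            // is on top of the stack, its nesting depth becomes its color index.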
4398 let mut open_stack = Vec::new();
4399 let mut open_index = 0;
4400 for (open, close, index) in color_pairs {
4401 while open_index < opens.len() && opens[open_index].start < close.start {
4402 open_stack.push(opens[open_index].clone());
4403 open_index += 1;
4404 }
4405
4406 if open_stack.last() == Some(&open) {
4407 let depth_index = open_stack.len() - 1;
4408 all_brackets[index].color_index = Some(depth_index);
4409 open_stack.pop();
4410 }
4411 }
4412
4413 all_brackets.sort_by_key(|bracket_match| {
4414 (bracket_match.open_range.start, bracket_match.open_range.end)
4415 });
4416
4417 if let empty_slot @ None =
4418 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4419 {
4420 *empty_slot = Some(all_brackets.clone());
4421 }
4422 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4423 }
4424
4425 all_bracket_matches
4426 }
4427
4428 pub fn all_bracket_ranges(
4429 &self,
4430 range: Range<usize>,
4431 ) -> impl Iterator<Item = BracketMatch<usize>> {
4432 self.fetch_bracket_ranges(range.clone(), None)
4433 .into_values()
4434 .flatten()
4435 .filter(move |bracket_match| {
4436 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4437 bracket_range.overlaps(&range)
4438 })
4439 }
4440
    /// Returns bracket range pairs overlapping or adjacent to `range`.
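    ///
    /// A minimal usage sketch (illustrative only, not a doctest; `selection_range` is an assumed
    /// offset range):
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(selection_range.clone()) {
    ///     let (open, close) = (pair.open_range, pair.close_range);
    ///     // e.g. highlight `open` and `close` in the editor
    /// }
    /// ```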
4442 pub fn bracket_ranges<T: ToOffset>(
4443 &self,
4444 range: Range<T>,
4445 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4446 // Find bracket pairs that *inclusively* contain the given range.
4447 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4448 self.all_bracket_ranges(range)
4449 .filter(|pair| !pair.newline_only)
4450 }
4451
4452 pub fn debug_variables_query<T: ToOffset>(
4453 &self,
4454 range: Range<T>,
4455 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4456 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4457
4458 let mut matches = self.syntax.matches_with_options(
4459 range.clone(),
4460 &self.text,
4461 TreeSitterOptions::default(),
4462 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4463 );
4464
4465 let configs = matches
4466 .grammars()
4467 .iter()
4468 .map(|grammar| grammar.debug_variables_config.as_ref())
4469 .collect::<Vec<_>>();
4470
4471 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4472
4473 iter::from_fn(move || {
4474 loop {
4475 while let Some(capture) = captures.pop() {
4476 if capture.0.overlaps(&range) {
4477 return Some(capture);
4478 }
4479 }
4480
4481 let mat = matches.peek()?;
4482
4483 let Some(config) = configs[mat.grammar_index].as_ref() else {
4484 matches.advance();
4485 continue;
4486 };
4487
4488 for capture in mat.captures {
4489 let Some(ix) = config
4490 .objects_by_capture_ix
4491 .binary_search_by_key(&capture.index, |e| e.0)
4492 .ok()
4493 else {
4494 continue;
4495 };
4496 let text_object = config.objects_by_capture_ix[ix].1;
4497 let byte_range = capture.node.byte_range();
4498
4499 let mut found = false;
4500 for (range, existing) in captures.iter_mut() {
4501 if existing == &text_object {
4502 range.start = range.start.min(byte_range.start);
4503 range.end = range.end.max(byte_range.end);
4504 found = true;
4505 break;
4506 }
4507 }
4508
4509 if !found {
4510 captures.push((byte_range, text_object));
4511 }
4512 }
4513
4514 matches.advance();
4515 }
4516 })
4517 }
4518
4519 pub fn text_object_ranges<T: ToOffset>(
4520 &self,
4521 range: Range<T>,
4522 options: TreeSitterOptions,
4523 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4524 let range =
4525 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4526
4527 let mut matches =
4528 self.syntax
4529 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4530 grammar.text_object_config.as_ref().map(|c| &c.query)
4531 });
4532
4533 let configs = matches
4534 .grammars()
4535 .iter()
4536 .map(|grammar| grammar.text_object_config.as_ref())
4537 .collect::<Vec<_>>();
4538
4539 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4540
4541 iter::from_fn(move || {
4542 loop {
4543 while let Some(capture) = captures.pop() {
4544 if capture.0.overlaps(&range) {
4545 return Some(capture);
4546 }
4547 }
4548
4549 let mat = matches.peek()?;
4550
4551 let Some(config) = configs[mat.grammar_index].as_ref() else {
4552 matches.advance();
4553 continue;
4554 };
4555
4556 for capture in mat.captures {
4557 let Some(ix) = config
4558 .text_objects_by_capture_ix
4559 .binary_search_by_key(&capture.index, |e| e.0)
4560 .ok()
4561 else {
4562 continue;
4563 };
4564 let text_object = config.text_objects_by_capture_ix[ix].1;
4565 let byte_range = capture.node.byte_range();
4566
4567 let mut found = false;
4568 for (range, existing) in captures.iter_mut() {
4569 if existing == &text_object {
4570 range.start = range.start.min(byte_range.start);
4571 range.end = range.end.max(byte_range.end);
4572 found = true;
4573 break;
4574 }
4575 }
4576
4577 if !found {
4578 captures.push((byte_range, text_object));
4579 }
4580 }
4581
4582 matches.advance();
4583 }
4584 })
4585 }
4586
    /// Returns the enclosing bracket ranges containing the given range.
4588 pub fn enclosing_bracket_ranges<T: ToOffset>(
4589 &self,
4590 range: Range<T>,
4591 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4592 let range = range.start.to_offset(self)..range.end.to_offset(self);
4593
4594 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4595 let max_depth = result
4596 .iter()
4597 .map(|mat| mat.syntax_layer_depth)
4598 .max()
4599 .unwrap_or(0);
4600 result.into_iter().filter(move |pair| {
4601 pair.open_range.start <= range.start
4602 && pair.close_range.end >= range.end
4603 && pair.syntax_layer_depth == max_depth
4604 })
4605 }
4606
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no
    /// bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
4610 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4611 &self,
4612 range: Range<T>,
4613 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4614 ) -> Option<(Range<usize>, Range<usize>)> {
4615 let range = range.start.to_offset(self)..range.end.to_offset(self);
4616
4617 // Get the ranges of the innermost pair of brackets.
4618 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4619
4620 for pair in self.enclosing_bracket_ranges(range) {
4621 if let Some(range_filter) = range_filter
4622 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4623 {
4624 continue;
4625 }
4626
4627 let len = pair.close_range.end - pair.open_range.start;
4628
4629 if let Some((existing_open, existing_close)) = &result {
4630 let existing_len = existing_close.end - existing_open.start;
4631 if len > existing_len {
4632 continue;
4633 }
4634 }
4635
4636 result = Some((pair.open_range, pair.close_range));
4637 }
4638
4639 result
4640 }
4641
    /// Returns the offset ranges of any matches of the redaction query.
    /// The buffer can be associated with multiple languages, and the redaction query associated
    /// with each will be run on the relevant section of the buffer.
4645 pub fn redacted_ranges<T: ToOffset>(
4646 &self,
4647 range: Range<T>,
4648 ) -> impl Iterator<Item = Range<usize>> + '_ {
4649 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4650 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4651 grammar
4652 .redactions_config
4653 .as_ref()
4654 .map(|config| &config.query)
4655 });
4656
4657 let configs = syntax_matches
4658 .grammars()
4659 .iter()
4660 .map(|grammar| grammar.redactions_config.as_ref())
4661 .collect::<Vec<_>>();
4662
4663 iter::from_fn(move || {
4664 let redacted_range = syntax_matches
4665 .peek()
4666 .and_then(|mat| {
4667 configs[mat.grammar_index].and_then(|config| {
4668 mat.captures
4669 .iter()
4670 .find(|capture| capture.index == config.redaction_capture_ix)
4671 })
4672 })
4673 .map(|mat| mat.node.byte_range());
4674 syntax_matches.advance();
4675 redacted_range
4676 })
4677 }
4678
4679 pub fn injections_intersecting_range<T: ToOffset>(
4680 &self,
4681 range: Range<T>,
4682 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4683 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4684
4685 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4686 grammar
4687 .injection_config
4688 .as_ref()
4689 .map(|config| &config.query)
4690 });
4691
4692 let configs = syntax_matches
4693 .grammars()
4694 .iter()
4695 .map(|grammar| grammar.injection_config.as_ref())
4696 .collect::<Vec<_>>();
4697
4698 iter::from_fn(move || {
4699 let ranges = syntax_matches.peek().and_then(|mat| {
4700 let config = &configs[mat.grammar_index]?;
4701 let content_capture_range = mat.captures.iter().find_map(|capture| {
4702 if capture.index == config.content_capture_ix {
4703 Some(capture.node.byte_range())
4704 } else {
4705 None
4706 }
4707 })?;
4708 let language = self.language_at(content_capture_range.start)?;
4709 Some((content_capture_range, language))
4710 });
4711 syntax_matches.advance();
4712 ranges
4713 })
4714 }
4715
4716 pub fn runnable_ranges(
4717 &self,
4718 offset_range: Range<usize>,
4719 ) -> impl Iterator<Item = RunnableRange> + '_ {
4720 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4721 grammar.runnable_config.as_ref().map(|config| &config.query)
4722 });
4723
4724 let test_configs = syntax_matches
4725 .grammars()
4726 .iter()
4727 .map(|grammar| grammar.runnable_config.as_ref())
4728 .collect::<Vec<_>>();
4729
4730 iter::from_fn(move || {
4731 loop {
4732 let mat = syntax_matches.peek()?;
4733
4734 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4735 let mut run_range = None;
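                    // The full range of the runnable is the union of all capture ranges in
                    // this match.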
4736 let full_range = mat.captures.iter().fold(
4737 Range {
4738 start: usize::MAX,
4739 end: 0,
4740 },
4741 |mut acc, next| {
4742 let byte_range = next.node.byte_range();
4743 if acc.start > byte_range.start {
4744 acc.start = byte_range.start;
4745 }
4746 if acc.end < byte_range.end {
4747 acc.end = byte_range.end;
4748 }
4749 acc
4750 },
4751 );
4752 if full_range.start > full_range.end {
4753 // We did not find a full spanning range of this match.
4754 return None;
4755 }
4756 let extra_captures: SmallVec<[_; 1]> =
4757 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4758 test_configs
4759 .extra_captures
4760 .get(capture.index as usize)
4761 .cloned()
4762 .and_then(|tag_name| match tag_name {
4763 RunnableCapture::Named(name) => {
4764 Some((capture.node.byte_range(), name))
4765 }
4766 RunnableCapture::Run => {
4767 let _ = run_range.insert(capture.node.byte_range());
4768 None
4769 }
4770 })
4771 }));
4772 let run_range = run_range?;
4773 let tags = test_configs
4774 .query
4775 .property_settings(mat.pattern_index)
4776 .iter()
4777 .filter_map(|property| {
4778 if *property.key == *"tag" {
4779 property
4780 .value
4781 .as_ref()
4782 .map(|value| RunnableTag(value.to_string().into()))
4783 } else {
4784 None
4785 }
4786 })
4787 .collect();
4788 let extra_captures = extra_captures
4789 .into_iter()
4790 .map(|(range, name)| {
4791 (
4792 name.to_string(),
4793 self.text_for_range(range).collect::<String>(),
4794 )
4795 })
4796 .collect();
4797 // All tags should have the same range.
4798 Some(RunnableRange {
4799 run_range,
4800 full_range,
4801 runnable: Runnable {
4802 tags,
4803 language: mat.language,
4804 buffer: self.remote_id(),
4805 },
4806 extra_captures,
4807 buffer_id: self.remote_id(),
4808 })
4809 });
4810
4811 syntax_matches.advance();
4812 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. However, we don't
                    // want to return `None` from this iterator just because a match did not contain
                    // a run marker, so in that case we loop around to the next match.
4815 return test_range;
4816 }
4817 }
4818 })
4819 }
4820
4821 /// Returns selections for remote peers intersecting the given range.
4822 #[allow(clippy::type_complexity)]
4823 pub fn selections_in_range(
4824 &self,
4825 range: Range<Anchor>,
4826 include_local: bool,
4827 ) -> impl Iterator<
4828 Item = (
4829 ReplicaId,
4830 bool,
4831 CursorShape,
4832 impl Iterator<Item = &Selection<Anchor>> + '_,
4833 ),
4834 > + '_ {
4835 self.remote_selections
4836 .iter()
4837 .filter(move |(replica_id, set)| {
4838 (include_local || **replica_id != self.text.replica_id())
4839 && !set.selections.is_empty()
4840 })
4841 .map(move |(replica_id, set)| {
4842 let start_ix = match set.selections.binary_search_by(|probe| {
4843 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4844 }) {
4845 Ok(ix) | Err(ix) => ix,
4846 };
4847 let end_ix = match set.selections.binary_search_by(|probe| {
4848 probe.start.cmp(&range.end, self).then(Ordering::Less)
4849 }) {
4850 Ok(ix) | Err(ix) => ix,
4851 };
4852
4853 (
4854 *replica_id,
4855 set.line_mode,
4856 set.cursor_shape,
4857 set.selections[start_ix..end_ix].iter(),
4858 )
4859 })
4860 }
4861
    /// Returns whether the buffer contains any diagnostics.
4863 pub fn has_diagnostics(&self) -> bool {
4864 !self.diagnostics.is_empty()
4865 }
4866
4867 /// Returns all the diagnostics intersecting the given range.
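    ///
    /// A minimal usage sketch (illustrative only, not a doctest):
    /// ```ignore
    /// let errors: Vec<_> = snapshot
    ///     .diagnostics_in_range::<_, Point>(Point::new(0, 0)..Point::new(10, 0), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```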
4868 pub fn diagnostics_in_range<'a, T, O>(
4869 &'a self,
4870 search_range: Range<T>,
4871 reversed: bool,
4872 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4873 where
4874 T: 'a + Clone + ToOffset,
4875 O: 'a + FromAnchor,
4876 {
4877 let mut iterators: Vec<_> = self
4878 .diagnostics
4879 .iter()
4880 .map(|(_, collection)| {
4881 collection
4882 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4883 .peekable()
4884 })
4885 .collect();
4886
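        // Merge the per-language-server iterators, always yielding the entry whose range starts
        // first (ties broken by severity, then by group id), honoring the `reversed` flag.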
4887 std::iter::from_fn(move || {
4888 let (next_ix, _) = iterators
4889 .iter_mut()
4890 .enumerate()
4891 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4892 .min_by(|(_, a), (_, b)| {
4893 let cmp = a
4894 .range
4895 .start
4896 .cmp(&b.range.start, self)
4897 // when range is equal, sort by diagnostic severity
4898 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4899 // and stabilize order with group_id
4900 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4901 if reversed { cmp.reverse() } else { cmp }
4902 })?;
4903 iterators[next_ix]
4904 .next()
4905 .map(
4906 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4907 diagnostic,
4908 range: FromAnchor::from_anchor(&range.start, self)
4909 ..FromAnchor::from_anchor(&range.end, self),
4910 },
4911 )
4912 })
4913 }
4914
4915 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4916 /// should be used instead.
4917 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4918 &self.diagnostics
4919 }
4920
    /// Returns all the diagnostic groups associated with the given
    /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4924 pub fn diagnostic_groups(
4925 &self,
4926 language_server_id: Option<LanguageServerId>,
4927 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4928 let mut groups = Vec::new();
4929
4930 if let Some(language_server_id) = language_server_id {
4931 if let Ok(ix) = self
4932 .diagnostics
4933 .binary_search_by_key(&language_server_id, |e| e.0)
4934 {
4935 self.diagnostics[ix]
4936 .1
4937 .groups(language_server_id, &mut groups, self);
4938 }
4939 } else {
4940 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4941 diagnostics.groups(*language_server_id, &mut groups, self);
4942 }
4943 }
4944
4945 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4946 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4947 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4948 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4949 });
4950
4951 groups
4952 }
4953
4954 /// Returns an iterator over the diagnostics for the given group.
4955 pub fn diagnostic_group<O>(
4956 &self,
4957 group_id: usize,
4958 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4959 where
4960 O: FromAnchor + 'static,
4961 {
4962 self.diagnostics
4963 .iter()
4964 .flat_map(move |(_, set)| set.group(group_id, self))
4965 }
4966
4967 /// An integer version number that accounts for all updates besides
4968 /// the buffer's text itself (which is versioned via a version vector).
4969 pub fn non_text_state_update_count(&self) -> usize {
4970 self.non_text_state_update_count
4971 }
4972
4973 /// An integer version that changes when the buffer's syntax changes.
4974 pub fn syntax_update_count(&self) -> usize {
4975 self.syntax.update_count()
4976 }
4977
    /// Returns a snapshot of the underlying file.
4979 pub fn file(&self) -> Option<&Arc<dyn File>> {
4980 self.file.as_ref()
4981 }
4982
4983 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4984 if let Some(file) = self.file() {
4985 if file.path().file_name().is_none() || include_root {
4986 Some(file.full_path(cx).to_string_lossy().into_owned())
4987 } else {
4988 Some(file.path().display(file.path_style(cx)).to_string())
4989 }
4990 } else {
4991 None
4992 }
4993 }
4994
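    /// Collects the distinct words in `query.range`, keyed by their text.
    ///
    /// A minimal usage sketch (illustrative only, not a doctest):
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("fo"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```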
4995 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4996 let query_str = query.fuzzy_contents;
4997 if query_str.is_some_and(|query| query.is_empty()) {
4998 return BTreeMap::default();
4999 }
5000
5001 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5002 language,
5003 override_id: None,
5004 }));
5005
5006 let mut query_ix = 0;
5007 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5008 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5009
5010 let mut words = BTreeMap::default();
5011 let mut current_word_start_ix = None;
5012 let mut chunk_ix = query.range.start;
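        // Scan the range chunk by chunk, tracking word boundaries with the classifier and
        // advancing through the fuzzy query as matching characters are seen within a word.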
5013 for chunk in self.chunks(query.range, false) {
5014 for (i, c) in chunk.text.char_indices() {
5015 let ix = chunk_ix + i;
5016 if classifier.is_word(c) {
5017 if current_word_start_ix.is_none() {
5018 current_word_start_ix = Some(ix);
5019 }
5020
5021 if let Some(query_chars) = &query_chars
5022 && query_ix < query_len
5023 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5024 {
5025 query_ix += 1;
5026 }
5027 continue;
5028 } else if let Some(word_start) = current_word_start_ix.take()
5029 && query_ix == query_len
5030 {
5031 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5032 let mut word_text = self.text_for_range(word_start..ix).peekable();
5033 let first_char = word_text
5034 .peek()
5035 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words and "words"
                    // that start with a digit.
5037 if !query.skip_digits
5038 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5039 {
5040 words.insert(word_text.collect(), word_range);
5041 }
5042 }
5043 query_ix = 0;
5044 }
5045 chunk_ix += chunk.text.len();
5046 }
5047
5048 words
5049 }
5050}
5051
5052pub struct WordsQuery<'a> {
    /// Only returns words that contain every character of this string, in order (case-insensitive).
5054 pub fuzzy_contents: Option<&'a str>,
5055 /// Skips words that start with a digit.
5056 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
5058 pub range: Range<usize>,
5059}
5060
5061fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5062 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5063}
5064
5065fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5066 let mut result = IndentSize::spaces(0);
5067 for c in text {
5068 let kind = match c {
5069 ' ' => IndentKind::Space,
5070 '\t' => IndentKind::Tab,
5071 _ => break,
5072 };
5073 if result.len == 0 {
5074 result.kind = kind;
5075 }
5076 result.len += 1;
5077 }
5078 result
5079}
5080
5081impl Clone for BufferSnapshot {
5082 fn clone(&self) -> Self {
5083 Self {
5084 text: self.text.clone(),
5085 syntax: self.syntax.clone(),
5086 file: self.file.clone(),
5087 remote_selections: self.remote_selections.clone(),
5088 diagnostics: self.diagnostics.clone(),
5089 language: self.language.clone(),
5090 tree_sitter_data: self.tree_sitter_data.clone(),
5091 non_text_state_update_count: self.non_text_state_update_count,
5092 }
5093 }
5094}
5095
5096impl Deref for BufferSnapshot {
5097 type Target = text::BufferSnapshot;
5098
5099 fn deref(&self) -> &Self::Target {
5100 &self.text
5101 }
5102}
5103
5104unsafe impl Send for BufferChunks<'_> {}
5105
5106impl<'a> BufferChunks<'a> {
5107 pub(crate) fn new(
5108 text: &'a Rope,
5109 range: Range<usize>,
5110 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5111 diagnostics: bool,
5112 buffer_snapshot: Option<&'a BufferSnapshot>,
5113 ) -> Self {
5114 let mut highlights = None;
5115 if let Some((captures, highlight_maps)) = syntax {
5116 highlights = Some(BufferChunkHighlights {
5117 captures,
5118 next_capture: None,
5119 stack: Default::default(),
5120 highlight_maps,
5121 })
5122 }
5123
5124 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5125 let chunks = text.chunks_in_range(range.clone());
5126
5127 let mut this = BufferChunks {
5128 range,
5129 buffer_snapshot,
5130 chunks,
5131 diagnostic_endpoints,
5132 error_depth: 0,
5133 warning_depth: 0,
5134 information_depth: 0,
5135 hint_depth: 0,
5136 unnecessary_depth: 0,
5137 underline: true,
5138 highlights,
5139 };
5140 this.initialize_diagnostic_endpoints();
5141 this
5142 }
5143
    /// Seeks to the given byte range in the buffer.
5145 pub fn seek(&mut self, range: Range<usize>) {
5146 let old_range = std::mem::replace(&mut self.range, range.clone());
5147 self.chunks.set_range(self.range.clone());
5148 if let Some(highlights) = self.highlights.as_mut() {
5149 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5150 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5151 highlights
5152 .stack
5153 .retain(|(end_offset, _)| *end_offset > range.start);
5154 if let Some(capture) = &highlights.next_capture
5155 && range.start >= capture.node.start_byte()
5156 {
5157 let next_capture_end = capture.node.end_byte();
5158 if range.start < next_capture_end {
5159 highlights.stack.push((
5160 next_capture_end,
5161 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5162 ));
5163 }
5164 highlights.next_capture.take();
5165 }
5166 } else if let Some(snapshot) = self.buffer_snapshot {
5167 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5168 *highlights = BufferChunkHighlights {
5169 captures,
5170 next_capture: None,
5171 stack: Default::default(),
5172 highlight_maps,
5173 };
5174 } else {
5175 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5176 // Seeking such BufferChunks is not supported.
5177 debug_assert!(
5178 false,
5179 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5180 );
5181 }
5182
5183 highlights.captures.set_byte_range(self.range.clone());
5184 self.initialize_diagnostic_endpoints();
5185 }
5186 }
5187
5188 fn initialize_diagnostic_endpoints(&mut self) {
5189 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5190 && let Some(buffer) = self.buffer_snapshot
5191 {
5192 let mut diagnostic_endpoints = Vec::new();
5193 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5194 diagnostic_endpoints.push(DiagnosticEndpoint {
5195 offset: entry.range.start,
5196 is_start: true,
5197 severity: entry.diagnostic.severity,
5198 is_unnecessary: entry.diagnostic.is_unnecessary,
5199 underline: entry.diagnostic.underline,
5200 });
5201 diagnostic_endpoints.push(DiagnosticEndpoint {
5202 offset: entry.range.end,
5203 is_start: false,
5204 severity: entry.diagnostic.severity,
5205 is_unnecessary: entry.diagnostic.is_unnecessary,
5206 underline: entry.diagnostic.underline,
5207 });
5208 }
5209 diagnostic_endpoints
5210 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5211 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5212 self.hint_depth = 0;
5213 self.error_depth = 0;
5214 self.warning_depth = 0;
5215 self.information_depth = 0;
5216 }
5217 }
5218
5219 /// The current byte offset in the buffer.
5220 pub fn offset(&self) -> usize {
5221 self.range.start
5222 }
5223
5224 pub fn range(&self) -> Range<usize> {
5225 self.range.clone()
5226 }
5227
5228 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5229 let depth = match endpoint.severity {
5230 DiagnosticSeverity::ERROR => &mut self.error_depth,
5231 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5232 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5233 DiagnosticSeverity::HINT => &mut self.hint_depth,
5234 _ => return,
5235 };
5236 if endpoint.is_start {
5237 *depth += 1;
5238 } else {
5239 *depth -= 1;
5240 }
5241
5242 if endpoint.is_unnecessary {
5243 if endpoint.is_start {
5244 self.unnecessary_depth += 1;
5245 } else {
5246 self.unnecessary_depth -= 1;
5247 }
5248 }
5249 }
5250
5251 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5252 if self.error_depth > 0 {
5253 Some(DiagnosticSeverity::ERROR)
5254 } else if self.warning_depth > 0 {
5255 Some(DiagnosticSeverity::WARNING)
5256 } else if self.information_depth > 0 {
5257 Some(DiagnosticSeverity::INFORMATION)
5258 } else if self.hint_depth > 0 {
5259 Some(DiagnosticSeverity::HINT)
5260 } else {
5261 None
5262 }
5263 }
5264
5265 fn current_code_is_unnecessary(&self) -> bool {
5266 self.unnecessary_depth > 0
5267 }
5268}
5269
5270impl<'a> Iterator for BufferChunks<'a> {
5271 type Item = Chunk<'a>;
5272
5273 fn next(&mut self) -> Option<Self::Item> {
5274 let mut next_capture_start = usize::MAX;
5275 let mut next_diagnostic_endpoint = usize::MAX;
5276
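        // Pop highlight scopes that end at or before the current position, then advance the
        // capture iterator to push scopes that have already started and to find the start of
        // the next upcoming capture.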
5277 if let Some(highlights) = self.highlights.as_mut() {
5278 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5279 if *parent_capture_end <= self.range.start {
5280 highlights.stack.pop();
5281 } else {
5282 break;
5283 }
5284 }
5285
5286 if highlights.next_capture.is_none() {
5287 highlights.next_capture = highlights.captures.next();
5288 }
5289
5290 while let Some(capture) = highlights.next_capture.as_ref() {
5291 if self.range.start < capture.node.start_byte() {
5292 next_capture_start = capture.node.start_byte();
5293 break;
5294 } else {
5295 let highlight_id =
5296 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5297 highlights
5298 .stack
5299 .push((capture.node.end_byte(), highlight_id));
5300 highlights.next_capture = highlights.captures.next();
5301 }
5302 }
5303 }
5304
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
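            // The tab and char bitmaps describe the entire storage chunk; shift and mask
            // them down to the bits that cover the slice being emitted. When `bit_end` is
            // 128, `unbounded_shl` returns 0, so the wrapping subtraction yields an
            // all-ones mask.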
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes this [`IndentSize`] and returns a new one that has been shrunk
    /// (`Ordering::Less`), left unchanged (`Ordering::Equal`), or enlarged
    /// (`Ordering::Greater`) by the given size. The adjustment only applies when both
    /// sizes share the same [`IndentKind`]; shrinking is skipped if the delta exceeds
    /// the current length, and enlarging an empty indent adopts the given size wholesale.
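    ///
    /// Illustrative sketch (not a doctest):
    ///
    /// ```ignore
    /// let shrunk = IndentSize::spaces(8).with_delta(Ordering::Less, IndentSize::spaces(4));
    /// // `shrunk` is four spaces.
    /// let grown = IndentSize::spaces(0).with_delta(Ordering::Greater, IndentSize::tab());
    /// // Enlarging an empty indent adopts the delta wholesale, so `grown` is one tab.
    /// ```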
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

    /// The width of this indent in columns, counting each tab as `tab_size` columns.
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

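/// Groups an ascending sequence of `u32` values into contiguous ranges, starting a new
/// range whenever a value is not adjacent to the previous one or the current range
/// already spans `max_len` values.
///
/// Illustrative sketch (not a doctest):
///
/// ```ignore
/// let rows = [1, 2, 3, 5, 6, 9].into_iter();
/// let ranges: Vec<_> = contiguous_ranges(rows, 2).collect();
/// // Yields 1..3, 3..4, 5..7, 9..10; the run of 1, 2, 3 is split by `max_len`.
/// ```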
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

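/// Classifies characters as word, whitespace, or punctuation, optionally consulting the
/// word-character sets defined by a [`LanguageScope`].
///
/// Illustrative sketch (not a doctest; the results shown assume no language scope is
/// configured):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert!(classifier.is_word('a'));
/// assert!(classifier.is_word('_'));
/// assert!(classifier.is_whitespace(' '));
/// assert!(classifier.is_punctuation('-'));
/// ```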
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
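///
/// Illustrative sketch (not a doctest; assumes `Rope` is in scope):
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n    let x = 1;\t\n}\n");
/// // Two ranges: the three spaces after `{` and the tab after `1;`.
/// assert_eq!(trailing_whitespace_ranges(&rope).len(), 2);
/// ```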
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

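            // The first line of this chunk may continue a run of trailing whitespace that
            // began at the end of the previous chunk; if so, extend that earlier range
            // instead of starting a new one.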
            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

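        // The inner loop added one byte for a newline after every split, but the final
        // split in a chunk is not terminated by '\n', so undo that last increment.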
        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}