1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use fs::MTime;
29use futures::channel::oneshot;
30use gpui::{
31 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
32 Task, TaskLabel, TextStyle,
33};
34
35use lsp::{LanguageServerId, NumberOrString};
36use parking_lot::Mutex;
37use serde::{Deserialize, Serialize};
38use serde_json::Value;
39use settings::WorktreeId;
40use smallvec::SmallVec;
41use smol::future::yield_now;
42use std::{
43 any::Any,
44 borrow::Cow,
45 cell::Cell,
46 cmp::{self, Ordering, Reverse},
47 collections::{BTreeMap, BTreeSet},
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, Range},
53 path::PathBuf,
54 rc,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
65 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
66 ToPointUtf16, Transaction, TransactionId, Unclipped,
67};
68use theme::{ActiveTheme as _, SyntaxTheme};
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
/// Indicates whether a [`Buffer`] has permission to edit.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
113 pending_autoindent: Option<Task<()>>,
114 sync_parse_timeout: Duration,
115 syntax_map: Mutex<SyntaxMap>,
116 reparse: Option<Task<()>>,
117 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
118 non_text_state_update_count: usize,
119 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
120 remote_selections: TreeMap<ReplicaId, SelectionSet>,
121 diagnostics_timestamp: clock::Lamport,
122 completion_triggers: BTreeSet<String>,
123 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
124 completion_triggers_timestamp: clock::Lamport,
125 deferred_ops: OperationQueue<Operation>,
126 capability: Capability,
127 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` at the time of the last call.
130 has_unsaved_edits: Cell<(clock::Global, bool)>,
131 change_bits: Vec<rc::Weak<Cell<bool>>>,
132 _subscriptions: Vec<gpui::Subscription>,
133 tree_sitter_data: Arc<TreeSitterData>,
134}
135
136#[derive(Debug)]
137pub struct TreeSitterData {
138 chunks: RowChunks,
139 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
140}
141
142const MAX_ROWS_IN_A_CHUNK: u32 = 50;
143
144impl TreeSitterData {
145 fn clear(&mut self, snapshot: text::BufferSnapshot) {
146 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
147 self.brackets_by_chunks.get_mut().clear();
148 self.brackets_by_chunks
149 .get_mut()
150 .resize(self.chunks.len(), None);
151 }
152
153 fn new(snapshot: text::BufferSnapshot) -> Self {
154 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
155 Self {
156 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
157 chunks,
158 }
159 }
160
161 fn version(&self) -> &clock::Global {
162 self.chunks.version()
163 }
164}
165
166#[derive(Copy, Clone, Debug, PartialEq, Eq)]
167pub enum ParseStatus {
168 Idle,
169 Parsing,
170}
171
172struct BufferBranchState {
173 base_buffer: Entity<Buffer>,
174 merged_operations: Vec<Lamport>,
175}
176
177/// An immutable, cheaply cloneable representation of a fixed
178/// state of a buffer.
179pub struct BufferSnapshot {
180 pub text: text::BufferSnapshot,
181 pub syntax: SyntaxSnapshot,
182 file: Option<Arc<dyn File>>,
183 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
184 remote_selections: TreeMap<ReplicaId, SelectionSet>,
185 language: Option<Arc<Language>>,
186 non_text_state_update_count: usize,
187 tree_sitter_data: Arc<TreeSitterData>,
188}
189
190/// The kind and amount of indentation in a particular line. For now,
191/// assumes that indentation is all the same character.
192#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
193pub struct IndentSize {
194 /// The number of bytes that comprise the indentation.
195 pub len: u32,
196 /// The kind of whitespace used for indentation.
197 pub kind: IndentKind,
198}
199
200/// A whitespace character that's used for indentation.
201#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
202pub enum IndentKind {
203 /// An ASCII space character.
204 #[default]
205 Space,
206 /// An ASCII tab character.
207 Tab,
208}
209
210/// The shape of a selection cursor.
211#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
212pub enum CursorShape {
213 /// A vertical bar
214 #[default]
215 Bar,
216 /// A block that surrounds the following character
217 Block,
218 /// An underline that runs along the following character
219 Underline,
220 /// A box drawn around the following character
221 Hollow,
222}
223
224impl From<settings::CursorShape> for CursorShape {
225 fn from(shape: settings::CursorShape) -> Self {
226 match shape {
227 settings::CursorShape::Bar => CursorShape::Bar,
228 settings::CursorShape::Block => CursorShape::Block,
229 settings::CursorShape::Underline => CursorShape::Underline,
230 settings::CursorShape::Hollow => CursorShape::Hollow,
231 }
232 }
233}
234
235#[derive(Clone, Debug)]
236struct SelectionSet {
237 line_mode: bool,
238 cursor_shape: CursorShape,
239 selections: Arc<[Selection<Anchor>]>,
240 lamport_timestamp: clock::Lamport,
241}
242
243/// A diagnostic associated with a certain range of a buffer.
244#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
245pub struct Diagnostic {
246 /// The name of the service that produced this diagnostic.
247 pub source: Option<String>,
248 /// A machine-readable code that identifies this diagnostic.
249 pub code: Option<NumberOrString>,
250 pub code_description: Option<lsp::Uri>,
251 /// Whether this diagnostic is a hint, warning, or error.
252 pub severity: DiagnosticSeverity,
253 /// The human-readable message associated with this diagnostic.
254 pub message: String,
    /// The human-readable message in Markdown format, if available.
256 pub markdown: Option<String>,
257 /// An id that identifies the group to which this diagnostic belongs.
258 ///
259 /// When a language server produces a diagnostic with
260 /// one or more associated diagnostics, those diagnostics are all
261 /// assigned a single group ID.
262 pub group_id: usize,
263 /// Whether this diagnostic is the primary diagnostic for its group.
264 ///
265 /// In a given group, the primary diagnostic is the top-level diagnostic
266 /// returned by the language server. The non-primary diagnostics are the
267 /// associated diagnostics.
268 pub is_primary: bool,
269 /// Whether this diagnostic is considered to originate from an analysis of
270 /// files on disk, as opposed to any unsaved buffer contents. This is a
271 /// property of a given diagnostic source, and is configured for a given
272 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
273 /// for the language server.
274 pub is_disk_based: bool,
275 /// Whether this diagnostic marks unnecessary code.
276 pub is_unnecessary: bool,
    /// A coarse classification of diagnostic groups based on their source.
278 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for this diagnostic.
280 pub data: Option<Value>,
281 /// Whether to underline the corresponding text range in the editor.
282 pub underline: bool,
283}
284
285#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
286pub enum DiagnosticSourceKind {
287 Pulled,
288 Pushed,
289 Other,
290}
291
292/// An operation used to synchronize this buffer with its other replicas.
293#[derive(Clone, Debug, PartialEq)]
294pub enum Operation {
295 /// A text operation.
296 Buffer(text::Operation),
297
298 /// An update to the buffer's diagnostics.
299 UpdateDiagnostics {
300 /// The id of the language server that produced the new diagnostics.
301 server_id: LanguageServerId,
302 /// The diagnostics.
303 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
304 /// The buffer's lamport timestamp.
305 lamport_timestamp: clock::Lamport,
306 },
307
308 /// An update to the most recent selections in this buffer.
309 UpdateSelections {
310 /// The selections.
311 selections: Arc<[Selection<Anchor>]>,
312 /// The buffer's lamport timestamp.
313 lamport_timestamp: clock::Lamport,
314 /// Whether the selections are in 'line mode'.
315 line_mode: bool,
316 /// The [`CursorShape`] associated with these selections.
317 cursor_shape: CursorShape,
318 },
319
320 /// An update to the characters that should trigger autocompletion
321 /// for this buffer.
322 UpdateCompletionTriggers {
323 /// The characters that trigger autocompletion.
324 triggers: Vec<String>,
325 /// The buffer's lamport timestamp.
326 lamport_timestamp: clock::Lamport,
327 /// The language server ID.
328 server_id: LanguageServerId,
329 },
330
331 /// An update to the line ending type of this buffer.
332 UpdateLineEnding {
333 /// The line ending type.
334 line_ending: LineEnding,
335 /// The buffer's lamport timestamp.
336 lamport_timestamp: clock::Lamport,
337 },
338}
339
340/// An event that occurs in a buffer.
341#[derive(Clone, Debug, PartialEq)]
342pub enum BufferEvent {
343 /// The buffer was changed in a way that must be
344 /// propagated to its other replicas.
345 Operation {
346 operation: Operation,
347 is_local: bool,
348 },
349 /// The buffer was edited.
350 Edited,
351 /// The buffer's `dirty` bit changed.
352 DirtyChanged,
353 /// The buffer was saved.
354 Saved,
355 /// The buffer's file was changed on disk.
356 FileHandleChanged,
357 /// The buffer was reloaded.
358 Reloaded,
    /// The buffer needs to be reloaded.
360 ReloadNeeded,
361 /// The buffer's language was changed.
    /// The boolean indicates whether the buffer previously had no language but now has one.
363 LanguageChanged(bool),
364 /// The buffer's syntax trees were updated.
365 Reparsed,
366 /// The buffer's diagnostics were updated.
367 DiagnosticsUpdated,
368 /// The buffer gained or lost editing capabilities.
369 CapabilityChanged,
370}
371
372/// The file associated with a buffer.
373pub trait File: Send + Sync + Any {
374 /// Returns the [`LocalFile`] associated with this file, if the
375 /// file is local.
376 fn as_local(&self) -> Option<&dyn LocalFile>;
377
378 /// Returns whether this file is local.
379 fn is_local(&self) -> bool {
380 self.as_local().is_some()
381 }
382
383 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
384 /// only available in some states, such as modification time.
385 fn disk_state(&self) -> DiskState;
386
387 /// Returns the path of this file relative to the worktree's root directory.
388 fn path(&self) -> &Arc<RelPath>;
389
390 /// Returns the path of this file relative to the worktree's parent directory (this means it
391 /// includes the name of the worktree's root folder).
392 fn full_path(&self, cx: &App) -> PathBuf;
393
394 /// Returns the path style of this file.
395 fn path_style(&self, cx: &App) -> PathStyle;
396
397 /// Returns the last component of this handle's absolute path. If this handle refers to the root
398 /// of its worktree, then this method will return the name of the worktree itself.
399 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
400
401 /// Returns the id of the worktree to which this file belongs.
402 ///
403 /// This is needed for looking up project-specific settings.
404 fn worktree_id(&self, cx: &App) -> WorktreeId;
405
406 /// Converts this file into a protobuf message.
407 fn to_proto(&self, cx: &App) -> rpc::proto::File;
408
    /// Returns whether Zed considers this to be a private file.
410 fn is_private(&self) -> bool;
411}
412
/// The file's storage status: whether it is stored (`Present`) and, if so, when it was last
/// modified. A file that is not stored is either `New` or `Deleted`; the UI distinguishes
/// these two states. For example, the buffer tab does not display a deletion indicator for
/// new files.
417#[derive(Copy, Clone, Debug, PartialEq)]
418pub enum DiskState {
419 /// File created in Zed that has not been saved.
420 New,
421 /// File present on the filesystem.
422 Present { mtime: MTime },
423 /// Deleted file that was previously present.
424 Deleted,
425}
426
427impl DiskState {
428 /// Returns the file's last known modification time on disk.
429 pub fn mtime(self) -> Option<MTime> {
430 match self {
431 DiskState::New => None,
432 DiskState::Present { mtime } => Some(mtime),
433 DiskState::Deleted => None,
434 }
435 }
436
437 pub fn exists(&self) -> bool {
438 match self {
439 DiskState::New => false,
440 DiskState::Present { .. } => true,
441 DiskState::Deleted => false,
442 }
443 }
444}
445
446/// The file associated with a buffer, in the case where the file is on the local disk.
447pub trait LocalFile: File {
    /// Returns the absolute path of this file.
449 fn abs_path(&self, cx: &App) -> PathBuf;
450
451 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
452 fn load(&self, cx: &App) -> Task<Result<String>>;
453
454 /// Loads the file's contents from disk.
455 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
456}
457
458/// The auto-indent behavior associated with an editing operation.
459/// For some editing operations, each affected line of text has its
460/// indentation recomputed. For other operations, the entire block
461/// of edited text is adjusted uniformly.
462#[derive(Clone, Debug)]
463pub enum AutoindentMode {
464 /// Indent each line of inserted text.
465 EachLine,
466 /// Apply the same indentation adjustment to all of the lines
467 /// in a given insertion.
468 Block {
469 /// The original indentation column of the first line of each
470 /// insertion, if it has been copied.
471 ///
472 /// Knowing this makes it possible to preserve the relative indentation
473 /// of every line in the insertion from when it was copied.
474 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion is shifted by `b - a` columns relative to its original
        /// indentation.
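        ///
        /// A small worked example: if the copied text's first line originally
        /// began at column 4 (`a = 4`) and auto-indent places it at column 8
        /// (`b = 8`), then a line that was copied at column 6 ends up at
        /// column 10, since every line is shifted by `b - a = 4` columns.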
478 original_indent_columns: Vec<Option<u32>>,
479 },
480}
481
482#[derive(Clone)]
483struct AutoindentRequest {
484 before_edit: BufferSnapshot,
485 entries: Vec<AutoindentRequestEntry>,
486 is_block_mode: bool,
487 ignore_empty_lines: bool,
488}
489
490#[derive(Debug, Clone)]
491struct AutoindentRequestEntry {
492 /// A range of the buffer whose indentation should be adjusted.
493 range: Range<Anchor>,
494 /// Whether or not these lines should be considered brand new, for the
495 /// purpose of auto-indent. When text is not new, its indentation will
496 /// only be adjusted if the suggested indentation level has *changed*
497 /// since the edit was made.
498 first_line_is_new: bool,
499 indent_size: IndentSize,
500 original_indent_column: Option<u32>,
501}
502
503#[derive(Debug)]
504struct IndentSuggestion {
505 basis_row: u32,
506 delta: Ordering,
507 within_error: bool,
508}
509
510struct BufferChunkHighlights<'a> {
511 captures: SyntaxMapCaptures<'a>,
512 next_capture: Option<SyntaxMapCapture<'a>>,
513 stack: Vec<(usize, HighlightId)>,
514 highlight_maps: Vec<HighlightMap>,
515}
516
517/// An iterator that yields chunks of a buffer's text, along with their
518/// syntax highlights and diagnostic status.
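///
/// A hedged usage sketch: `BufferChunks` is normally obtained from a snapshot
/// rather than constructed directly (the `chunks` method and its exact
/// signature are assumptions here):
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     if let Some(highlight_id) = chunk.syntax_highlight_id {
///         // Apply syntax highlighting to `chunk.text`.
///     }
/// }
/// ```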
519pub struct BufferChunks<'a> {
520 buffer_snapshot: Option<&'a BufferSnapshot>,
521 range: Range<usize>,
522 chunks: text::Chunks<'a>,
523 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
524 error_depth: usize,
525 warning_depth: usize,
526 information_depth: usize,
527 hint_depth: usize,
528 unnecessary_depth: usize,
529 underline: bool,
530 highlights: Option<BufferChunkHighlights<'a>>,
531}
532
533/// A chunk of a buffer's text, along with its syntax highlight and
534/// diagnostic status.
535#[derive(Clone, Debug, Default)]
536pub struct Chunk<'a> {
537 /// The text of the chunk.
538 pub text: &'a str,
539 /// The syntax highlighting style of the chunk.
540 pub syntax_highlight_id: Option<HighlightId>,
541 /// The highlight style that has been applied to this chunk in
542 /// the editor.
543 pub highlight_style: Option<HighlightStyle>,
544 /// The severity of diagnostic associated with this chunk, if any.
545 pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters in this chunk are tabs.
    pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
    pub chars: u128,
550 /// Whether this chunk of text is marked as unnecessary.
551 pub is_unnecessary: bool,
552 /// Whether this chunk of text was originally a tab character.
553 pub is_tab: bool,
554 /// Whether this chunk of text was originally an inlay.
555 pub is_inlay: bool,
556 /// Whether to underline the corresponding text range in the editor.
557 pub underline: bool,
558}
559
560/// A set of edits to a given version of a buffer, computed asynchronously.
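///
/// A hedged sketch of producing and consuming a `Diff`, using the `diff` and
/// `apply_diff` methods defined later in this file (the surrounding async
/// context is assumed):
///
/// ```ignore
/// // Compute the diff against `new_text` on a background task, then apply it.
/// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx)).await;
/// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
/// ```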
561#[derive(Debug)]
562pub struct Diff {
563 pub base_version: clock::Global,
564 pub line_ending: LineEnding,
565 pub edits: Vec<(Range<usize>, Arc<str>)>,
566}
567
568#[derive(Debug, Clone, Copy)]
569pub(crate) struct DiagnosticEndpoint {
570 offset: usize,
571 is_start: bool,
572 underline: bool,
573 severity: DiagnosticSeverity,
574 is_unnecessary: bool,
575}
576
577/// A class of characters, used for characterizing a run of text.
578#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
579pub enum CharKind {
580 /// Whitespace.
581 Whitespace,
582 /// Punctuation.
583 Punctuation,
584 /// Word.
585 Word,
586}
587
588/// Context for character classification within a specific scope.
589#[derive(Copy, Clone, Eq, PartialEq, Debug)]
590pub enum CharScopeContext {
591 /// Character classification for completion queries.
592 ///
593 /// This context treats certain characters as word constituents that would
594 /// normally be considered punctuation, such as '-' in Tailwind classes
595 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
596 Completion,
597 /// Character classification for linked edits.
598 ///
599 /// This context handles characters that should be treated as part of
600 /// identifiers during linked editing operations, such as '.' in JSX
601 /// component names like `<Animated.View>`.
602 LinkedEdit,
603}
604
/// A runnable is a set of data about a region that can be resolved into a task.
606pub struct Runnable {
607 pub tags: SmallVec<[RunnableTag; 1]>,
608 pub language: Arc<Language>,
609 pub buffer: BufferId,
610}
611
612#[derive(Default, Clone, Debug)]
613pub struct HighlightedText {
614 pub text: SharedString,
615 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
616}
617
618#[derive(Default, Debug)]
619struct HighlightedTextBuilder {
620 pub text: String,
621 highlights: Vec<(Range<usize>, HighlightStyle)>,
622}
623
624impl HighlightedText {
625 pub fn from_buffer_range<T: ToOffset>(
626 range: Range<T>,
627 snapshot: &text::BufferSnapshot,
628 syntax_snapshot: &SyntaxSnapshot,
629 override_style: Option<HighlightStyle>,
630 syntax_theme: &SyntaxTheme,
631 ) -> Self {
632 let mut highlighted_text = HighlightedTextBuilder::default();
633 highlighted_text.add_text_from_buffer_range(
634 range,
635 snapshot,
636 syntax_snapshot,
637 override_style,
638 syntax_theme,
639 );
640 highlighted_text.build()
641 }
642
643 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
644 gpui::StyledText::new(self.text.clone())
645 .with_default_highlights(default_style, self.highlights.iter().cloned())
646 }
647
    /// Returns the first line, with leading whitespace trimmed unless a highlight starts within it,
    /// along with a boolean indicating whether more lines follow.
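    ///
    /// For example, a value whose text is `"    if a {\n        b();\n    }"`
    /// with no highlights yields a preview of `"if a {"` and `true`, since the
    /// leading whitespace is trimmed and more lines follow.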
650 pub fn first_line_preview(self) -> (Self, bool) {
651 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
652 let first_line = &self.text[..newline_ix];
653
654 // Trim leading whitespace, unless an edit starts prior to it.
655 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
656 if let Some((first_highlight_range, _)) = self.highlights.first() {
657 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
658 }
659
660 let preview_text = &first_line[preview_start_ix..];
661 let preview_highlights = self
662 .highlights
663 .into_iter()
664 .skip_while(|(range, _)| range.end <= preview_start_ix)
665 .take_while(|(range, _)| range.start < newline_ix)
666 .filter_map(|(mut range, highlight)| {
667 range.start = range.start.saturating_sub(preview_start_ix);
668 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
669 if range.is_empty() {
670 None
671 } else {
672 Some((range, highlight))
673 }
674 });
675
676 let preview = Self {
677 text: SharedString::new(preview_text),
678 highlights: preview_highlights.collect(),
679 };
680
681 (preview, self.text.len() > newline_ix)
682 }
683}
684
685impl HighlightedTextBuilder {
686 pub fn build(self) -> HighlightedText {
687 HighlightedText {
688 text: self.text.into(),
689 highlights: self.highlights,
690 }
691 }
692
693 pub fn add_text_from_buffer_range<T: ToOffset>(
694 &mut self,
695 range: Range<T>,
696 snapshot: &text::BufferSnapshot,
697 syntax_snapshot: &SyntaxSnapshot,
698 override_style: Option<HighlightStyle>,
699 syntax_theme: &SyntaxTheme,
700 ) {
701 let range = range.to_offset(snapshot);
702 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
703 let start = self.text.len();
704 self.text.push_str(chunk.text);
705 let end = self.text.len();
706
707 if let Some(highlight_style) = chunk
708 .syntax_highlight_id
709 .and_then(|id| id.style(syntax_theme))
710 {
711 let highlight_style = override_style.map_or(highlight_style, |override_style| {
712 highlight_style.highlight(override_style)
713 });
714 self.highlights.push((start..end, highlight_style));
715 } else if let Some(override_style) = override_style {
716 self.highlights.push((start..end, override_style));
717 }
718 }
719 }
720
721 fn highlighted_chunks<'a>(
722 range: Range<usize>,
723 snapshot: &'a text::BufferSnapshot,
724 syntax_snapshot: &'a SyntaxSnapshot,
725 ) -> BufferChunks<'a> {
726 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
727 grammar
728 .highlights_config
729 .as_ref()
730 .map(|config| &config.query)
731 });
732
733 let highlight_maps = captures
734 .grammars()
735 .iter()
736 .map(|grammar| grammar.highlight_map())
737 .collect();
738
739 BufferChunks::new(
740 snapshot.as_rope(),
741 range,
742 Some((captures, highlight_maps)),
743 false,
744 None,
745 )
746 }
747}
748
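/// A preview of a set of proposed edits, holding text snapshots from before and
/// after the edits so they can be rendered without mutating the buffer.
///
/// A hedged usage sketch, assuming an `edits: Arc<[(Range<Anchor>, Arc<str>)]>`
/// built elsewhere:
///
/// ```ignore
/// let preview_task = buffer.read(cx).preview_edits(edits.clone(), cx);
/// // ... once the task resolves to an `EditPreview` called `preview`:
/// let highlighted = preview.highlight_edits(&snapshot, &edits, true, cx);
/// ```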
749#[derive(Clone)]
750pub struct EditPreview {
751 old_snapshot: text::BufferSnapshot,
752 applied_edits_snapshot: text::BufferSnapshot,
753 syntax_snapshot: SyntaxSnapshot,
754}
755
756impl EditPreview {
757 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
758 let (first, _) = edits.first()?;
759 let (last, _) = edits.last()?;
760
761 let start = first.start.to_point(&self.old_snapshot);
762 let old_end = last.end.to_point(&self.old_snapshot);
763 let new_end = last
764 .end
765 .bias_right(&self.old_snapshot)
766 .to_point(&self.applied_edits_snapshot);
767
768 let start = Point::new(start.row.saturating_sub(3), 0);
769 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
770 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
771
772 Some(unified_diff(
773 &self
774 .old_snapshot
775 .text_for_range(start..old_end)
776 .collect::<String>(),
777 &self
778 .applied_edits_snapshot
779 .text_for_range(start..new_end)
780 .collect::<String>(),
781 ))
782 }
783
784 pub fn highlight_edits(
785 &self,
786 current_snapshot: &BufferSnapshot,
787 edits: &[(Range<Anchor>, impl AsRef<str>)],
788 include_deletions: bool,
789 cx: &App,
790 ) -> HighlightedText {
791 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
792 return HighlightedText::default();
793 };
794
795 let mut highlighted_text = HighlightedTextBuilder::default();
796
797 let visible_range_in_preview_snapshot =
798 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
799 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
800
801 let insertion_highlight_style = HighlightStyle {
802 background_color: Some(cx.theme().status().created_background),
803 ..Default::default()
804 };
805 let deletion_highlight_style = HighlightStyle {
806 background_color: Some(cx.theme().status().deleted_background),
807 ..Default::default()
808 };
809 let syntax_theme = cx.theme().syntax();
810
811 for (range, edit_text) in edits {
812 let edit_new_end_in_preview_snapshot = range
813 .end
814 .bias_right(&self.old_snapshot)
815 .to_offset(&self.applied_edits_snapshot);
816 let edit_start_in_preview_snapshot =
817 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
818
819 let unchanged_range_in_preview_snapshot =
820 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
821 if !unchanged_range_in_preview_snapshot.is_empty() {
822 highlighted_text.add_text_from_buffer_range(
823 unchanged_range_in_preview_snapshot,
824 &self.applied_edits_snapshot,
825 &self.syntax_snapshot,
826 None,
827 syntax_theme,
828 );
829 }
830
831 let range_in_current_snapshot = range.to_offset(current_snapshot);
832 if include_deletions && !range_in_current_snapshot.is_empty() {
833 highlighted_text.add_text_from_buffer_range(
834 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
837 Some(deletion_highlight_style),
838 syntax_theme,
839 );
840 }
841
842 if !edit_text.as_ref().is_empty() {
843 highlighted_text.add_text_from_buffer_range(
844 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
845 &self.applied_edits_snapshot,
846 &self.syntax_snapshot,
847 Some(insertion_highlight_style),
848 syntax_theme,
849 );
850 }
851
852 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
853 }
854
855 highlighted_text.add_text_from_buffer_range(
856 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
857 &self.applied_edits_snapshot,
858 &self.syntax_snapshot,
859 None,
860 syntax_theme,
861 );
862
863 highlighted_text.build()
864 }
865
866 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
867 cx.new(|cx| {
868 let mut buffer = Buffer::local_normalized(
869 self.applied_edits_snapshot.as_rope().clone(),
870 self.applied_edits_snapshot.line_ending(),
871 cx,
872 );
873 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
874 buffer
875 })
876 }
877
878 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
879 let (first, _) = edits.first()?;
880 let (last, _) = edits.last()?;
881
882 let start = first
883 .start
884 .bias_left(&self.old_snapshot)
885 .to_point(&self.applied_edits_snapshot);
886 let end = last
887 .end
888 .bias_right(&self.old_snapshot)
889 .to_point(&self.applied_edits_snapshot);
890
891 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
892 let range = Point::new(start.row, 0)
893 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
894
895 Some(range)
896 }
897}
898
899#[derive(Clone, Debug, PartialEq, Eq)]
900pub struct BracketMatch<T> {
901 pub open_range: Range<T>,
902 pub close_range: Range<T>,
903 pub newline_only: bool,
904 pub syntax_layer_depth: usize,
905 pub color_index: Option<usize>,
906}
907
908impl<T> BracketMatch<T> {
909 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
910 (self.open_range, self.close_range)
911 }
912}
913
914impl Buffer {
915 /// Create a new buffer with the given base text.
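    ///
    /// A minimal sketch of constructing a local buffer as a gpui entity,
    /// assuming a mutable `App` context is in scope:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```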
916 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
917 Self::build(
918 TextBuffer::new(
919 ReplicaId::LOCAL,
920 cx.entity_id().as_non_zero_u64().into(),
921 base_text.into(),
922 ),
923 None,
924 Capability::ReadWrite,
925 )
926 }
927
928 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
929 pub fn local_normalized(
930 base_text_normalized: Rope,
931 line_ending: LineEnding,
932 cx: &Context<Self>,
933 ) -> Self {
934 Self::build(
935 TextBuffer::new_normalized(
936 ReplicaId::LOCAL,
937 cx.entity_id().as_non_zero_u64().into(),
938 line_ending,
939 base_text_normalized,
940 ),
941 None,
942 Capability::ReadWrite,
943 )
944 }
945
946 /// Create a new buffer that is a replica of a remote buffer.
947 pub fn remote(
948 remote_id: BufferId,
949 replica_id: ReplicaId,
950 capability: Capability,
951 base_text: impl Into<String>,
952 ) -> Self {
953 Self::build(
954 TextBuffer::new(replica_id, remote_id, base_text.into()),
955 None,
956 capability,
957 )
958 }
959
960 /// Create a new buffer that is a replica of a remote buffer, populating its
961 /// state from the given protobuf message.
962 pub fn from_proto(
963 replica_id: ReplicaId,
964 capability: Capability,
965 message: proto::BufferState,
966 file: Option<Arc<dyn File>>,
967 ) -> Result<Self> {
968 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
969 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
970 let mut this = Self::build(buffer, file, capability);
971 this.text.set_line_ending(proto::deserialize_line_ending(
972 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
973 ));
974 this.saved_version = proto::deserialize_version(&message.saved_version);
975 this.saved_mtime = message.saved_mtime.map(|time| time.into());
976 Ok(this)
977 }
978
979 /// Serialize the buffer's state to a protobuf message.
980 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
981 proto::BufferState {
982 id: self.remote_id().into(),
983 file: self.file.as_ref().map(|f| f.to_proto(cx)),
984 base_text: self.base_text().to_string(),
985 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
986 saved_version: proto::serialize_version(&self.saved_version),
987 saved_mtime: self.saved_mtime.map(|time| time.into()),
988 }
989 }
990
991 /// Serialize as protobufs all of the changes to the buffer since the given version.
992 pub fn serialize_ops(
993 &self,
994 since: Option<clock::Global>,
995 cx: &App,
996 ) -> Task<Vec<proto::Operation>> {
997 let mut operations = Vec::new();
998 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
999
1000 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1001 proto::serialize_operation(&Operation::UpdateSelections {
1002 selections: set.selections.clone(),
1003 lamport_timestamp: set.lamport_timestamp,
1004 line_mode: set.line_mode,
1005 cursor_shape: set.cursor_shape,
1006 })
1007 }));
1008
1009 for (server_id, diagnostics) in &self.diagnostics {
1010 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1011 lamport_timestamp: self.diagnostics_timestamp,
1012 server_id: *server_id,
1013 diagnostics: diagnostics.iter().cloned().collect(),
1014 }));
1015 }
1016
1017 for (server_id, completions) in &self.completion_triggers_per_language_server {
1018 operations.push(proto::serialize_operation(
1019 &Operation::UpdateCompletionTriggers {
1020 triggers: completions.iter().cloned().collect(),
1021 lamport_timestamp: self.completion_triggers_timestamp,
1022 server_id: *server_id,
1023 },
1024 ));
1025 }
1026
1027 let text_operations = self.text.operations().clone();
1028 cx.background_spawn(async move {
1029 let since = since.unwrap_or_default();
1030 operations.extend(
1031 text_operations
1032 .iter()
1033 .filter(|(_, op)| !since.observed(op.timestamp()))
1034 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1035 );
1036 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1037 operations
1038 })
1039 }
1040
1041 /// Assign a language to the buffer, returning the buffer.
1042 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1043 self.set_language_async(Some(language), cx);
1044 self
1045 }
1046
1047 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
1048 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1049 self.set_language(Some(language), cx);
1050 self
1051 }
1052
1053 /// Returns the [`Capability`] of this buffer.
1054 pub fn capability(&self) -> Capability {
1055 self.capability
1056 }
1057
1058 /// Whether this buffer can only be read.
1059 pub fn read_only(&self) -> bool {
1060 self.capability == Capability::ReadOnly
1061 }
1062
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1064 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1065 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1066 let snapshot = buffer.snapshot();
1067 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1068 let tree_sitter_data = TreeSitterData::new(snapshot);
1069 Self {
1070 saved_mtime,
1071 tree_sitter_data: Arc::new(tree_sitter_data),
1072 saved_version: buffer.version(),
1073 preview_version: buffer.version(),
1074 reload_task: None,
1075 transaction_depth: 0,
1076 was_dirty_before_starting_transaction: None,
1077 has_unsaved_edits: Cell::new((buffer.version(), false)),
1078 text: buffer,
1079 branch_state: None,
1080 file,
1081 capability,
1082 syntax_map,
1083 reparse: None,
1084 non_text_state_update_count: 0,
1085 sync_parse_timeout: Duration::from_millis(1),
1086 parse_status: watch::channel(ParseStatus::Idle),
1087 autoindent_requests: Default::default(),
1088 wait_for_autoindent_txs: Default::default(),
1089 pending_autoindent: Default::default(),
1090 language: None,
1091 remote_selections: Default::default(),
1092 diagnostics: Default::default(),
1093 diagnostics_timestamp: Lamport::MIN,
1094 completion_triggers: Default::default(),
1095 completion_triggers_per_language_server: Default::default(),
1096 completion_triggers_timestamp: Lamport::MIN,
1097 deferred_ops: OperationQueue::new(),
1098 has_conflict: false,
1099 change_bits: Default::default(),
1100 _subscriptions: Vec::new(),
1101 }
1102 }
1103
1104 pub fn build_snapshot(
1105 text: Rope,
1106 language: Option<Arc<Language>>,
1107 language_registry: Option<Arc<LanguageRegistry>>,
1108 cx: &mut App,
1109 ) -> impl Future<Output = BufferSnapshot> + use<> {
1110 let entity_id = cx.reserve_entity::<Self>().entity_id();
1111 let buffer_id = entity_id.as_non_zero_u64().into();
1112 async move {
1113 let text =
1114 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1115 .snapshot();
1116 let mut syntax = SyntaxMap::new(&text).snapshot();
1117 if let Some(language) = language.clone() {
1118 let language_registry = language_registry.clone();
1119 syntax.reparse(&text, language_registry, language);
1120 }
1121 let tree_sitter_data = TreeSitterData::new(text.clone());
1122 BufferSnapshot {
1123 text,
1124 syntax,
1125 file: None,
1126 diagnostics: Default::default(),
1127 remote_selections: Default::default(),
1128 tree_sitter_data: Arc::new(tree_sitter_data),
1129 language,
1130 non_text_state_update_count: 0,
1131 }
1132 }
1133 }
1134
1135 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1136 let entity_id = cx.reserve_entity::<Self>().entity_id();
1137 let buffer_id = entity_id.as_non_zero_u64().into();
1138 let text = TextBuffer::new_normalized(
1139 ReplicaId::LOCAL,
1140 buffer_id,
1141 Default::default(),
1142 Rope::new(),
1143 )
1144 .snapshot();
1145 let syntax = SyntaxMap::new(&text).snapshot();
1146 let tree_sitter_data = TreeSitterData::new(text.clone());
1147 BufferSnapshot {
1148 text,
1149 syntax,
1150 tree_sitter_data: Arc::new(tree_sitter_data),
1151 file: None,
1152 diagnostics: Default::default(),
1153 remote_selections: Default::default(),
1154 language: None,
1155 non_text_state_update_count: 0,
1156 }
1157 }
1158
1159 #[cfg(any(test, feature = "test-support"))]
1160 pub fn build_snapshot_sync(
1161 text: Rope,
1162 language: Option<Arc<Language>>,
1163 language_registry: Option<Arc<LanguageRegistry>>,
1164 cx: &mut App,
1165 ) -> BufferSnapshot {
1166 let entity_id = cx.reserve_entity::<Self>().entity_id();
1167 let buffer_id = entity_id.as_non_zero_u64().into();
1168 let text =
1169 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1170 .snapshot();
1171 let mut syntax = SyntaxMap::new(&text).snapshot();
1172 if let Some(language) = language.clone() {
1173 syntax.reparse(&text, language_registry, language);
1174 }
1175 let tree_sitter_data = TreeSitterData::new(text.clone());
1176 BufferSnapshot {
1177 text,
1178 syntax,
1179 tree_sitter_data: Arc::new(tree_sitter_data),
1180 file: None,
1181 diagnostics: Default::default(),
1182 remote_selections: Default::default(),
1183 language,
1184 non_text_state_update_count: 0,
1185 }
1186 }
1187
1188 /// Retrieve a snapshot of the buffer's current state. This is computationally
1189 /// cheap, and allows reading from the buffer on a background thread.
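    ///
    /// A sketch of reading buffer contents on a background task via a snapshot
    /// (the surrounding task-spawning code is an assumption):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     let line_count = snapshot.max_point().row + 1;
    ///     // ... use `line_count` without blocking the main thread ...
    /// })
    /// .detach();
    /// ```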
1190 pub fn snapshot(&self) -> BufferSnapshot {
1191 let text = self.text.snapshot();
1192 let mut syntax_map = self.syntax_map.lock();
1193 syntax_map.interpolate(&text);
1194 let syntax = syntax_map.snapshot();
1195
1196 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1197 Arc::new(TreeSitterData::new(text.clone()))
1198 } else {
1199 self.tree_sitter_data.clone()
1200 };
1201
1202 BufferSnapshot {
1203 text,
1204 syntax,
1205 tree_sitter_data,
1206 file: self.file.clone(),
1207 remote_selections: self.remote_selections.clone(),
1208 diagnostics: self.diagnostics.clone(),
1209 language: self.language.clone(),
1210 non_text_state_update_count: self.non_text_state_update_count,
1211 }
1212 }
1213
1214 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1215 let this = cx.entity();
1216 cx.new(|cx| {
1217 let mut branch = Self {
1218 branch_state: Some(BufferBranchState {
1219 base_buffer: this.clone(),
1220 merged_operations: Default::default(),
1221 }),
1222 language: self.language.clone(),
1223 has_conflict: self.has_conflict,
1224 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1225 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1226 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1227 };
1228 if let Some(language_registry) = self.language_registry() {
1229 branch.set_language_registry(language_registry);
1230 }
1231
1232 // Reparse the branch buffer so that we get syntax highlighting immediately.
1233 branch.reparse(cx, true);
1234
1235 branch
1236 })
1237 }
1238
1239 pub fn preview_edits(
1240 &self,
1241 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1242 cx: &App,
1243 ) -> Task<EditPreview> {
1244 let registry = self.language_registry();
1245 let language = self.language().cloned();
1246 let old_snapshot = self.text.snapshot();
1247 let mut branch_buffer = self.text.branch();
1248 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1249 cx.background_spawn(async move {
1250 if !edits.is_empty() {
1251 if let Some(language) = language.clone() {
1252 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1253 }
1254
1255 branch_buffer.edit(edits.iter().cloned());
1256 let snapshot = branch_buffer.snapshot();
1257 syntax_snapshot.interpolate(&snapshot);
1258
1259 if let Some(language) = language {
1260 syntax_snapshot.reparse(&snapshot, registry, language);
1261 }
1262 }
1263 EditPreview {
1264 old_snapshot,
1265 applied_edits_snapshot: branch_buffer.snapshot(),
1266 syntax_snapshot,
1267 }
1268 })
1269 }
1270
1271 /// Applies all of the changes in this buffer that intersect any of the
1272 /// given `ranges` to its base buffer.
1273 ///
1274 /// If `ranges` is empty, then all changes will be applied. This buffer must
1275 /// be a branch buffer to call this method.
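    ///
    /// A hedged sketch of branching a buffer, editing the branch, and merging
    /// the change back (entity names here are illustrative):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// header\n")], None, cx);
    ///     // An empty range list means "merge every change".
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```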
1276 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1277 let Some(base_buffer) = self.base_buffer() else {
1278 debug_panic!("not a branch buffer");
1279 return;
1280 };
1281
1282 let mut ranges = if ranges.is_empty() {
1283 &[0..usize::MAX]
1284 } else {
1285 ranges.as_slice()
1286 }
1287 .iter()
1288 .peekable();
1289
1290 let mut edits = Vec::new();
1291 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1292 let mut is_included = false;
1293 while let Some(range) = ranges.peek() {
1294 if range.end < edit.new.start {
1295 ranges.next().unwrap();
1296 } else {
1297 if range.start <= edit.new.end {
1298 is_included = true;
1299 }
1300 break;
1301 }
1302 }
1303
1304 if is_included {
1305 edits.push((
1306 edit.old.clone(),
1307 self.text_for_range(edit.new.clone()).collect::<String>(),
1308 ));
1309 }
1310 }
1311
1312 let operation = base_buffer.update(cx, |base_buffer, cx| {
1313 // cx.emit(BufferEvent::DiffBaseChanged);
1314 base_buffer.edit(edits, None, cx)
1315 });
1316
1317 if let Some(operation) = operation
1318 && let Some(BufferBranchState {
1319 merged_operations, ..
1320 }) = &mut self.branch_state
1321 {
1322 merged_operations.push(operation);
1323 }
1324 }
1325
1326 fn on_base_buffer_event(
1327 &mut self,
1328 _: Entity<Buffer>,
1329 event: &BufferEvent,
1330 cx: &mut Context<Self>,
1331 ) {
1332 let BufferEvent::Operation { operation, .. } = event else {
1333 return;
1334 };
1335 let Some(BufferBranchState {
1336 merged_operations, ..
1337 }) = &mut self.branch_state
1338 else {
1339 return;
1340 };
1341
1342 let mut operation_to_undo = None;
1343 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1344 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1345 {
1346 merged_operations.remove(ix);
1347 operation_to_undo = Some(operation.timestamp);
1348 }
1349
1350 self.apply_ops([operation.clone()], cx);
1351
1352 if let Some(timestamp) = operation_to_undo {
1353 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1354 self.undo_operations(counts, cx);
1355 }
1356 }
1357
1358 #[cfg(test)]
1359 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1360 &self.text
1361 }
1362
1363 /// Retrieve a snapshot of the buffer's raw text, without any
1364 /// language-related state like the syntax tree or diagnostics.
1365 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1366 self.text.snapshot()
1367 }
1368
1369 /// The file associated with the buffer, if any.
1370 pub fn file(&self) -> Option<&Arc<dyn File>> {
1371 self.file.as_ref()
1372 }
1373
1374 /// The version of the buffer that was last saved or reloaded from disk.
1375 pub fn saved_version(&self) -> &clock::Global {
1376 &self.saved_version
1377 }
1378
1379 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1380 pub fn saved_mtime(&self) -> Option<MTime> {
1381 self.saved_mtime
1382 }
1383
1384 /// Assign a language to the buffer.
1385 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1386 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1387 }
1388
1389 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1390 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1391 self.set_language_(language, true, cx);
1392 }
1393
1394 fn set_language_(
1395 &mut self,
1396 language: Option<Arc<Language>>,
1397 may_block: bool,
1398 cx: &mut Context<Self>,
1399 ) {
1400 self.non_text_state_update_count += 1;
1401 self.syntax_map.lock().clear(&self.text);
1402 let old_language = std::mem::replace(&mut self.language, language);
1403 self.was_changed();
1404 self.reparse(cx, may_block);
1405 let has_fresh_language =
1406 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1407 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1408 }
1409
1410 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1411 /// other languages if parts of the buffer are written in different languages.
1412 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1413 self.syntax_map
1414 .lock()
1415 .set_language_registry(language_registry);
1416 }
1417
1418 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1419 self.syntax_map.lock().language_registry()
1420 }
1421
1422 /// Assign the line ending type to the buffer.
1423 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1424 self.text.set_line_ending(line_ending);
1425
1426 let lamport_timestamp = self.text.lamport_clock.tick();
1427 self.send_operation(
1428 Operation::UpdateLineEnding {
1429 line_ending,
1430 lamport_timestamp,
1431 },
1432 true,
1433 cx,
1434 );
1435 }
1436
1437 /// Assign the buffer a new [`Capability`].
1438 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1439 if self.capability != capability {
1440 self.capability = capability;
1441 cx.emit(BufferEvent::CapabilityChanged)
1442 }
1443 }
1444
1445 /// This method is called to signal that the buffer has been saved.
1446 pub fn did_save(
1447 &mut self,
1448 version: clock::Global,
1449 mtime: Option<MTime>,
1450 cx: &mut Context<Self>,
1451 ) {
1452 self.saved_version = version.clone();
1453 self.has_unsaved_edits.set((version, false));
1454 self.has_conflict = false;
1455 self.saved_mtime = mtime;
1456 self.was_changed();
1457 cx.emit(BufferEvent::Saved);
1458 cx.notify();
1459 }
1460
1461 /// Reloads the contents of the buffer from disk.
1462 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1463 let (tx, rx) = futures::channel::oneshot::channel();
1464 let prev_version = self.text.version();
1465 self.reload_task = Some(cx.spawn(async move |this, cx| {
1466 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1467 let file = this.file.as_ref()?.as_local()?;
1468
1469 Some((file.disk_state().mtime(), file.load(cx)))
1470 })?
1471 else {
1472 return Ok(());
1473 };
1474
1475 let new_text = new_text.await?;
1476 let diff = this
1477 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1478 .await;
1479 this.update(cx, |this, cx| {
1480 if this.version() == diff.base_version {
1481 this.finalize_last_transaction();
1482 this.apply_diff(diff, cx);
1483 tx.send(this.finalize_last_transaction().cloned()).ok();
1484 this.has_conflict = false;
1485 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1486 } else {
1487 if !diff.edits.is_empty()
1488 || this
1489 .edits_since::<usize>(&diff.base_version)
1490 .next()
1491 .is_some()
1492 {
1493 this.has_conflict = true;
1494 }
1495
1496 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1497 }
1498
1499 this.reload_task.take();
1500 })
1501 }));
1502 rx
1503 }
1504
1505 /// This method is called to signal that the buffer has been reloaded.
1506 pub fn did_reload(
1507 &mut self,
1508 version: clock::Global,
1509 line_ending: LineEnding,
1510 mtime: Option<MTime>,
1511 cx: &mut Context<Self>,
1512 ) {
1513 self.saved_version = version;
1514 self.has_unsaved_edits
1515 .set((self.saved_version.clone(), false));
1516 self.text.set_line_ending(line_ending);
1517 self.saved_mtime = mtime;
1518 cx.emit(BufferEvent::Reloaded);
1519 cx.notify();
1520 }
1521
1522 /// Updates the [`File`] backing this buffer. This should be called when
1523 /// the file has changed or has been deleted.
1524 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1525 let was_dirty = self.is_dirty();
1526 let mut file_changed = false;
1527
1528 if let Some(old_file) = self.file.as_ref() {
1529 if new_file.path() != old_file.path() {
1530 file_changed = true;
1531 }
1532
1533 let old_state = old_file.disk_state();
1534 let new_state = new_file.disk_state();
1535 if old_state != new_state {
1536 file_changed = true;
1537 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1538 cx.emit(BufferEvent::ReloadNeeded)
1539 }
1540 }
1541 } else {
1542 file_changed = true;
1543 };
1544
1545 self.file = Some(new_file);
1546 if file_changed {
1547 self.was_changed();
1548 self.non_text_state_update_count += 1;
1549 if was_dirty != self.is_dirty() {
1550 cx.emit(BufferEvent::DirtyChanged);
1551 }
1552 cx.emit(BufferEvent::FileHandleChanged);
1553 cx.notify();
1554 }
1555 }
1556
1557 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1558 Some(self.branch_state.as_ref()?.base_buffer.clone())
1559 }
1560
1561 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1562 pub fn language(&self) -> Option<&Arc<Language>> {
1563 self.language.as_ref()
1564 }
1565
1566 /// Returns the [`Language`] at the given location.
1567 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1568 let offset = position.to_offset(self);
1569 let mut is_first = true;
1570 let start_anchor = self.anchor_before(offset);
1571 let end_anchor = self.anchor_after(offset);
1572 self.syntax_map
1573 .lock()
1574 .layers_for_range(offset..offset, &self.text, false)
1575 .filter(|layer| {
1576 if is_first {
1577 is_first = false;
1578 return true;
1579 }
1580
1581 layer
1582 .included_sub_ranges
1583 .map(|sub_ranges| {
1584 sub_ranges.iter().any(|sub_range| {
1585 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1586 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1587 !is_before_start && !is_after_end
1588 })
1589 })
1590 .unwrap_or(true)
1591 })
1592 .last()
1593 .map(|info| info.language.clone())
1594 .or_else(|| self.language.clone())
1595 }
1596
1597 /// Returns each [`Language`] for the active syntax layers at the given location.
1598 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1599 let offset = position.to_offset(self);
1600 let mut languages: Vec<Arc<Language>> = self
1601 .syntax_map
1602 .lock()
1603 .layers_for_range(offset..offset, &self.text, false)
1604 .map(|info| info.language.clone())
1605 .collect();
1606
1607 if languages.is_empty()
1608 && let Some(buffer_language) = self.language()
1609 {
1610 languages.push(buffer_language.clone());
1611 }
1612
1613 languages
1614 }
1615
1616 /// An integer version number that accounts for all updates besides
1617 /// the buffer's text itself (which is versioned via a version vector).
1618 pub fn non_text_state_update_count(&self) -> usize {
1619 self.non_text_state_update_count
1620 }
1621
1622 /// Whether the buffer is being parsed in the background.
1623 #[cfg(any(test, feature = "test-support"))]
1624 pub fn is_parsing(&self) -> bool {
1625 self.reparse.is_some()
1626 }
1627
1628 /// Indicates whether the buffer contains any regions that may be
1629 /// written in a language that hasn't been loaded yet.
1630 pub fn contains_unknown_injections(&self) -> bool {
1631 self.syntax_map.lock().contains_unknown_injections()
1632 }
1633
1634 #[cfg(any(test, feature = "test-support"))]
1635 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1636 self.sync_parse_timeout = timeout;
1637 }
1638
1639 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1640 match Arc::get_mut(&mut self.tree_sitter_data) {
1641 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1642 None => {
1643 let tree_sitter_data = TreeSitterData::new(snapshot);
1644 self.tree_sitter_data = Arc::new(tree_sitter_data)
1645 }
1646 }
1647 }
1648
1649 /// Called after an edit to synchronize the buffer's main parse tree with
1650 /// the buffer's new underlying state.
1651 ///
1652 /// Locks the syntax map and interpolates the edits since the last reparse
1653 /// into the foreground syntax tree.
1654 ///
1655 /// Then takes a stable snapshot of the syntax map before unlocking it.
1656 /// The snapshot with the interpolated edits is sent to a background thread,
1657 /// where we ask Tree-sitter to perform an incremental parse.
1658 ///
    /// Meanwhile, in the foreground, if `may_block` is true, we block the main
    /// thread for up to 1ms waiting for the parse to complete. If the parse
    /// finishes within that window, we proceed synchronously.
    ///
    /// If we time out waiting for the parse, we spawn a second task that waits
    /// for the parse to complete, and we return with the interpolated tree still
    /// in the foreground. When the background parse completes, it calls back into
    /// the main thread and assigns the result to the foreground parse state.
1667 ///
1668 /// If the buffer or grammar changed since the start of the background parse,
1669 /// initiate an additional reparse recursively. To avoid concurrent parses
1670 /// for the same buffer, we only initiate a new parse if we are not already
1671 /// parsing in the background.
1672 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1673 if self.text.version() != *self.tree_sitter_data.version() {
1674 self.invalidate_tree_sitter_data(self.text.snapshot());
1675 }
1676 if self.reparse.is_some() {
1677 return;
1678 }
1679 let language = if let Some(language) = self.language.clone() {
1680 language
1681 } else {
1682 return;
1683 };
1684
1685 let text = self.text_snapshot();
1686 let parsed_version = self.version();
1687
1688 let mut syntax_map = self.syntax_map.lock();
1689 syntax_map.interpolate(&text);
1690 let language_registry = syntax_map.language_registry();
1691 let mut syntax_snapshot = syntax_map.snapshot();
1692 drop(syntax_map);
1693
1694 let parse_task = cx.background_spawn({
1695 let language = language.clone();
1696 let language_registry = language_registry.clone();
1697 async move {
1698 syntax_snapshot.reparse(&text, language_registry, language);
1699 syntax_snapshot
1700 }
1701 });
1702
1703 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1704 if may_block {
1705 match cx
1706 .background_executor()
1707 .block_with_timeout(self.sync_parse_timeout, parse_task)
1708 {
1709 Ok(new_syntax_snapshot) => {
1710 self.did_finish_parsing(new_syntax_snapshot, cx);
1711 self.reparse = None;
1712 }
1713 Err(parse_task) => {
1714 self.reparse = Some(cx.spawn(async move |this, cx| {
1715 let new_syntax_map = cx.background_spawn(parse_task).await;
1716 this.update(cx, move |this, cx| {
1717 let grammar_changed = || {
1718 this.language.as_ref().is_none_or(|current_language| {
1719 !Arc::ptr_eq(&language, current_language)
1720 })
1721 };
1722 let language_registry_changed = || {
1723 new_syntax_map.contains_unknown_injections()
1724 && language_registry.is_some_and(|registry| {
1725 registry.version()
1726 != new_syntax_map.language_registry_version()
1727 })
1728 };
1729 let parse_again = this.version.changed_since(&parsed_version)
1730 || language_registry_changed()
1731 || grammar_changed();
1732 this.did_finish_parsing(new_syntax_map, cx);
1733 this.reparse = None;
1734 if parse_again {
1735 this.reparse(cx, false);
1736 }
1737 })
1738 .ok();
1739 }));
1740 }
1741 }
1742 } else {
1743 self.reparse = Some(cx.spawn(async move |this, cx| {
1744 let new_syntax_map = cx.background_spawn(parse_task).await;
1745 this.update(cx, move |this, cx| {
1746 let grammar_changed = || {
1747 this.language.as_ref().is_none_or(|current_language| {
1748 !Arc::ptr_eq(&language, current_language)
1749 })
1750 };
1751 let language_registry_changed = || {
1752 new_syntax_map.contains_unknown_injections()
1753 && language_registry.is_some_and(|registry| {
1754 registry.version() != new_syntax_map.language_registry_version()
1755 })
1756 };
1757 let parse_again = this.version.changed_since(&parsed_version)
1758 || language_registry_changed()
1759 || grammar_changed();
1760 this.did_finish_parsing(new_syntax_map, cx);
1761 this.reparse = None;
1762 if parse_again {
1763 this.reparse(cx, false);
1764 }
1765 })
1766 .ok();
1767 }));
1768 }
1769 }
1770
1771 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1772 self.was_changed();
1773 self.non_text_state_update_count += 1;
1774 self.syntax_map.lock().did_parse(syntax_snapshot);
1775 self.request_autoindent(cx);
1776 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1777 if self.text.version() != *self.tree_sitter_data.version() {
1778 self.invalidate_tree_sitter_data(self.text.snapshot());
1779 }
1780 cx.emit(BufferEvent::Reparsed);
1781 cx.notify();
1782 }
1783
1784 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1785 self.parse_status.1.clone()
1786 }
1787
1788 /// Waits until the buffer is no longer parsing.
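///
/// A minimal sketch (not compiled as a doctest), e.g. in an async test where
/// `buffer` is a `&Buffer`:
///
/// ```ignore
/// // The returned future is `'static`, so it can be awaited after the borrow
/// // of the buffer ends.
/// let idle = buffer.parsing_idle();
/// idle.await;
/// ```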
1789 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1790 let mut parse_status = self.parse_status();
1791 async move {
1792 while *parse_status.borrow() != ParseStatus::Idle {
1793 if parse_status.changed().await.is_err() {
1794 break;
1795 }
1796 }
1797 }
1798 }
1799
1800 /// Assign to the buffer a set of diagnostics created by a given language server.
1801 pub fn update_diagnostics(
1802 &mut self,
1803 server_id: LanguageServerId,
1804 diagnostics: DiagnosticSet,
1805 cx: &mut Context<Self>,
1806 ) {
1807 let lamport_timestamp = self.text.lamport_clock.tick();
1808 let op = Operation::UpdateDiagnostics {
1809 server_id,
1810 diagnostics: diagnostics.iter().cloned().collect(),
1811 lamport_timestamp,
1812 };
1813
1814 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1815 self.send_operation(op, true, cx);
1816 }
1817
1818 pub fn buffer_diagnostics(
1819 &self,
1820 for_server: Option<LanguageServerId>,
1821 ) -> Vec<&DiagnosticEntry<Anchor>> {
1822 match for_server {
1823 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1824 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1825 Err(_) => Vec::new(),
1826 },
1827 None => self
1828 .diagnostics
1829 .iter()
1830 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1831 .collect(),
1832 }
1833 }
1834
1835 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1836 if let Some(indent_sizes) = self.compute_autoindents() {
1837 let indent_sizes = cx.background_spawn(indent_sizes);
1838 match cx
1839 .background_executor()
1840 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1841 {
1842 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1843 Err(indent_sizes) => {
1844 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1845 let indent_sizes = indent_sizes.await;
1846 this.update(cx, |this, cx| {
1847 this.apply_autoindents(indent_sizes, cx);
1848 })
1849 .ok();
1850 }));
1851 }
1852 }
1853 } else {
1854 self.autoindent_requests.clear();
1855 for tx in self.wait_for_autoindent_txs.drain(..) {
1856 tx.send(()).ok();
1857 }
1858 }
1859 }
1860
1861 fn compute_autoindents(
1862 &self,
1863 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1864 let max_rows_between_yields = 100;
1865 let snapshot = self.snapshot();
1866 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1867 return None;
1868 }
1869
1870 let autoindent_requests = self.autoindent_requests.clone();
1871 Some(async move {
1872 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1873 for request in autoindent_requests {
1874 // Resolve each edited range to its row in the current buffer and in the
1875 // buffer before this batch of edits.
1876 let mut row_ranges = Vec::new();
1877 let mut old_to_new_rows = BTreeMap::new();
1878 let mut language_indent_sizes_by_new_row = Vec::new();
1879 for entry in &request.entries {
1880 let position = entry.range.start;
1881 let new_row = position.to_point(&snapshot).row;
1882 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1883 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1884
1885 if !entry.first_line_is_new {
1886 let old_row = position.to_point(&request.before_edit).row;
1887 old_to_new_rows.insert(old_row, new_row);
1888 }
1889 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1890 }
1891
1892 // Build a map containing the suggested indentation for each of the edited lines
1893 // with respect to the state of the buffer before these edits. This map is keyed
1894 // by the rows for these lines in the current state of the buffer.
1895 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1896 let old_edited_ranges =
1897 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1898 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1899 let mut language_indent_size = IndentSize::default();
1900 for old_edited_range in old_edited_ranges {
1901 let suggestions = request
1902 .before_edit
1903 .suggest_autoindents(old_edited_range.clone())
1904 .into_iter()
1905 .flatten();
1906 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1907 if let Some(suggestion) = suggestion {
1908 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1909
1910 // Find the indent size based on the language for this row.
1911 while let Some((row, size)) = language_indent_sizes.peek() {
1912 if *row > new_row {
1913 break;
1914 }
1915 language_indent_size = *size;
1916 language_indent_sizes.next();
1917 }
1918
1919 let suggested_indent = old_to_new_rows
1920 .get(&suggestion.basis_row)
1921 .and_then(|from_row| {
1922 Some(old_suggestions.get(from_row).copied()?.0)
1923 })
1924 .unwrap_or_else(|| {
1925 request
1926 .before_edit
1927 .indent_size_for_line(suggestion.basis_row)
1928 })
1929 .with_delta(suggestion.delta, language_indent_size);
1930 old_suggestions
1931 .insert(new_row, (suggested_indent, suggestion.within_error));
1932 }
1933 }
1934 yield_now().await;
1935 }
1936
1937 // Compute new suggestions for each line, but only include them in the result
1938 // if they differ from the old suggestion for that line.
1939 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1940 let mut language_indent_size = IndentSize::default();
1941 for (row_range, original_indent_column) in row_ranges {
1942 let new_edited_row_range = if request.is_block_mode {
1943 row_range.start..row_range.start + 1
1944 } else {
1945 row_range.clone()
1946 };
1947
1948 let suggestions = snapshot
1949 .suggest_autoindents(new_edited_row_range.clone())
1950 .into_iter()
1951 .flatten();
1952 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1953 if let Some(suggestion) = suggestion {
1954 // Find the indent size based on the language for this row.
1955 while let Some((row, size)) = language_indent_sizes.peek() {
1956 if *row > new_row {
1957 break;
1958 }
1959 language_indent_size = *size;
1960 language_indent_sizes.next();
1961 }
1962
1963 let suggested_indent = indent_sizes
1964 .get(&suggestion.basis_row)
1965 .copied()
1966 .map(|e| e.0)
1967 .unwrap_or_else(|| {
1968 snapshot.indent_size_for_line(suggestion.basis_row)
1969 })
1970 .with_delta(suggestion.delta, language_indent_size);
1971
1972 if old_suggestions.get(&new_row).is_none_or(
1973 |(old_indentation, was_within_error)| {
1974 suggested_indent != *old_indentation
1975 && (!suggestion.within_error || *was_within_error)
1976 },
1977 ) {
1978 indent_sizes.insert(
1979 new_row,
1980 (suggested_indent, request.ignore_empty_lines),
1981 );
1982 }
1983 }
1984 }
1985
1986 if let (true, Some(original_indent_column)) =
1987 (request.is_block_mode, original_indent_column)
1988 {
1989 let new_indent =
1990 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1991 *indent
1992 } else {
1993 snapshot.indent_size_for_line(row_range.start)
1994 };
1995 let delta = new_indent.len as i64 - original_indent_column as i64;
1996 if delta != 0 {
1997 for row in row_range.skip(1) {
1998 indent_sizes.entry(row).or_insert_with(|| {
1999 let mut size = snapshot.indent_size_for_line(row);
2000 if size.kind == new_indent.kind {
2001 match delta.cmp(&0) {
2002 Ordering::Greater => size.len += delta as u32,
2003 Ordering::Less => {
2004 size.len = size.len.saturating_sub(-delta as u32)
2005 }
2006 Ordering::Equal => {}
2007 }
2008 }
2009 (size, request.ignore_empty_lines)
2010 });
2011 }
2012 }
2013 }
2014
2015 yield_now().await;
2016 }
2017 }
2018
2019 indent_sizes
2020 .into_iter()
2021 .filter_map(|(row, (indent, ignore_empty_lines))| {
2022 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2023 None
2024 } else {
2025 Some((row, indent))
2026 }
2027 })
2028 .collect()
2029 })
2030 }
2031
2032 fn apply_autoindents(
2033 &mut self,
2034 indent_sizes: BTreeMap<u32, IndentSize>,
2035 cx: &mut Context<Self>,
2036 ) {
2037 self.autoindent_requests.clear();
2038 for tx in self.wait_for_autoindent_txs.drain(..) {
2039 tx.send(()).ok();
2040 }
2041
2042 let edits: Vec<_> = indent_sizes
2043 .into_iter()
2044 .filter_map(|(row, indent_size)| {
2045 let current_size = indent_size_for_line(self, row);
2046 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2047 })
2048 .collect();
2049
2050 let preserve_preview = self.preserve_preview();
2051 self.edit(edits, None, cx);
2052 if preserve_preview {
2053 self.refresh_preview();
2054 }
2055 }
2056
2057 /// Create a minimal edit that will cause the given row to be indented
2058 /// with the given size. After applying this edit, the length of the line
2059 /// will always be at least `new_size.len`.
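///
/// A minimal sketch (not compiled as a doctest) of growing a line's
/// indentation from two spaces to four:
///
/// ```ignore
/// // The returned edit inserts the two missing spaces at the start of row 3
/// // without touching the rest of the line.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(
///     edit,
///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
/// );
/// ```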
2060 pub fn edit_for_indent_size_adjustment(
2061 row: u32,
2062 current_size: IndentSize,
2063 new_size: IndentSize,
2064 ) -> Option<(Range<Point>, String)> {
2065 if new_size.kind == current_size.kind {
2066 match new_size.len.cmp(&current_size.len) {
2067 Ordering::Greater => {
2068 let point = Point::new(row, 0);
2069 Some((
2070 point..point,
2071 iter::repeat(new_size.char())
2072 .take((new_size.len - current_size.len) as usize)
2073 .collect::<String>(),
2074 ))
2075 }
2076
2077 Ordering::Less => Some((
2078 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2079 String::new(),
2080 )),
2081
2082 Ordering::Equal => None,
2083 }
2084 } else {
2085 Some((
2086 Point::new(row, 0)..Point::new(row, current_size.len),
2087 iter::repeat(new_size.char())
2088 .take(new_size.len as usize)
2089 .collect::<String>(),
2090 ))
2091 }
2092 }
2093
2094 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2095 /// and the given new text.
2096 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2097 let old_text = self.as_rope().clone();
2098 let base_version = self.version();
2099 cx.background_executor()
2100 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2101 let old_text = old_text.to_string();
2102 let line_ending = LineEnding::detect(&new_text);
2103 LineEnding::normalize(&mut new_text);
2104 let edits = text_diff(&old_text, &new_text);
2105 Diff {
2106 base_version,
2107 line_ending,
2108 edits,
2109 }
2110 })
2111 }
2112
2113 /// Spawns a background task that searches the buffer for any whitespace
2114 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2115 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2116 let old_text = self.as_rope().clone();
2117 let line_ending = self.line_ending();
2118 let base_version = self.version();
2119 cx.background_spawn(async move {
2120 let ranges = trailing_whitespace_ranges(&old_text);
2121 let empty = Arc::<str>::from("");
2122 Diff {
2123 base_version,
2124 line_ending,
2125 edits: ranges
2126 .into_iter()
2127 .map(|range| (range, empty.clone()))
2128 .collect(),
2129 }
2130 })
2131 }
2132
2133 /// Ensures that the buffer ends with a single newline character, and
2134 /// no other whitespace. Skips if the buffer is empty.
2135 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2136 let len = self.len();
2137 if len == 0 {
2138 return;
2139 }
2140 let mut offset = len;
2141 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2142 let non_whitespace_len = chunk
2143 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2144 .len();
2145 offset -= chunk.len();
2146 offset += non_whitespace_len;
2147 if non_whitespace_len != 0 {
2148 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2149 return;
2150 }
2151 break;
2152 }
2153 }
2154 self.edit([(offset..len, "\n")], None, cx);
2155 }
2156
2157 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2158 /// calculated, then adjust the diff to account for those changes, and discard any
2159 /// parts of the diff that conflict with those changes.
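///
/// A minimal sketch (not compiled as a doctest), assuming we are inside
/// another `Buffer` method with a `Context<Self>` and a `new_text` string:
///
/// ```ignore
/// // Compute the diff in the background, then apply it once it resolves.
/// // Hunks that conflict with edits made in the meantime are discarded.
/// let diff_task = self.diff(new_text, cx);
/// cx.spawn(async move |this, cx| {
///     let diff = diff_task.await;
///     this.update(cx, |this, cx| {
///         this.apply_diff(diff, cx);
///     })
///     .ok();
/// })
/// .detach();
/// ```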
2160 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2161 let snapshot = self.snapshot();
2162 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2163 let mut delta = 0;
2164 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2165 while let Some(edit_since) = edits_since.peek() {
2166 // If the edit occurs after a diff hunk, then it does not
2167 // affect that hunk.
2168 if edit_since.old.start > range.end {
2169 break;
2170 }
2171 // If the edit precedes the diff hunk, then adjust the hunk
2172 // to reflect the edit.
2173 else if edit_since.old.end < range.start {
2174 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2175 edits_since.next();
2176 }
2177 // If the edit intersects a diff hunk, then discard that hunk.
2178 else {
2179 return None;
2180 }
2181 }
2182
2183 let start = (range.start as i64 + delta) as usize;
2184 let end = (range.end as i64 + delta) as usize;
2185 Some((start..end, new_text))
2186 });
2187
2188 self.start_transaction();
2189 self.text.set_line_ending(diff.line_ending);
2190 self.edit(adjusted_edits, None, cx);
2191 self.end_transaction(cx)
2192 }
2193
2194 pub fn has_unsaved_edits(&self) -> bool {
2195 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2196
2197 if last_version == self.version {
2198 self.has_unsaved_edits
2199 .set((last_version, has_unsaved_edits));
2200 return has_unsaved_edits;
2201 }
2202
2203 let has_edits = self.has_edits_since(&self.saved_version);
2204 self.has_unsaved_edits
2205 .set((self.version.clone(), has_edits));
2206 has_edits
2207 }
2208
2209 /// Checks if the buffer has unsaved changes.
2210 pub fn is_dirty(&self) -> bool {
2211 if self.capability == Capability::ReadOnly {
2212 return false;
2213 }
2214 if self.has_conflict {
2215 return true;
2216 }
2217 match self.file.as_ref().map(|f| f.disk_state()) {
2218 Some(DiskState::New) | Some(DiskState::Deleted) => {
2219 !self.is_empty() && self.has_unsaved_edits()
2220 }
2221 _ => self.has_unsaved_edits(),
2222 }
2223 }
2224
2225 /// Marks the buffer as having a conflict regardless of current buffer state.
2226 pub fn set_conflict(&mut self) {
2227 self.has_conflict = true;
2228 }
2229
2230 /// Checks if the buffer and its file have both changed since the buffer
2231 /// was last saved or reloaded.
2232 pub fn has_conflict(&self) -> bool {
2233 if self.has_conflict {
2234 return true;
2235 }
2236 let Some(file) = self.file.as_ref() else {
2237 return false;
2238 };
2239 match file.disk_state() {
2240 DiskState::New => false,
2241 DiskState::Present { mtime } => match self.saved_mtime {
2242 Some(saved_mtime) => {
2243 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2244 }
2245 None => true,
2246 },
2247 DiskState::Deleted => false,
2248 }
2249 }
2250
2251 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2252 pub fn subscribe(&mut self) -> Subscription<usize> {
2253 self.text.subscribe()
2254 }
2255
2256 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2257 ///
2258 /// This allows downstream code to check if the buffer's text has changed without
2259 /// waiting for an effect cycle, which would be required if using events.
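///
/// A minimal sketch (not compiled as a doctest), assuming `std::rc::Rc` is in
/// scope:
///
/// ```ignore
/// let changed = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&changed));
/// // ...later, without waiting for a `BufferEvent::Edited` event:
/// if changed.get() {
///     changed.set(false);
///     // react to the text change
/// }
/// ```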
2260 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2261 if let Err(ix) = self
2262 .change_bits
2263 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2264 {
2265 self.change_bits.insert(ix, bit);
2266 }
2267 }
2268
2269 /// Set the change bit for all "listeners".
2270 fn was_changed(&mut self) {
2271 self.change_bits.retain(|change_bit| {
2272 change_bit
2273 .upgrade()
2274 .inspect(|bit| {
2275 _ = bit.replace(true);
2276 })
2277 .is_some()
2278 });
2279 }
2280
2281 /// Starts a transaction, if one is not already in-progress. When undoing or
2282 /// redoing edits, all of the edits performed within a transaction are undone
2283 /// or redone together.
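///
/// A minimal sketch (not compiled as a doctest) of grouping two edits into a
/// single undoable transaction, assuming `&mut self` access to an initially
/// empty buffer:
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "fn main() {\n")], None, cx);
/// buffer.edit([(12..12, "}\n")], None, cx);
/// // Undoing `transaction_id` later reverts both edits at once.
/// let transaction_id = buffer.end_transaction(cx);
/// ```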
2284 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2285 self.start_transaction_at(Instant::now())
2286 }
2287
2288 /// Starts a transaction, providing the current time. Subsequent transactions
2289 /// that occur within a short period of time will be grouped together. This
2290 /// is controlled by the buffer's undo grouping duration.
2291 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2292 self.transaction_depth += 1;
2293 if self.was_dirty_before_starting_transaction.is_none() {
2294 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2295 }
2296 self.text.start_transaction_at(now)
2297 }
2298
2299 /// Terminates the current transaction, if this is the outermost transaction.
2300 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2301 self.end_transaction_at(Instant::now(), cx)
2302 }
2303
2304 /// Terminates the current transaction, providing the current time. Subsequent transactions
2305 /// that occur within a short period of time will be grouped together. This
2306 /// is controlled by the buffer's undo grouping duration.
2307 pub fn end_transaction_at(
2308 &mut self,
2309 now: Instant,
2310 cx: &mut Context<Self>,
2311 ) -> Option<TransactionId> {
2312 assert!(self.transaction_depth > 0);
2313 self.transaction_depth -= 1;
2314 let was_dirty = if self.transaction_depth == 0 {
2315 self.was_dirty_before_starting_transaction.take().unwrap()
2316 } else {
2317 false
2318 };
2319 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2320 self.did_edit(&start_version, was_dirty, cx);
2321 Some(transaction_id)
2322 } else {
2323 None
2324 }
2325 }
2326
2327 /// Manually add a transaction to the buffer's undo history.
2328 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2329 self.text.push_transaction(transaction, now);
2330 }
2331
2332 /// Differs from `push_transaction` in that it does not clear the redo
2333 /// stack. Intended to be used to create a parent transaction to merge
2334 /// potential child transactions into.
2335 ///
2336 /// The caller is responsible for removing it from the undo history using
2337 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2338 /// are merged into this transaction, the caller is responsible for ensuring
2339 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2340 /// cleared is to create transactions with the usual `start_transaction` and
2341 /// `end_transaction` methods and merging the resulting transactions into
2342 /// the transaction created by this method.
2343 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2344 self.text.push_empty_transaction(now)
2345 }
2346
2347 /// Prevent the last transaction from being grouped with any subsequent transactions,
2348 /// even if they occur within the buffer's undo grouping duration.
2349 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2350 self.text.finalize_last_transaction()
2351 }
2352
2353 /// Manually group all changes since a given transaction.
2354 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2355 self.text.group_until_transaction(transaction_id);
2356 }
2357
2358 /// Manually remove a transaction from the buffer's undo history.
2359 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2360 self.text.forget_transaction(transaction_id)
2361 }
2362
2363 /// Retrieve a transaction from the buffer's undo history.
2364 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2365 self.text.get_transaction(transaction_id)
2366 }
2367
2368 /// Manually merge two transactions in the buffer's undo history.
2369 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2370 self.text.merge_transactions(transaction, destination);
2371 }
2372
2373 /// Waits for the buffer to receive operations with the given timestamps.
2374 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2375 &mut self,
2376 edit_ids: It,
2377 ) -> impl Future<Output = Result<()>> + use<It> {
2378 self.text.wait_for_edits(edit_ids)
2379 }
2380
2381 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2382 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2383 &mut self,
2384 anchors: It,
2385 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2386 self.text.wait_for_anchors(anchors)
2387 }
2388
2389 /// Waits for the buffer to receive operations up to the given version.
2390 pub fn wait_for_version(
2391 &mut self,
2392 version: clock::Global,
2393 ) -> impl Future<Output = Result<()>> + use<> {
2394 self.text.wait_for_version(version)
2395 }
2396
2397 /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
2398 /// or [`Buffer::wait_for_version`] to resolve with an error.
2399 pub fn give_up_waiting(&mut self) {
2400 self.text.give_up_waiting();
2401 }
2402
2403 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2404 let mut rx = None;
2405 if !self.autoindent_requests.is_empty() {
2406 let channel = oneshot::channel();
2407 self.wait_for_autoindent_txs.push(channel.0);
2408 rx = Some(channel.1);
2409 }
2410 rx
2411 }
2412
2413 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2414 pub fn set_active_selections(
2415 &mut self,
2416 selections: Arc<[Selection<Anchor>]>,
2417 line_mode: bool,
2418 cursor_shape: CursorShape,
2419 cx: &mut Context<Self>,
2420 ) {
2421 let lamport_timestamp = self.text.lamport_clock.tick();
2422 self.remote_selections.insert(
2423 self.text.replica_id(),
2424 SelectionSet {
2425 selections: selections.clone(),
2426 lamport_timestamp,
2427 line_mode,
2428 cursor_shape,
2429 },
2430 );
2431 self.send_operation(
2432 Operation::UpdateSelections {
2433 selections,
2434 line_mode,
2435 lamport_timestamp,
2436 cursor_shape,
2437 },
2438 true,
2439 cx,
2440 );
2441 self.non_text_state_update_count += 1;
2442 cx.notify();
2443 }
2444
2445 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2446 /// this replica.
2447 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2448 if self
2449 .remote_selections
2450 .get(&self.text.replica_id())
2451 .is_none_or(|set| !set.selections.is_empty())
2452 {
2453 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2454 }
2455 }
2456
2457 pub fn set_agent_selections(
2458 &mut self,
2459 selections: Arc<[Selection<Anchor>]>,
2460 line_mode: bool,
2461 cursor_shape: CursorShape,
2462 cx: &mut Context<Self>,
2463 ) {
2464 let lamport_timestamp = self.text.lamport_clock.tick();
2465 self.remote_selections.insert(
2466 ReplicaId::AGENT,
2467 SelectionSet {
2468 selections,
2469 lamport_timestamp,
2470 line_mode,
2471 cursor_shape,
2472 },
2473 );
2474 self.non_text_state_update_count += 1;
2475 cx.notify();
2476 }
2477
2478 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2479 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2480 }
2481
2482 /// Replaces the buffer's entire text.
2483 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2484 where
2485 T: Into<Arc<str>>,
2486 {
2487 self.autoindent_requests.clear();
2488 self.edit([(0..self.len(), text)], None, cx)
2489 }
2490
2491 /// Appends the given text to the end of the buffer.
2492 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2493 where
2494 T: Into<Arc<str>>,
2495 {
2496 self.edit([(self.len()..self.len(), text)], None, cx)
2497 }
2498
2499 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2500 /// delete, and a string of text to insert at that location.
2501 ///
2502 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2503 /// request for the edited ranges, which will be processed when the buffer finishes
2504 /// parsing.
2505 ///
2506 /// Parsing takes place at the end of a transaction, and may compute synchronously
2507 /// or asynchronously, depending on the changes.
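///
/// A minimal sketch (not compiled as a doctest) with hypothetical byte
/// offsets, requesting auto-indentation for each edited line:
///
/// ```ignore
/// buffer.edit(
///     [
///         // Replace the three bytes at 4..7 with "bar".
///         (4..7, "bar"),
///         // Insert a new statement on its own line at offset 20.
///         (20..20, "\n    baz();"),
///     ],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```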
2508 pub fn edit<I, S, T>(
2509 &mut self,
2510 edits_iter: I,
2511 autoindent_mode: Option<AutoindentMode>,
2512 cx: &mut Context<Self>,
2513 ) -> Option<clock::Lamport>
2514 where
2515 I: IntoIterator<Item = (Range<S>, T)>,
2516 S: ToOffset,
2517 T: Into<Arc<str>>,
2518 {
2519 // Skip invalid edits and coalesce contiguous ones.
2520 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2521
2522 for (range, new_text) in edits_iter {
2523 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2524
2525 if range.start > range.end {
2526 mem::swap(&mut range.start, &mut range.end);
2527 }
2528 let new_text = new_text.into();
2529 if !new_text.is_empty() || !range.is_empty() {
2530 if let Some((prev_range, prev_text)) = edits.last_mut()
2531 && prev_range.end >= range.start
2532 {
2533 prev_range.end = cmp::max(prev_range.end, range.end);
2534 *prev_text = format!("{prev_text}{new_text}").into();
2535 } else {
2536 edits.push((range, new_text));
2537 }
2538 }
2539 }
2540 if edits.is_empty() {
2541 return None;
2542 }
2543
2544 self.start_transaction();
2545 self.pending_autoindent.take();
2546 let autoindent_request = autoindent_mode
2547 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2548
2549 let edit_operation = self.text.edit(edits.iter().cloned());
2550 let edit_id = edit_operation.timestamp();
2551
2552 if let Some((before_edit, mode)) = autoindent_request {
2553 let mut delta = 0isize;
2554 let mut previous_setting = None;
2555 let entries: Vec<_> = edits
2556 .into_iter()
2557 .enumerate()
2558 .zip(&edit_operation.as_edit().unwrap().new_text)
2559 .filter(|((_, (range, _)), _)| {
2560 let language = before_edit.language_at(range.start);
2561 let language_id = language.map(|l| l.id());
2562 if let Some((cached_language_id, auto_indent)) = previous_setting
2563 && cached_language_id == language_id
2564 {
2565 auto_indent
2566 } else {
2567 // The auto-indent setting is not present in editorconfigs, hence
2568 // we can avoid passing the file here.
2569 let auto_indent =
2570 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2571 previous_setting = Some((language_id, auto_indent));
2572 auto_indent
2573 }
2574 })
2575 .map(|((ix, (range, _)), new_text)| {
2576 let new_text_length = new_text.len();
2577 let old_start = range.start.to_point(&before_edit);
2578 let new_start = (delta + range.start as isize) as usize;
2579 let range_len = range.end - range.start;
2580 delta += new_text_length as isize - range_len as isize;
2581
2582 // Decide what range of the insertion to auto-indent, and whether
2583 // the first line of the insertion should be considered a newly-inserted line
2584 // or an edit to an existing line.
2585 let mut range_of_insertion_to_indent = 0..new_text_length;
2586 let mut first_line_is_new = true;
2587
2588 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2589 let old_line_end = before_edit.line_len(old_start.row);
2590
2591 if old_start.column > old_line_start {
2592 first_line_is_new = false;
2593 }
2594
2595 if !new_text.contains('\n')
2596 && (old_start.column + (range_len as u32) < old_line_end
2597 || old_line_end == old_line_start)
2598 {
2599 first_line_is_new = false;
2600 }
2601
2602 // When inserting text starting with a newline, avoid auto-indenting the
2603 // previous line.
2604 if new_text.starts_with('\n') {
2605 range_of_insertion_to_indent.start += 1;
2606 first_line_is_new = true;
2607 }
2608
2609 let mut original_indent_column = None;
2610 if let AutoindentMode::Block {
2611 original_indent_columns,
2612 } = &mode
2613 {
2614 original_indent_column = Some(if new_text.starts_with('\n') {
2615 indent_size_for_text(
2616 new_text[range_of_insertion_to_indent.clone()].chars(),
2617 )
2618 .len
2619 } else {
2620 original_indent_columns
2621 .get(ix)
2622 .copied()
2623 .flatten()
2624 .unwrap_or_else(|| {
2625 indent_size_for_text(
2626 new_text[range_of_insertion_to_indent.clone()].chars(),
2627 )
2628 .len
2629 })
2630 });
2631
2632 // Avoid auto-indenting the line after the edit.
2633 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2634 range_of_insertion_to_indent.end -= 1;
2635 }
2636 }
2637
2638 AutoindentRequestEntry {
2639 first_line_is_new,
2640 original_indent_column,
2641 indent_size: before_edit.language_indent_size_at(range.start, cx),
2642 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2643 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2644 }
2645 })
2646 .collect();
2647
2648 if !entries.is_empty() {
2649 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2650 before_edit,
2651 entries,
2652 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2653 ignore_empty_lines: false,
2654 }));
2655 }
2656 }
2657
2658 self.end_transaction(cx);
2659 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2660 Some(edit_id)
2661 }
2662
2663 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2664 self.was_changed();
2665
2666 if self.edits_since::<usize>(old_version).next().is_none() {
2667 return;
2668 }
2669
2670 self.reparse(cx, true);
2671 cx.emit(BufferEvent::Edited);
2672 if was_dirty != self.is_dirty() {
2673 cx.emit(BufferEvent::DirtyChanged);
2674 }
2675 cx.notify();
2676 }
2677
2678 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2679 where
2680 I: IntoIterator<Item = Range<T>>,
2681 T: ToOffset + Copy,
2682 {
2683 let before_edit = self.snapshot();
2684 let entries = ranges
2685 .into_iter()
2686 .map(|range| AutoindentRequestEntry {
2687 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2688 first_line_is_new: true,
2689 indent_size: before_edit.language_indent_size_at(range.start, cx),
2690 original_indent_column: None,
2691 })
2692 .collect();
2693 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2694 before_edit,
2695 entries,
2696 is_block_mode: false,
2697 ignore_empty_lines: true,
2698 }));
2699 self.request_autoindent(cx);
2700 }
2701
2702 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2703 // You can also request the insertion of empty lines above and below the line starting at the returned point.
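//
// A minimal usage sketch (hypothetical `cursor` position), e.g. while
// implementing an "insert line below" command:
//
//     let new_line_start = buffer.insert_empty_line(cursor, true, true, cx);
//     // `new_line_start` is column 0 of the freshly inserted blank line.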
2704 pub fn insert_empty_line(
2705 &mut self,
2706 position: impl ToPoint,
2707 space_above: bool,
2708 space_below: bool,
2709 cx: &mut Context<Self>,
2710 ) -> Point {
2711 let mut position = position.to_point(self);
2712
2713 self.start_transaction();
2714
2715 self.edit(
2716 [(position..position, "\n")],
2717 Some(AutoindentMode::EachLine),
2718 cx,
2719 );
2720
2721 if position.column > 0 {
2722 position += Point::new(1, 0);
2723 }
2724
2725 if !self.is_line_blank(position.row) {
2726 self.edit(
2727 [(position..position, "\n")],
2728 Some(AutoindentMode::EachLine),
2729 cx,
2730 );
2731 }
2732
2733 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2734 self.edit(
2735 [(position..position, "\n")],
2736 Some(AutoindentMode::EachLine),
2737 cx,
2738 );
2739 position.row += 1;
2740 }
2741
2742 if space_below
2743 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2744 {
2745 self.edit(
2746 [(position..position, "\n")],
2747 Some(AutoindentMode::EachLine),
2748 cx,
2749 );
2750 }
2751
2752 self.end_transaction(cx);
2753
2754 position
2755 }
2756
2757 /// Applies the given remote operations to the buffer.
2758 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2759 self.pending_autoindent.take();
2760 let was_dirty = self.is_dirty();
2761 let old_version = self.version.clone();
2762 let mut deferred_ops = Vec::new();
2763 let buffer_ops = ops
2764 .into_iter()
2765 .filter_map(|op| match op {
2766 Operation::Buffer(op) => Some(op),
2767 _ => {
2768 if self.can_apply_op(&op) {
2769 self.apply_op(op, cx);
2770 } else {
2771 deferred_ops.push(op);
2772 }
2773 None
2774 }
2775 })
2776 .collect::<Vec<_>>();
2777 for operation in buffer_ops.iter() {
2778 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2779 }
2780 self.text.apply_ops(buffer_ops);
2781 self.deferred_ops.insert(deferred_ops);
2782 self.flush_deferred_ops(cx);
2783 self.did_edit(&old_version, was_dirty, cx);
2784 // Notify independently of whether the buffer was edited as the operations could include a
2785 // selection update.
2786 cx.notify();
2787 }
2788
2789 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2790 let mut deferred_ops = Vec::new();
2791 for op in self.deferred_ops.drain().iter().cloned() {
2792 if self.can_apply_op(&op) {
2793 self.apply_op(op, cx);
2794 } else {
2795 deferred_ops.push(op);
2796 }
2797 }
2798 self.deferred_ops.insert(deferred_ops);
2799 }
2800
2801 pub fn has_deferred_ops(&self) -> bool {
2802 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2803 }
2804
2805 fn can_apply_op(&self, operation: &Operation) -> bool {
2806 match operation {
2807 Operation::Buffer(_) => {
2808 unreachable!("buffer operations should never be applied at this layer")
2809 }
2810 Operation::UpdateDiagnostics {
2811 diagnostics: diagnostic_set,
2812 ..
2813 } => diagnostic_set.iter().all(|diagnostic| {
2814 self.text.can_resolve(&diagnostic.range.start)
2815 && self.text.can_resolve(&diagnostic.range.end)
2816 }),
2817 Operation::UpdateSelections { selections, .. } => selections
2818 .iter()
2819 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2820 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2821 }
2822 }
2823
2824 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2825 match operation {
2826 Operation::Buffer(_) => {
2827 unreachable!("buffer operations should never be applied at this layer")
2828 }
2829 Operation::UpdateDiagnostics {
2830 server_id,
2831 diagnostics: diagnostic_set,
2832 lamport_timestamp,
2833 } => {
2834 let snapshot = self.snapshot();
2835 self.apply_diagnostic_update(
2836 server_id,
2837 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2838 lamport_timestamp,
2839 cx,
2840 );
2841 }
2842 Operation::UpdateSelections {
2843 selections,
2844 lamport_timestamp,
2845 line_mode,
2846 cursor_shape,
2847 } => {
2848 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2849 && set.lamport_timestamp > lamport_timestamp
2850 {
2851 return;
2852 }
2853
2854 self.remote_selections.insert(
2855 lamport_timestamp.replica_id,
2856 SelectionSet {
2857 selections,
2858 lamport_timestamp,
2859 line_mode,
2860 cursor_shape,
2861 },
2862 );
2863 self.text.lamport_clock.observe(lamport_timestamp);
2864 self.non_text_state_update_count += 1;
2865 }
2866 Operation::UpdateCompletionTriggers {
2867 triggers,
2868 lamport_timestamp,
2869 server_id,
2870 } => {
2871 if triggers.is_empty() {
2872 self.completion_triggers_per_language_server
2873 .remove(&server_id);
2874 self.completion_triggers = self
2875 .completion_triggers_per_language_server
2876 .values()
2877 .flat_map(|triggers| triggers.iter().cloned())
2878 .collect();
2879 } else {
2880 self.completion_triggers_per_language_server
2881 .insert(server_id, triggers.iter().cloned().collect());
2882 self.completion_triggers.extend(triggers);
2883 }
2884 self.text.lamport_clock.observe(lamport_timestamp);
2885 }
2886 Operation::UpdateLineEnding {
2887 line_ending,
2888 lamport_timestamp,
2889 } => {
2890 self.text.set_line_ending(line_ending);
2891 self.text.lamport_clock.observe(lamport_timestamp);
2892 }
2893 }
2894 }
2895
2896 fn apply_diagnostic_update(
2897 &mut self,
2898 server_id: LanguageServerId,
2899 diagnostics: DiagnosticSet,
2900 lamport_timestamp: clock::Lamport,
2901 cx: &mut Context<Self>,
2902 ) {
2903 if lamport_timestamp > self.diagnostics_timestamp {
2904 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2905 if diagnostics.is_empty() {
2906 if let Ok(ix) = ix {
2907 self.diagnostics.remove(ix);
2908 }
2909 } else {
2910 match ix {
2911 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2912 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2913 };
2914 }
2915 self.diagnostics_timestamp = lamport_timestamp;
2916 self.non_text_state_update_count += 1;
2917 self.text.lamport_clock.observe(lamport_timestamp);
2918 cx.notify();
2919 cx.emit(BufferEvent::DiagnosticsUpdated);
2920 }
2921 }
2922
2923 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2924 self.was_changed();
2925 cx.emit(BufferEvent::Operation {
2926 operation,
2927 is_local,
2928 });
2929 }
2930
2931 /// Removes the selections for a given peer.
2932 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2933 self.remote_selections.remove(&replica_id);
2934 cx.notify();
2935 }
2936
2937 /// Undoes the most recent transaction.
2938 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2939 let was_dirty = self.is_dirty();
2940 let old_version = self.version.clone();
2941
2942 if let Some((transaction_id, operation)) = self.text.undo() {
2943 self.send_operation(Operation::Buffer(operation), true, cx);
2944 self.did_edit(&old_version, was_dirty, cx);
2945 Some(transaction_id)
2946 } else {
2947 None
2948 }
2949 }
2950
2951 /// Manually undoes a specific transaction in the buffer's undo history.
2952 pub fn undo_transaction(
2953 &mut self,
2954 transaction_id: TransactionId,
2955 cx: &mut Context<Self>,
2956 ) -> bool {
2957 let was_dirty = self.is_dirty();
2958 let old_version = self.version.clone();
2959 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2960 self.send_operation(Operation::Buffer(operation), true, cx);
2961 self.did_edit(&old_version, was_dirty, cx);
2962 true
2963 } else {
2964 false
2965 }
2966 }
2967
2968 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2969 pub fn undo_to_transaction(
2970 &mut self,
2971 transaction_id: TransactionId,
2972 cx: &mut Context<Self>,
2973 ) -> bool {
2974 let was_dirty = self.is_dirty();
2975 let old_version = self.version.clone();
2976
2977 let operations = self.text.undo_to_transaction(transaction_id);
2978 let undone = !operations.is_empty();
2979 for operation in operations {
2980 self.send_operation(Operation::Buffer(operation), true, cx);
2981 }
2982 if undone {
2983 self.did_edit(&old_version, was_dirty, cx)
2984 }
2985 undone
2986 }
2987
2988 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2989 let was_dirty = self.is_dirty();
2990 let operation = self.text.undo_operations(counts);
2991 let old_version = self.version.clone();
2992 self.send_operation(Operation::Buffer(operation), true, cx);
2993 self.did_edit(&old_version, was_dirty, cx);
2994 }
2995
2996 /// Redoes the most recently undone transaction.
2997 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2998 let was_dirty = self.is_dirty();
2999 let old_version = self.version.clone();
3000
3001 if let Some((transaction_id, operation)) = self.text.redo() {
3002 self.send_operation(Operation::Buffer(operation), true, cx);
3003 self.did_edit(&old_version, was_dirty, cx);
3004 Some(transaction_id)
3005 } else {
3006 None
3007 }
3008 }
3009
3010 /// Manually redoes all changes until a given transaction in the buffer's redo history.
3011 pub fn redo_to_transaction(
3012 &mut self,
3013 transaction_id: TransactionId,
3014 cx: &mut Context<Self>,
3015 ) -> bool {
3016 let was_dirty = self.is_dirty();
3017 let old_version = self.version.clone();
3018
3019 let operations = self.text.redo_to_transaction(transaction_id);
3020 let redone = !operations.is_empty();
3021 for operation in operations {
3022 self.send_operation(Operation::Buffer(operation), true, cx);
3023 }
3024 if redone {
3025 self.did_edit(&old_version, was_dirty, cx)
3026 }
3027 redone
3028 }
3029
3030 /// Override current completion triggers with the user-provided completion triggers.
3031 pub fn set_completion_triggers(
3032 &mut self,
3033 server_id: LanguageServerId,
3034 triggers: BTreeSet<String>,
3035 cx: &mut Context<Self>,
3036 ) {
3037 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3038 if triggers.is_empty() {
3039 self.completion_triggers_per_language_server
3040 .remove(&server_id);
3041 self.completion_triggers = self
3042 .completion_triggers_per_language_server
3043 .values()
3044 .flat_map(|triggers| triggers.iter().cloned())
3045 .collect();
3046 } else {
3047 self.completion_triggers_per_language_server
3048 .insert(server_id, triggers.clone());
3049 self.completion_triggers.extend(triggers.iter().cloned());
3050 }
3051 self.send_operation(
3052 Operation::UpdateCompletionTriggers {
3053 triggers: triggers.into_iter().collect(),
3054 lamport_timestamp: self.completion_triggers_timestamp,
3055 server_id,
3056 },
3057 true,
3058 cx,
3059 );
3060 cx.notify();
3061 }
3062
3063 /// Returns a list of strings which trigger a completion menu for this language.
3064 /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
3065 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3066 &self.completion_triggers
3067 }
3068
3069 /// Call this directly after performing edits to prevent the preview tab
3070 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3071 /// to return false until there are additional edits.
3072 pub fn refresh_preview(&mut self) {
3073 self.preview_version = self.version.clone();
3074 }
3075
3076 /// Whether we should preserve the preview status of a tab containing this buffer.
3077 pub fn preserve_preview(&self) -> bool {
3078 !self.has_edits_since(&self.preview_version)
3079 }
3080}
3081
3082#[doc(hidden)]
3083#[cfg(any(test, feature = "test-support"))]
3084impl Buffer {
3085 pub fn edit_via_marked_text(
3086 &mut self,
3087 marked_string: &str,
3088 autoindent_mode: Option<AutoindentMode>,
3089 cx: &mut Context<Self>,
3090 ) {
3091 let edits = self.edits_for_marked_text(marked_string);
3092 self.edit(edits, autoindent_mode, cx);
3093 }
3094
3095 pub fn set_group_interval(&mut self, group_interval: Duration) {
3096 self.text.set_group_interval(group_interval);
3097 }
3098
3099 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3100 where
3101 T: rand::Rng,
3102 {
3103 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3104 let mut last_end = None;
3105 for _ in 0..old_range_count {
3106 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3107 break;
3108 }
3109
3110 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3111 let mut range = self.random_byte_range(new_start, rng);
3112 if rng.random_bool(0.2) {
3113 mem::swap(&mut range.start, &mut range.end);
3114 }
3115 last_end = Some(range.end);
3116
3117 let new_text_len = rng.random_range(0..10);
3118 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3119 new_text = new_text.to_uppercase();
3120
3121 edits.push((range, new_text));
3122 }
3123 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3124 self.edit(edits, None, cx);
3125 }
3126
3127 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3128 let was_dirty = self.is_dirty();
3129 let old_version = self.version.clone();
3130
3131 let ops = self.text.randomly_undo_redo(rng);
3132 if !ops.is_empty() {
3133 for op in ops {
3134 self.send_operation(Operation::Buffer(op), true, cx);
3135 self.did_edit(&old_version, was_dirty, cx);
3136 }
3137 }
3138 }
3139}
3140
3141impl EventEmitter<BufferEvent> for Buffer {}
3142
3143impl Deref for Buffer {
3144 type Target = TextBuffer;
3145
3146 fn deref(&self) -> &Self::Target {
3147 &self.text
3148 }
3149}
3150
3151impl BufferSnapshot {
3152 /// Returns [`IndentSize`] for a given line that respects user settings and
3153 /// language preferences.
3154 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3155 indent_size_for_line(self, row)
3156 }
3157
3158 /// Returns [`IndentSize`] for a given position that respects user settings
3159 /// and language preferences.
3160 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3161 let settings = language_settings(
3162 self.language_at(position).map(|l| l.name()),
3163 self.file(),
3164 cx,
3165 );
3166 if settings.hard_tabs {
3167 IndentSize::tab()
3168 } else {
3169 IndentSize::spaces(settings.tab_size.get())
3170 }
3171 }
3172
3173 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3174 /// is passed in as `single_indent_size`.
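///
/// A minimal sketch (not compiled as a doctest), given a `BufferSnapshot`
/// named `snapshot` and a four-space indent unit:
///
/// ```ignore
/// let indents = snapshot.suggested_indents(2..=4, IndentSize::spaces(4));
/// for (row, indent) in indents {
///     println!("row {row}: indent to {} columns", indent.len);
/// }
/// ```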
3175 pub fn suggested_indents(
3176 &self,
3177 rows: impl Iterator<Item = u32>,
3178 single_indent_size: IndentSize,
3179 ) -> BTreeMap<u32, IndentSize> {
3180 let mut result = BTreeMap::new();
3181
3182 for row_range in contiguous_ranges(rows, 10) {
3183 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3184 Some(suggestions) => suggestions,
3185 _ => break,
3186 };
3187
3188 for (row, suggestion) in row_range.zip(suggestions) {
3189 let indent_size = if let Some(suggestion) = suggestion {
3190 result
3191 .get(&suggestion.basis_row)
3192 .copied()
3193 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3194 .with_delta(suggestion.delta, single_indent_size)
3195 } else {
3196 self.indent_size_for_line(row)
3197 };
3198
3199 result.insert(row, indent_size);
3200 }
3201 }
3202
3203 result
3204 }
3205
3206 fn suggest_autoindents(
3207 &self,
3208 row_range: Range<u32>,
3209 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3210 let config = &self.language.as_ref()?.config;
3211 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3212
3213 #[derive(Debug, Clone)]
3214 struct StartPosition {
3215 start: Point,
3216 suffix: SharedString,
3217 }
3218
3219 // Find the suggested indentation ranges based on the syntax tree.
3220 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3221 let end = Point::new(row_range.end, 0);
3222 let range = (start..end).to_offset(&self.text);
3223 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3224 Some(&grammar.indents_config.as_ref()?.query)
3225 });
3226 let indent_configs = matches
3227 .grammars()
3228 .iter()
3229 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3230 .collect::<Vec<_>>();
3231
3232 let mut indent_ranges = Vec::<Range<Point>>::new();
3233 let mut start_positions = Vec::<StartPosition>::new();
3234 let mut outdent_positions = Vec::<Point>::new();
3235 while let Some(mat) = matches.peek() {
3236 let mut start: Option<Point> = None;
3237 let mut end: Option<Point> = None;
3238
3239 let config = indent_configs[mat.grammar_index];
3240 for capture in mat.captures {
3241 if capture.index == config.indent_capture_ix {
3242 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3243 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3244 } else if Some(capture.index) == config.start_capture_ix {
3245 start = Some(Point::from_ts_point(capture.node.end_position()));
3246 } else if Some(capture.index) == config.end_capture_ix {
3247 end = Some(Point::from_ts_point(capture.node.start_position()));
3248 } else if Some(capture.index) == config.outdent_capture_ix {
3249 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3250 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3251 start_positions.push(StartPosition {
3252 start: Point::from_ts_point(capture.node.start_position()),
3253 suffix: suffix.clone(),
3254 });
3255 }
3256 }
3257
3258 matches.advance();
3259 if let Some((start, end)) = start.zip(end) {
3260 if start.row == end.row {
3261 continue;
3262 }
3263 let range = start..end;
3264 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3265 Err(ix) => indent_ranges.insert(ix, range),
3266 Ok(ix) => {
3267 let prev_range = &mut indent_ranges[ix];
3268 prev_range.end = prev_range.end.max(range.end);
3269 }
3270 }
3271 }
3272 }
3273
3274 let mut error_ranges = Vec::<Range<Point>>::new();
3275 let mut matches = self
3276 .syntax
3277 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3278 while let Some(mat) = matches.peek() {
3279 let node = mat.captures[0].node;
3280 let start = Point::from_ts_point(node.start_position());
3281 let end = Point::from_ts_point(node.end_position());
3282 let range = start..end;
3283 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3284 Ok(ix) | Err(ix) => ix,
3285 };
3286 let mut end_ix = ix;
3287 while let Some(existing_range) = error_ranges.get(end_ix) {
3288 if existing_range.end < end {
3289 end_ix += 1;
3290 } else {
3291 break;
3292 }
3293 }
3294 error_ranges.splice(ix..end_ix, [range]);
3295 matches.advance();
3296 }
3297
3298 outdent_positions.sort();
3299 for outdent_position in outdent_positions {
3300 // Find the innermost indent range containing this outdent position,
3301 // and set its end to the outdent position.
3302 if let Some(range_to_truncate) = indent_ranges
3303 .iter_mut()
3304 .filter(|indent_range| indent_range.contains(&outdent_position))
3305 .next_back()
3306 {
3307 range_to_truncate.end = outdent_position;
3308 }
3309 }
3310
3311 start_positions.sort_by_key(|b| b.start);
3312
3313 // Find the suggested indentation increases and decreases based on regexes.
3314 let mut regex_outdent_map = HashMap::default();
3315 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3316 let mut start_positions_iter = start_positions.iter().peekable();
3317
3318 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3319 self.for_each_line(
3320 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3321 ..Point::new(row_range.end, 0),
3322 |row, line| {
3323 if config
3324 .decrease_indent_pattern
3325 .as_ref()
3326 .is_some_and(|regex| regex.is_match(line))
3327 {
3328 indent_change_rows.push((row, Ordering::Less));
3329 }
3330 if config
3331 .increase_indent_pattern
3332 .as_ref()
3333 .is_some_and(|regex| regex.is_match(line))
3334 {
3335 indent_change_rows.push((row + 1, Ordering::Greater));
3336 }
3337 while let Some(pos) = start_positions_iter.peek() {
3338 if pos.start.row < row {
3339 let pos = start_positions_iter.next().unwrap();
3340 last_seen_suffix
3341 .entry(pos.suffix.to_string())
3342 .or_default()
3343 .push(pos.start);
3344 } else {
3345 break;
3346 }
3347 }
3348 for rule in &config.decrease_indent_patterns {
3349 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3350 let row_start_column = self.indent_size_for_line(row).len;
3351 let basis_row = rule
3352 .valid_after
3353 .iter()
3354 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3355 .flatten()
3356 .filter(|start_point| start_point.column <= row_start_column)
3357 .max_by_key(|start_point| start_point.row);
3358 if let Some(outdent_to_row) = basis_row {
3359 regex_outdent_map.insert(row, outdent_to_row.row);
3360 }
3361 break;
3362 }
3363 }
3364 },
3365 );
3366
3367 let mut indent_changes = indent_change_rows.into_iter().peekable();
3368 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3369 prev_non_blank_row.unwrap_or(0)
3370 } else {
3371 row_range.start.saturating_sub(1)
3372 };
3373
3374 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3375 Some(row_range.map(move |row| {
3376 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3377
3378 let mut indent_from_prev_row = false;
3379 let mut outdent_from_prev_row = false;
3380 let mut outdent_to_row = u32::MAX;
3381 let mut from_regex = false;
3382
3383 while let Some((indent_row, delta)) = indent_changes.peek() {
3384 match indent_row.cmp(&row) {
3385 Ordering::Equal => match delta {
3386 Ordering::Less => {
3387 from_regex = true;
3388 outdent_from_prev_row = true
3389 }
3390 Ordering::Greater => {
3391 indent_from_prev_row = true;
3392 from_regex = true
3393 }
3394 _ => {}
3395 },
3396
3397 Ordering::Greater => break,
3398 Ordering::Less => {}
3399 }
3400
3401 indent_changes.next();
3402 }
3403
3404 for range in &indent_ranges {
3405 if range.start.row >= row {
3406 break;
3407 }
3408 if range.start.row == prev_row && range.end > row_start {
3409 indent_from_prev_row = true;
3410 }
3411 if range.end > prev_row_start && range.end <= row_start {
3412 outdent_to_row = outdent_to_row.min(range.start.row);
3413 }
3414 }
3415
3416 if let Some(basis_row) = regex_outdent_map.get(&row) {
3417 indent_from_prev_row = false;
3418 outdent_to_row = *basis_row;
3419 from_regex = true;
3420 }
3421
3422 let within_error = error_ranges
3423 .iter()
3424 .any(|e| e.start.row < row && e.end > row_start);
3425
3426 let suggestion = if outdent_to_row == prev_row
3427 || (outdent_from_prev_row && indent_from_prev_row)
3428 {
3429 Some(IndentSuggestion {
3430 basis_row: prev_row,
3431 delta: Ordering::Equal,
3432 within_error: within_error && !from_regex,
3433 })
3434 } else if indent_from_prev_row {
3435 Some(IndentSuggestion {
3436 basis_row: prev_row,
3437 delta: Ordering::Greater,
3438 within_error: within_error && !from_regex,
3439 })
3440 } else if outdent_to_row < prev_row {
3441 Some(IndentSuggestion {
3442 basis_row: outdent_to_row,
3443 delta: Ordering::Equal,
3444 within_error: within_error && !from_regex,
3445 })
3446 } else if outdent_from_prev_row {
3447 Some(IndentSuggestion {
3448 basis_row: prev_row,
3449 delta: Ordering::Less,
3450 within_error: within_error && !from_regex,
3451 })
3452 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3453 {
3454 Some(IndentSuggestion {
3455 basis_row: prev_row,
3456 delta: Ordering::Equal,
3457 within_error: within_error && !from_regex,
3458 })
3459 } else {
3460 None
3461 };
3462
3463 prev_row = row;
3464 prev_row_start = row_start;
3465 suggestion
3466 }))
3467 }
3468
3469 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3470 while row > 0 {
3471 row -= 1;
3472 if !self.is_line_blank(row) {
3473 return Some(row);
3474 }
3475 }
3476 None
3477 }
3478
3479 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3480 let captures = self.syntax.captures(range, &self.text, |grammar| {
3481 grammar
3482 .highlights_config
3483 .as_ref()
3484 .map(|config| &config.query)
3485 });
3486 let highlight_maps = captures
3487 .grammars()
3488 .iter()
3489 .map(|grammar| grammar.highlight_map())
3490 .collect();
3491 (captures, highlight_maps)
3492 }
3493
    /// Iterates over chunks of text in the given range of the buffer. Chunk
    /// boundaries are arbitrary, since the text is stored in a [`Rope`](text::Rope),
    /// but each returned chunk has a single syntax highlighting style and
    /// diagnostic status.
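    ///
    /// A usage sketch (not compiled as a doc test), assuming a `snapshot: BufferSnapshot`
    /// is already in scope:
    ///
    /// ```ignore
    /// // Sum the lengths of the chunks that carry a syntax highlight id.
    /// let mut highlighted_len = 0;
    /// for chunk in snapshot.chunks(0..snapshot.len().min(100), true) {
    ///     if chunk.syntax_highlight_id.is_some() {
    ///         highlighted_len += chunk.text.len();
    ///     }
    /// }
    /// ```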
3498 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3499 let range = range.start.to_offset(self)..range.end.to_offset(self);
3500
3501 let mut syntax = None;
3502 if language_aware {
3503 syntax = Some(self.get_highlights(range.clone()));
3504 }
3505 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3506 let diagnostics = language_aware;
3507 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3508 }
3509
3510 pub fn highlighted_text_for_range<T: ToOffset>(
3511 &self,
3512 range: Range<T>,
3513 override_style: Option<HighlightStyle>,
3514 syntax_theme: &SyntaxTheme,
3515 ) -> HighlightedText {
3516 HighlightedText::from_buffer_range(
3517 range,
3518 &self.text,
3519 &self.syntax,
3520 override_style,
3521 syntax_theme,
3522 )
3523 }
3524
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
3527 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3528 let mut line = String::new();
3529 let mut row = range.start.row;
3530 for chunk in self
3531 .as_rope()
3532 .chunks_in_range(range.to_offset(self))
3533 .chain(["\n"])
3534 {
3535 for (newline_ix, text) in chunk.split('\n').enumerate() {
3536 if newline_ix > 0 {
3537 callback(row, &line);
3538 row += 1;
3539 line.clear();
3540 }
3541 line.push_str(text);
3542 }
3543 }
3544 }
3545
3546 /// Iterates over every [`SyntaxLayer`] in the buffer.
3547 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3548 self.syntax_layers_for_range(0..self.len(), true)
3549 }
3550
3551 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3552 let offset = position.to_offset(self);
3553 self.syntax_layers_for_range(offset..offset, false)
3554 .filter(|l| {
3555 if let Some(ranges) = l.included_sub_ranges {
3556 ranges.iter().any(|range| {
3557 let start = range.start.to_offset(self);
3558 start <= offset && {
3559 let end = range.end.to_offset(self);
3560 offset < end
3561 }
3562 })
3563 } else {
3564 l.node().start_byte() <= offset && l.node().end_byte() > offset
3565 }
3566 })
3567 .last()
3568 }
3569
3570 pub fn syntax_layers_for_range<D: ToOffset>(
3571 &self,
3572 range: Range<D>,
3573 include_hidden: bool,
3574 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3575 self.syntax
3576 .layers_for_range(range, &self.text, include_hidden)
3577 }
3578
3579 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3580 &self,
3581 range: Range<D>,
3582 ) -> Option<SyntaxLayer<'_>> {
3583 let range = range.to_offset(self);
3584 self.syntax
3585 .layers_for_range(range, &self.text, false)
3586 .max_by(|a, b| {
3587 if a.depth != b.depth {
3588 a.depth.cmp(&b.depth)
3589 } else if a.offset.0 != b.offset.0 {
3590 a.offset.0.cmp(&b.offset.0)
3591 } else {
3592 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3593 }
3594 })
3595 }
3596
3597 /// Returns the main [`Language`].
3598 pub fn language(&self) -> Option<&Arc<Language>> {
3599 self.language.as_ref()
3600 }
3601
3602 /// Returns the [`Language`] at the given location.
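    ///
    /// A usage sketch (not compiled as a doc test), assuming `snapshot` and an `offset` are in scope:
    ///
    /// ```ignore
    /// // Inside an injected block (e.g. a fenced code block in Markdown), this returns the
    /// // injected language rather than the buffer's main language.
    /// let language_name = snapshot.language_at(offset).map(|language| language.name());
    /// ```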
3603 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3604 self.syntax_layer_at(position)
3605 .map(|info| info.language)
3606 .or(self.language.as_ref())
3607 }
3608
3609 /// Returns the settings for the language at the given location.
3610 pub fn settings_at<'a, D: ToOffset>(
3611 &'a self,
3612 position: D,
3613 cx: &'a App,
3614 ) -> Cow<'a, LanguageSettings> {
3615 language_settings(
3616 self.language_at(position).map(|l| l.name()),
3617 self.file.as_ref(),
3618 cx,
3619 )
3620 }
3621
3622 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3623 CharClassifier::new(self.language_scope_at(point))
3624 }
3625
3626 /// Returns the [`LanguageScope`] at the given location.
3627 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3628 let offset = position.to_offset(self);
3629 let mut scope = None;
3630 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3631
3632 // Use the layer that has the smallest node intersecting the given point.
3633 for layer in self
3634 .syntax
3635 .layers_for_range(offset..offset, &self.text, false)
3636 {
3637 let mut cursor = layer.node().walk();
3638
3639 let mut range = None;
3640 loop {
3641 let child_range = cursor.node().byte_range();
3642 if !child_range.contains(&offset) {
3643 break;
3644 }
3645
3646 range = Some(child_range);
3647 if cursor.goto_first_child_for_byte(offset).is_none() {
3648 break;
3649 }
3650 }
3651
3652 if let Some(range) = range
3653 && smallest_range_and_depth.as_ref().is_none_or(
3654 |(smallest_range, smallest_range_depth)| {
3655 if layer.depth > *smallest_range_depth {
3656 true
3657 } else if layer.depth == *smallest_range_depth {
3658 range.len() < smallest_range.len()
3659 } else {
3660 false
3661 }
3662 },
3663 )
3664 {
3665 smallest_range_and_depth = Some((range, layer.depth));
3666 scope = Some(LanguageScope {
3667 language: layer.language.clone(),
3668 override_id: layer.override_id(offset, &self.text),
3669 });
3670 }
3671 }
3672
3673 scope.or_else(|| {
3674 self.language.clone().map(|language| LanguageScope {
3675 language,
3676 override_id: None,
3677 })
3678 })
3679 }
3680
3681 /// Returns a tuple of the range and character kind of the word
3682 /// surrounding the given position.
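    ///
    /// A usage sketch (not compiled as a doc test), assuming `snapshot` and an `offset` are in scope:
    ///
    /// ```ignore
    /// let (word_range, kind) = snapshot.surrounding_word(offset, None);
    /// let word: String = snapshot.text_for_range(word_range).collect();
    /// ```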
3683 pub fn surrounding_word<T: ToOffset>(
3684 &self,
3685 start: T,
3686 scope_context: Option<CharScopeContext>,
3687 ) -> (Range<usize>, Option<CharKind>) {
3688 let mut start = start.to_offset(self);
3689 let mut end = start;
3690 let mut next_chars = self.chars_at(start).take(128).peekable();
3691 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3692
3693 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3694 let word_kind = cmp::max(
3695 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3696 next_chars.peek().copied().map(|c| classifier.kind(c)),
3697 );
3698
3699 for ch in prev_chars {
3700 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3701 start -= ch.len_utf8();
3702 } else {
3703 break;
3704 }
3705 }
3706
3707 for ch in next_chars {
3708 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3709 end += ch.len_utf8();
3710 } else {
3711 break;
3712 }
3713 }
3714
3715 (start..end, word_kind)
3716 }
3717
3718 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3719 /// range. When `require_larger` is true, the node found must be larger than the query range.
3720 ///
3721 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3722 /// be moved to the root of the tree.
3723 fn goto_node_enclosing_range(
3724 cursor: &mut tree_sitter::TreeCursor,
3725 query_range: &Range<usize>,
3726 require_larger: bool,
3727 ) -> bool {
3728 let mut ascending = false;
3729 loop {
3730 let mut range = cursor.node().byte_range();
3731 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3734 if range.start > query_range.start {
3735 cursor.goto_previous_sibling();
3736 range = cursor.node().byte_range();
3737 }
3738 } else {
3739 // When the query range is non-empty and the current node ends exactly at the start,
3740 // move to the next sibling to find a node that extends beyond the start.
3741 if range.end == query_range.start {
3742 cursor.goto_next_sibling();
3743 range = cursor.node().byte_range();
3744 }
3745 }
3746
3747 let encloses = range.contains_inclusive(query_range)
3748 && (!require_larger || range.len() > query_range.len());
3749 if !encloses {
3750 ascending = true;
3751 if !cursor.goto_parent() {
3752 return false;
3753 }
3754 continue;
3755 } else if ascending {
3756 return true;
3757 }
3758
3759 // Descend into the current node.
3760 if cursor
3761 .goto_first_child_for_byte(query_range.start)
3762 .is_none()
3763 {
3764 return true;
3765 }
3766 }
3767 }
3768
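    /// Returns the smallest syntax node, across all syntax layers, that contains the given
    /// range and is larger than it.
    ///
    /// A usage sketch (not compiled as a doc test), assuming `snapshot` and an offset-based
    /// `selection_range` are in scope:
    ///
    /// ```ignore
    /// // Expand a selection outward to the enclosing syntax node.
    /// if let Some(node) = snapshot.syntax_ancestor(selection_range.clone()) {
    ///     let expanded_range = node.byte_range();
    /// }
    /// ```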
3769 pub fn syntax_ancestor<'a, T: ToOffset>(
3770 &'a self,
3771 range: Range<T>,
3772 ) -> Option<tree_sitter::Node<'a>> {
3773 let range = range.start.to_offset(self)..range.end.to_offset(self);
3774 let mut result: Option<tree_sitter::Node<'a>> = None;
3775 for layer in self
3776 .syntax
3777 .layers_for_range(range.clone(), &self.text, true)
3778 {
3779 let mut cursor = layer.node().walk();
3780
3781 // Find the node that both contains the range and is larger than it.
3782 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3783 continue;
3784 }
3785
3786 let left_node = cursor.node();
3787 let mut layer_result = left_node;
3788
3789 // For an empty range, try to find another node immediately to the right of the range.
3790 if left_node.end_byte() == range.start {
3791 let mut right_node = None;
3792 while !cursor.goto_next_sibling() {
3793 if !cursor.goto_parent() {
3794 break;
3795 }
3796 }
3797
3798 while cursor.node().start_byte() == range.start {
3799 right_node = Some(cursor.node());
3800 if !cursor.goto_first_child() {
3801 break;
3802 }
3803 }
3804
3805 // If there is a candidate node on both sides of the (empty) range, then
3806 // decide between the two by favoring a named node over an anonymous token.
3807 // If both nodes are the same in that regard, favor the right one.
3808 if let Some(right_node) = right_node
3809 && (right_node.is_named() || !left_node.is_named())
3810 {
3811 layer_result = right_node;
3812 }
3813 }
3814
3815 if let Some(previous_result) = &result
3816 && previous_result.byte_range().len() < layer_result.byte_range().len()
3817 {
3818 continue;
3819 }
3820 result = Some(layer_result);
3821 }
3822
3823 result
3824 }
3825
3826 /// Find the previous sibling syntax node at the given range.
3827 ///
3828 /// This function locates the syntax node that precedes the node containing
3829 /// the given range. It searches hierarchically by:
3830 /// 1. Finding the node that contains the given range
3831 /// 2. Looking for the previous sibling at the same tree level
3832 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3833 ///
3834 /// Returns `None` if there is no previous sibling at any ancestor level.
3835 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3836 &'a self,
3837 range: Range<T>,
3838 ) -> Option<tree_sitter::Node<'a>> {
3839 let range = range.start.to_offset(self)..range.end.to_offset(self);
3840 let mut result: Option<tree_sitter::Node<'a>> = None;
3841
3842 for layer in self
3843 .syntax
3844 .layers_for_range(range.clone(), &self.text, true)
3845 {
3846 let mut cursor = layer.node().walk();
3847
3848 // Find the node that contains the range
3849 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3850 continue;
3851 }
3852
3853 // Look for the previous sibling, moving up ancestor levels if needed
3854 loop {
3855 if cursor.goto_previous_sibling() {
3856 let layer_result = cursor.node();
3857
3858 if let Some(previous_result) = &result {
3859 if previous_result.byte_range().end < layer_result.byte_range().end {
3860 continue;
3861 }
3862 }
3863 result = Some(layer_result);
3864 break;
3865 }
3866
3867 // No sibling found at this level, try moving up to parent
3868 if !cursor.goto_parent() {
3869 break;
3870 }
3871 }
3872 }
3873
3874 result
3875 }
3876
3877 /// Find the next sibling syntax node at the given range.
3878 ///
3879 /// This function locates the syntax node that follows the node containing
3880 /// the given range. It searches hierarchically by:
3881 /// 1. Finding the node that contains the given range
3882 /// 2. Looking for the next sibling at the same tree level
3883 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3884 ///
3885 /// Returns `None` if there is no next sibling at any ancestor level.
3886 pub fn syntax_next_sibling<'a, T: ToOffset>(
3887 &'a self,
3888 range: Range<T>,
3889 ) -> Option<tree_sitter::Node<'a>> {
3890 let range = range.start.to_offset(self)..range.end.to_offset(self);
3891 let mut result: Option<tree_sitter::Node<'a>> = None;
3892
3893 for layer in self
3894 .syntax
3895 .layers_for_range(range.clone(), &self.text, true)
3896 {
3897 let mut cursor = layer.node().walk();
3898
3899 // Find the node that contains the range
3900 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3901 continue;
3902 }
3903
3904 // Look for the next sibling, moving up ancestor levels if needed
3905 loop {
3906 if cursor.goto_next_sibling() {
3907 let layer_result = cursor.node();
3908
3909 if let Some(previous_result) = &result {
3910 if previous_result.byte_range().start > layer_result.byte_range().start {
3911 continue;
3912 }
3913 }
3914 result = Some(layer_result);
3915 break;
3916 }
3917
3918 // No sibling found at this level, try moving up to parent
3919 if !cursor.goto_parent() {
3920 break;
3921 }
3922 }
3923 }
3924
3925 result
3926 }
3927
    /// Returns the root syntax node within the given position's row.
3929 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3930 let start_offset = position.to_offset(self);
3931
3932 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3933
3934 let layer = self
3935 .syntax
3936 .layers_for_range(start_offset..start_offset, &self.text, true)
3937 .next()?;
3938
3939 let mut cursor = layer.node().walk();
3940
3941 // Descend to the first leaf that touches the start of the range.
3942 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3943 if cursor.node().end_byte() == start_offset {
3944 cursor.goto_next_sibling();
3945 }
3946 }
3947
3948 // Ascend to the root node within the same row.
3949 while cursor.goto_parent() {
3950 if cursor.node().start_position().row != row {
3951 break;
3952 }
3953 }
3954
3955 Some(cursor.node())
3956 }
3957
3958 /// Returns the outline for the buffer.
3959 ///
3960 /// This method allows passing an optional [`SyntaxTheme`] to
3961 /// syntax-highlight the returned symbols.
3962 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3963 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3964 }
3965
3966 /// Returns all the symbols that contain the given position.
3967 ///
3968 /// This method allows passing an optional [`SyntaxTheme`] to
3969 /// syntax-highlight the returned symbols.
3970 pub fn symbols_containing<T: ToOffset>(
3971 &self,
3972 position: T,
3973 theme: Option<&SyntaxTheme>,
3974 ) -> Vec<OutlineItem<Anchor>> {
3975 let position = position.to_offset(self);
3976 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3977 let end = self.clip_offset(position + 1, Bias::Right);
3978 let mut items = self.outline_items_containing(start..end, false, theme);
3979 let mut prev_depth = None;
3980 items.retain(|item| {
3981 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3982 prev_depth = Some(item.depth);
3983 result
3984 });
3985 items
3986 }
3987
3988 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3989 let range = range.to_offset(self);
3990 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3991 grammar.outline_config.as_ref().map(|c| &c.query)
3992 });
3993 let configs = matches
3994 .grammars()
3995 .iter()
3996 .map(|g| g.outline_config.as_ref().unwrap())
3997 .collect::<Vec<_>>();
3998
3999 while let Some(mat) = matches.peek() {
4000 let config = &configs[mat.grammar_index];
4001 let containing_item_node = maybe!({
4002 let item_node = mat.captures.iter().find_map(|cap| {
4003 if cap.index == config.item_capture_ix {
4004 Some(cap.node)
4005 } else {
4006 None
4007 }
4008 })?;
4009
4010 let item_byte_range = item_node.byte_range();
4011 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4012 None
4013 } else {
4014 Some(item_node)
4015 }
4016 });
4017
4018 if let Some(item_node) = containing_item_node {
4019 return Some(
4020 Point::from_ts_point(item_node.start_position())
4021 ..Point::from_ts_point(item_node.end_position()),
4022 );
4023 }
4024
4025 matches.advance();
4026 }
4027 None
4028 }
4029
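    /// Returns the outline items whose ranges intersect the given range, with each item's
    /// depth assigned from containment relationships.
    ///
    /// A usage sketch (not compiled as a doc test), assuming a `snapshot: BufferSnapshot` is in scope:
    ///
    /// ```ignore
    /// let items = snapshot.outline_items_containing(0..snapshot.len(), true, None);
    /// for item in &items {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```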
4030 pub fn outline_items_containing<T: ToOffset>(
4031 &self,
4032 range: Range<T>,
4033 include_extra_context: bool,
4034 theme: Option<&SyntaxTheme>,
4035 ) -> Vec<OutlineItem<Anchor>> {
4036 self.outline_items_containing_internal(
4037 range,
4038 include_extra_context,
4039 theme,
4040 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4041 )
4042 }
4043
4044 pub fn outline_items_as_points_containing<T: ToOffset>(
4045 &self,
4046 range: Range<T>,
4047 include_extra_context: bool,
4048 theme: Option<&SyntaxTheme>,
4049 ) -> Vec<OutlineItem<Point>> {
4050 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4051 range
4052 })
4053 }
4054
4055 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4056 &self,
4057 range: Range<T>,
4058 include_extra_context: bool,
4059 theme: Option<&SyntaxTheme>,
4060 ) -> Vec<OutlineItem<usize>> {
4061 self.outline_items_containing_internal(
4062 range,
4063 include_extra_context,
4064 theme,
4065 |buffer, range| range.to_offset(buffer),
4066 )
4067 }
4068
4069 fn outline_items_containing_internal<T: ToOffset, U>(
4070 &self,
4071 range: Range<T>,
4072 include_extra_context: bool,
4073 theme: Option<&SyntaxTheme>,
4074 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4075 ) -> Vec<OutlineItem<U>> {
4076 let range = range.to_offset(self);
4077 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4078 grammar.outline_config.as_ref().map(|c| &c.query)
4079 });
4080
4081 let mut items = Vec::new();
4082 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4083 while let Some(mat) = matches.peek() {
4084 let config = matches.grammars()[mat.grammar_index]
4085 .outline_config
4086 .as_ref()
4087 .unwrap();
4088 if let Some(item) =
4089 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4090 {
4091 items.push(item);
4092 } else if let Some(capture) = mat
4093 .captures
4094 .iter()
4095 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4096 {
4097 let capture_range = capture.node.start_position()..capture.node.end_position();
4098 let mut capture_row_range =
4099 capture_range.start.row as u32..capture_range.end.row as u32;
4100 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4101 {
4102 capture_row_range.end -= 1;
4103 }
4104 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4105 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4106 last_row_range.end = capture_row_range.end;
4107 } else {
4108 annotation_row_ranges.push(capture_row_range);
4109 }
4110 } else {
4111 annotation_row_ranges.push(capture_row_range);
4112 }
4113 }
4114 matches.advance();
4115 }
4116
4117 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4118
4119 // Assign depths based on containment relationships and convert to anchors.
4120 let mut item_ends_stack = Vec::<Point>::new();
4121 let mut anchor_items = Vec::new();
4122 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4123 for item in items {
4124 while let Some(last_end) = item_ends_stack.last().copied() {
4125 if last_end < item.range.end {
4126 item_ends_stack.pop();
4127 } else {
4128 break;
4129 }
4130 }
4131
4132 let mut annotation_row_range = None;
4133 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4134 let row_preceding_item = item.range.start.row.saturating_sub(1);
4135 if next_annotation_row_range.end < row_preceding_item {
4136 annotation_row_ranges.next();
4137 } else {
4138 if next_annotation_row_range.end == row_preceding_item {
4139 annotation_row_range = Some(next_annotation_row_range.clone());
4140 annotation_row_ranges.next();
4141 }
4142 break;
4143 }
4144 }
4145
4146 anchor_items.push(OutlineItem {
4147 depth: item_ends_stack.len(),
4148 range: range_callback(self, item.range.clone()),
4149 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4150 text: item.text,
4151 highlight_ranges: item.highlight_ranges,
4152 name_ranges: item.name_ranges,
4153 body_range: item.body_range.map(|r| range_callback(self, r)),
4154 annotation_range: annotation_row_range.map(|annotation_range| {
4155 let point_range = Point::new(annotation_range.start, 0)
4156 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4157 range_callback(self, point_range)
4158 }),
4159 });
4160 item_ends_stack.push(item.range.end);
4161 }
4162
4163 anchor_items
4164 }
4165
4166 fn next_outline_item(
4167 &self,
4168 config: &OutlineConfig,
4169 mat: &SyntaxMapMatch,
4170 range: &Range<usize>,
4171 include_extra_context: bool,
4172 theme: Option<&SyntaxTheme>,
4173 ) -> Option<OutlineItem<Point>> {
4174 let item_node = mat.captures.iter().find_map(|cap| {
4175 if cap.index == config.item_capture_ix {
4176 Some(cap.node)
4177 } else {
4178 None
4179 }
4180 })?;
4181
4182 let item_byte_range = item_node.byte_range();
4183 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4184 return None;
4185 }
4186 let item_point_range = Point::from_ts_point(item_node.start_position())
4187 ..Point::from_ts_point(item_node.end_position());
4188
4189 let mut open_point = None;
4190 let mut close_point = None;
4191
4192 let mut buffer_ranges = Vec::new();
4193 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4194 let mut range = node.start_byte()..node.end_byte();
4195 let start = node.start_position();
4196 if node.end_position().row > start.row {
4197 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4198 }
4199
4200 if !range.is_empty() {
4201 buffer_ranges.push((range, node_is_name));
4202 }
4203 };
4204
4205 for capture in mat.captures {
4206 if capture.index == config.name_capture_ix {
4207 add_to_buffer_ranges(capture.node, true);
4208 } else if Some(capture.index) == config.context_capture_ix
4209 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4210 {
4211 add_to_buffer_ranges(capture.node, false);
4212 } else {
4213 if Some(capture.index) == config.open_capture_ix {
4214 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4215 } else if Some(capture.index) == config.close_capture_ix {
4216 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4217 }
4218 }
4219 }
4220
4221 if buffer_ranges.is_empty() {
4222 return None;
4223 }
4224 let source_range_for_text =
4225 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4226
4227 let mut text = String::new();
4228 let mut highlight_ranges = Vec::new();
4229 let mut name_ranges = Vec::new();
4230 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4231 let mut last_buffer_range_end = 0;
4232 for (buffer_range, is_name) in buffer_ranges {
4233 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4234 if space_added {
4235 text.push(' ');
4236 }
4237 let before_append_len = text.len();
4238 let mut offset = buffer_range.start;
4239 chunks.seek(buffer_range.clone());
4240 for mut chunk in chunks.by_ref() {
4241 if chunk.text.len() > buffer_range.end - offset {
4242 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4243 offset = buffer_range.end;
4244 } else {
4245 offset += chunk.text.len();
4246 }
4247 let style = chunk
4248 .syntax_highlight_id
4249 .zip(theme)
4250 .and_then(|(highlight, theme)| highlight.style(theme));
4251 if let Some(style) = style {
4252 let start = text.len();
4253 let end = start + chunk.text.len();
4254 highlight_ranges.push((start..end, style));
4255 }
4256 text.push_str(chunk.text);
4257 if offset >= buffer_range.end {
4258 break;
4259 }
4260 }
4261 if is_name {
4262 let after_append_len = text.len();
4263 let start = if space_added && !name_ranges.is_empty() {
4264 before_append_len - 1
4265 } else {
4266 before_append_len
4267 };
4268 name_ranges.push(start..after_append_len);
4269 }
4270 last_buffer_range_end = buffer_range.end;
4271 }
4272
4273 Some(OutlineItem {
4274 depth: 0, // We'll calculate the depth later
4275 range: item_point_range,
4276 source_range_for_text: source_range_for_text.to_point(self),
4277 text,
4278 highlight_ranges,
4279 name_ranges,
4280 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4281 annotation_range: None,
4282 })
4283 }
4284
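    /// Returns the offset ranges of function bodies intersecting `within`, e.g. to fold them.
    ///
    /// A usage sketch (not compiled as a doc test), assuming a `snapshot: BufferSnapshot` is in scope:
    ///
    /// ```ignore
    /// let fold_ranges: Vec<Range<usize>> =
    ///     snapshot.function_body_fold_ranges(0..snapshot.len()).collect();
    /// ```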
4285 pub fn function_body_fold_ranges<T: ToOffset>(
4286 &self,
4287 within: Range<T>,
4288 ) -> impl Iterator<Item = Range<usize>> + '_ {
4289 self.text_object_ranges(within, TreeSitterOptions::default())
4290 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4291 }
4292
4293 /// For each grammar in the language, runs the provided
4294 /// [`tree_sitter::Query`] against the given range.
4295 pub fn matches(
4296 &self,
4297 range: Range<usize>,
4298 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4299 ) -> SyntaxMapMatches<'_> {
4300 self.syntax.matches(range, self, query)
4301 }
4302
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
    /// Hence, it may return more bracket pairs than the range itself contains.
    ///
    /// Chunks listed in `known_chunks` are omitted.
    /// The resulting bracket match collections are not ordered.
4308 pub fn fetch_bracket_ranges(
4309 &self,
4310 range: Range<usize>,
4311 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4312 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4313 let mut all_bracket_matches = HashMap::default();
4314
4315 for chunk in self
4316 .tree_sitter_data
4317 .chunks
4318 .applicable_chunks(&[self.anchor_before(range.start)..self.anchor_after(range.end)])
4319 {
4320 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4321 continue;
4322 }
4323 let Some(chunk_range) = self.tree_sitter_data.chunks.chunk_range(chunk) else {
4324 continue;
4325 };
4326 let chunk_range = chunk_range.to_offset(&self);
4327
4328 if let Some(cached_brackets) =
4329 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4330 {
4331 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4332 continue;
4333 }
4334
4335 let mut all_brackets = Vec::new();
4336 let mut opens = Vec::new();
4337 let mut color_pairs = Vec::new();
4338
4339 let mut matches = self
4340 .syntax
4341 .matches(chunk_range.clone(), &self.text, |grammar| {
4342 grammar.brackets_config.as_ref().map(|c| &c.query)
4343 });
4344 let configs = matches
4345 .grammars()
4346 .iter()
4347 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4348 .collect::<Vec<_>>();
4349
4350 while let Some(mat) = matches.peek() {
4351 let mut open = None;
4352 let mut close = None;
4353 let syntax_layer_depth = mat.depth;
4354 let config = configs[mat.grammar_index];
4355 let pattern = &config.patterns[mat.pattern_index];
4356 for capture in mat.captures {
4357 if capture.index == config.open_capture_ix {
4358 open = Some(capture.node.byte_range());
4359 } else if capture.index == config.close_capture_ix {
4360 close = Some(capture.node.byte_range());
4361 }
4362 }
4363
4364 matches.advance();
4365
4366 let Some((open_range, close_range)) = open.zip(close) else {
4367 continue;
4368 };
4369
4370 let bracket_range = open_range.start..=close_range.end;
4371 if !bracket_range.overlaps(&chunk_range) {
4372 continue;
4373 }
4374
4375 let index = all_brackets.len();
4376 all_brackets.push(BracketMatch {
4377 open_range: open_range.clone(),
4378 close_range: close_range.clone(),
4379 newline_only: pattern.newline_only,
4380 syntax_layer_depth,
4381 color_index: None,
4382 });
4383
                // Certain languages have "brackets" that are not real brackets, e.g. tags, and such
                // a bracket pair will match the entire tag with all of the text inside.
                // For now, avoid highlighting any pair where both brackets are longer than a single char.
                // We need to colorize `<Element/>` bracket pairs, so we cannot make this check stricter.
4388 let should_color =
4389 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4390 if should_color {
4391 opens.push(open_range.clone());
4392 color_pairs.push((open_range, close_range, index));
4393 }
4394 }
4395
4396 opens.sort_by_key(|r| (r.start, r.end));
4397 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4398 color_pairs.sort_by_key(|(_, close, _)| close.end);
4399
4400 let mut open_stack = Vec::new();
4401 let mut open_index = 0;
4402 for (open, close, index) in color_pairs {
4403 while open_index < opens.len() && opens[open_index].start < close.start {
4404 open_stack.push(opens[open_index].clone());
4405 open_index += 1;
4406 }
4407
4408 if open_stack.last() == Some(&open) {
4409 let depth_index = open_stack.len() - 1;
4410 all_brackets[index].color_index = Some(depth_index);
4411 open_stack.pop();
4412 }
4413 }
4414
4415 all_brackets.sort_by_key(|bracket_match| {
4416 (bracket_match.open_range.start, bracket_match.open_range.end)
4417 });
4418
4419 if let empty_slot @ None =
4420 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4421 {
4422 *empty_slot = Some(all_brackets.clone());
4423 }
4424 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4425 }
4426
4427 all_bracket_matches
4428 }
4429
4430 pub fn all_bracket_ranges(
4431 &self,
4432 range: Range<usize>,
4433 ) -> impl Iterator<Item = BracketMatch<usize>> {
4434 self.fetch_bracket_ranges(range.clone(), None)
4435 .into_values()
4436 .flatten()
4437 .filter(move |bracket_match| {
4438 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4439 bracket_range.overlaps(&range)
4440 })
4441 }
4442
    /// Returns bracket range pairs overlapping or adjacent to `range`.
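    ///
    /// A usage sketch (not compiled as a doc test), assuming `snapshot` and a cursor `offset` are in scope:
    ///
    /// ```ignore
    /// // Pick the innermost bracket pair around the cursor.
    /// let innermost = snapshot
    ///     .bracket_ranges(offset..offset)
    ///     .min_by_key(|pair| pair.close_range.end - pair.open_range.start);
    /// ```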
4444 pub fn bracket_ranges<T: ToOffset>(
4445 &self,
4446 range: Range<T>,
4447 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4448 // Find bracket pairs that *inclusively* contain the given range.
4449 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4450 self.all_bracket_ranges(range)
4451 .filter(|pair| !pair.newline_only)
4452 }
4453
4454 pub fn debug_variables_query<T: ToOffset>(
4455 &self,
4456 range: Range<T>,
4457 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4458 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4459
4460 let mut matches = self.syntax.matches_with_options(
4461 range.clone(),
4462 &self.text,
4463 TreeSitterOptions::default(),
4464 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4465 );
4466
4467 let configs = matches
4468 .grammars()
4469 .iter()
4470 .map(|grammar| grammar.debug_variables_config.as_ref())
4471 .collect::<Vec<_>>();
4472
4473 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4474
4475 iter::from_fn(move || {
4476 loop {
4477 while let Some(capture) = captures.pop() {
4478 if capture.0.overlaps(&range) {
4479 return Some(capture);
4480 }
4481 }
4482
4483 let mat = matches.peek()?;
4484
4485 let Some(config) = configs[mat.grammar_index].as_ref() else {
4486 matches.advance();
4487 continue;
4488 };
4489
4490 for capture in mat.captures {
4491 let Some(ix) = config
4492 .objects_by_capture_ix
4493 .binary_search_by_key(&capture.index, |e| e.0)
4494 .ok()
4495 else {
4496 continue;
4497 };
4498 let text_object = config.objects_by_capture_ix[ix].1;
4499 let byte_range = capture.node.byte_range();
4500
4501 let mut found = false;
4502 for (range, existing) in captures.iter_mut() {
4503 if existing == &text_object {
4504 range.start = range.start.min(byte_range.start);
4505 range.end = range.end.max(byte_range.end);
4506 found = true;
4507 break;
4508 }
4509 }
4510
4511 if !found {
4512 captures.push((byte_range, text_object));
4513 }
4514 }
4515
4516 matches.advance();
4517 }
4518 })
4519 }
4520
4521 pub fn text_object_ranges<T: ToOffset>(
4522 &self,
4523 range: Range<T>,
4524 options: TreeSitterOptions,
4525 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4526 let range =
4527 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4528
4529 let mut matches =
4530 self.syntax
4531 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4532 grammar.text_object_config.as_ref().map(|c| &c.query)
4533 });
4534
4535 let configs = matches
4536 .grammars()
4537 .iter()
4538 .map(|grammar| grammar.text_object_config.as_ref())
4539 .collect::<Vec<_>>();
4540
4541 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4542
4543 iter::from_fn(move || {
4544 loop {
4545 while let Some(capture) = captures.pop() {
4546 if capture.0.overlaps(&range) {
4547 return Some(capture);
4548 }
4549 }
4550
4551 let mat = matches.peek()?;
4552
4553 let Some(config) = configs[mat.grammar_index].as_ref() else {
4554 matches.advance();
4555 continue;
4556 };
4557
4558 for capture in mat.captures {
4559 let Some(ix) = config
4560 .text_objects_by_capture_ix
4561 .binary_search_by_key(&capture.index, |e| e.0)
4562 .ok()
4563 else {
4564 continue;
4565 };
4566 let text_object = config.text_objects_by_capture_ix[ix].1;
4567 let byte_range = capture.node.byte_range();
4568
4569 let mut found = false;
4570 for (range, existing) in captures.iter_mut() {
4571 if existing == &text_object {
4572 range.start = range.start.min(byte_range.start);
4573 range.end = range.end.max(byte_range.end);
4574 found = true;
4575 break;
4576 }
4577 }
4578
4579 if !found {
4580 captures.push((byte_range, text_object));
4581 }
4582 }
4583
4584 matches.advance();
4585 }
4586 })
4587 }
4588
    /// Returns the enclosing bracket ranges containing the given range.
4590 pub fn enclosing_bracket_ranges<T: ToOffset>(
4591 &self,
4592 range: Range<T>,
4593 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4594 let range = range.start.to_offset(self)..range.end.to_offset(self);
4595
4596 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4597 let max_depth = result
4598 .iter()
4599 .map(|mat| mat.syntax_layer_depth)
4600 .max()
4601 .unwrap_or(0);
4602 result.into_iter().filter(move |pair| {
4603 pair.open_range.start <= range.start
4604 && pair.close_range.end >= range.end
4605 && pair.syntax_layer_depth == max_depth
4606 })
4607 }
4608
    /// Returns the smallest pair of enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to filter which bracket ranges are considered.
4612 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4613 &self,
4614 range: Range<T>,
4615 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4616 ) -> Option<(Range<usize>, Range<usize>)> {
4617 let range = range.start.to_offset(self)..range.end.to_offset(self);
4618
4619 // Get the ranges of the innermost pair of brackets.
4620 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4621
4622 for pair in self.enclosing_bracket_ranges(range) {
4623 if let Some(range_filter) = range_filter
4624 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4625 {
4626 continue;
4627 }
4628
4629 let len = pair.close_range.end - pair.open_range.start;
4630
4631 if let Some((existing_open, existing_close)) = &result {
4632 let existing_len = existing_close.end - existing_open.start;
4633 if len > existing_len {
4634 continue;
4635 }
4636 }
4637
4638 result = Some((pair.open_range, pair.close_range));
4639 }
4640
4641 result
4642 }
4643
    /// Returns offset ranges for any matches of the redaction query.
4645 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4646 /// will be run on the relevant section of the buffer.
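    ///
    /// A usage sketch (not compiled as a doc test), assuming a `snapshot: BufferSnapshot` is in scope:
    ///
    /// ```ignore
    /// // Byte ranges that should be rendered obscured, e.g. secret values in an env file.
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```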
4647 pub fn redacted_ranges<T: ToOffset>(
4648 &self,
4649 range: Range<T>,
4650 ) -> impl Iterator<Item = Range<usize>> + '_ {
4651 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4652 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4653 grammar
4654 .redactions_config
4655 .as_ref()
4656 .map(|config| &config.query)
4657 });
4658
4659 let configs = syntax_matches
4660 .grammars()
4661 .iter()
4662 .map(|grammar| grammar.redactions_config.as_ref())
4663 .collect::<Vec<_>>();
4664
4665 iter::from_fn(move || {
4666 let redacted_range = syntax_matches
4667 .peek()
4668 .and_then(|mat| {
4669 configs[mat.grammar_index].and_then(|config| {
4670 mat.captures
4671 .iter()
4672 .find(|capture| capture.index == config.redaction_capture_ix)
4673 })
4674 })
4675 .map(|mat| mat.node.byte_range());
4676 syntax_matches.advance();
4677 redacted_range
4678 })
4679 }
4680
4681 pub fn injections_intersecting_range<T: ToOffset>(
4682 &self,
4683 range: Range<T>,
4684 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4685 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4686
4687 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4688 grammar
4689 .injection_config
4690 .as_ref()
4691 .map(|config| &config.query)
4692 });
4693
4694 let configs = syntax_matches
4695 .grammars()
4696 .iter()
4697 .map(|grammar| grammar.injection_config.as_ref())
4698 .collect::<Vec<_>>();
4699
4700 iter::from_fn(move || {
4701 let ranges = syntax_matches.peek().and_then(|mat| {
4702 let config = &configs[mat.grammar_index]?;
4703 let content_capture_range = mat.captures.iter().find_map(|capture| {
4704 if capture.index == config.content_capture_ix {
4705 Some(capture.node.byte_range())
4706 } else {
4707 None
4708 }
4709 })?;
4710 let language = self.language_at(content_capture_range.start)?;
4711 Some((content_capture_range, language))
4712 });
4713 syntax_matches.advance();
4714 ranges
4715 })
4716 }
4717
4718 pub fn runnable_ranges(
4719 &self,
4720 offset_range: Range<usize>,
4721 ) -> impl Iterator<Item = RunnableRange> + '_ {
4722 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4723 grammar.runnable_config.as_ref().map(|config| &config.query)
4724 });
4725
4726 let test_configs = syntax_matches
4727 .grammars()
4728 .iter()
4729 .map(|grammar| grammar.runnable_config.as_ref())
4730 .collect::<Vec<_>>();
4731
4732 iter::from_fn(move || {
4733 loop {
4734 let mat = syntax_matches.peek()?;
4735
4736 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4737 let mut run_range = None;
4738 let full_range = mat.captures.iter().fold(
4739 Range {
4740 start: usize::MAX,
4741 end: 0,
4742 },
4743 |mut acc, next| {
4744 let byte_range = next.node.byte_range();
4745 if acc.start > byte_range.start {
4746 acc.start = byte_range.start;
4747 }
4748 if acc.end < byte_range.end {
4749 acc.end = byte_range.end;
4750 }
4751 acc
4752 },
4753 );
4754 if full_range.start > full_range.end {
4755 // We did not find a full spanning range of this match.
4756 return None;
4757 }
4758 let extra_captures: SmallVec<[_; 1]> =
4759 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4760 test_configs
4761 .extra_captures
4762 .get(capture.index as usize)
4763 .cloned()
4764 .and_then(|tag_name| match tag_name {
4765 RunnableCapture::Named(name) => {
4766 Some((capture.node.byte_range(), name))
4767 }
4768 RunnableCapture::Run => {
4769 let _ = run_range.insert(capture.node.byte_range());
4770 None
4771 }
4772 })
4773 }));
4774 let run_range = run_range?;
4775 let tags = test_configs
4776 .query
4777 .property_settings(mat.pattern_index)
4778 .iter()
4779 .filter_map(|property| {
4780 if *property.key == *"tag" {
4781 property
4782 .value
4783 .as_ref()
4784 .map(|value| RunnableTag(value.to_string().into()))
4785 } else {
4786 None
4787 }
4788 })
4789 .collect();
4790 let extra_captures = extra_captures
4791 .into_iter()
4792 .map(|(range, name)| {
4793 (
4794 name.to_string(),
4795 self.text_for_range(range).collect::<String>(),
4796 )
4797 })
4798 .collect();
4799 // All tags should have the same range.
4800 Some(RunnableRange {
4801 run_range,
4802 full_range,
4803 runnable: Runnable {
4804 tags,
4805 language: mat.language,
4806 buffer: self.remote_id(),
4807 },
4808 extra_captures,
4809 buffer_id: self.remote_id(),
4810 })
4811 });
4812
4813 syntax_matches.advance();
4814 if test_range.is_some() {
                    // It's fine to short-circuit when .peek()? returns None. We just don't want to return
                    // None from this iterator when a match did not contain a run marker, so in that case
                    // we loop around to the next match.
4817 return test_range;
4818 }
4819 }
4820 })
4821 }
4822
4823 /// Returns selections for remote peers intersecting the given range.
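    ///
    /// A usage sketch (not compiled as a doc test), assuming `snapshot` and an anchor `range` are in scope:
    ///
    /// ```ignore
    /// let remote_selection_count: usize = snapshot
    ///     .selections_in_range(range.clone(), false)
    ///     .map(|(_replica, _line_mode, _cursor_shape, selections)| selections.count())
    ///     .sum();
    /// ```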
4824 #[allow(clippy::type_complexity)]
4825 pub fn selections_in_range(
4826 &self,
4827 range: Range<Anchor>,
4828 include_local: bool,
4829 ) -> impl Iterator<
4830 Item = (
4831 ReplicaId,
4832 bool,
4833 CursorShape,
4834 impl Iterator<Item = &Selection<Anchor>> + '_,
4835 ),
4836 > + '_ {
4837 self.remote_selections
4838 .iter()
4839 .filter(move |(replica_id, set)| {
4840 (include_local || **replica_id != self.text.replica_id())
4841 && !set.selections.is_empty()
4842 })
4843 .map(move |(replica_id, set)| {
4844 let start_ix = match set.selections.binary_search_by(|probe| {
4845 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4846 }) {
4847 Ok(ix) | Err(ix) => ix,
4848 };
4849 let end_ix = match set.selections.binary_search_by(|probe| {
4850 probe.start.cmp(&range.end, self).then(Ordering::Less)
4851 }) {
4852 Ok(ix) | Err(ix) => ix,
4853 };
4854
4855 (
4856 *replica_id,
4857 set.line_mode,
4858 set.cursor_shape,
4859 set.selections[start_ix..end_ix].iter(),
4860 )
4861 })
4862 }
4863
    /// Returns whether the buffer contains any diagnostics.
4865 pub fn has_diagnostics(&self) -> bool {
4866 !self.diagnostics.is_empty()
4867 }
4868
4869 /// Returns all the diagnostics intersecting the given range.
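    ///
    /// A usage sketch (not compiled as a doc test), assuming a `snapshot: BufferSnapshot` is in scope:
    ///
    /// ```ignore
    /// // Count the errors in the whole buffer, resolving diagnostic ranges to offsets.
    /// let error_count = snapshot
    ///     .diagnostics_in_range::<_, usize>(0..snapshot.len(), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .count();
    /// ```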
4870 pub fn diagnostics_in_range<'a, T, O>(
4871 &'a self,
4872 search_range: Range<T>,
4873 reversed: bool,
4874 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4875 where
4876 T: 'a + Clone + ToOffset,
4877 O: 'a + FromAnchor,
4878 {
4879 let mut iterators: Vec<_> = self
4880 .diagnostics
4881 .iter()
4882 .map(|(_, collection)| {
4883 collection
4884 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4885 .peekable()
4886 })
4887 .collect();
4888
4889 std::iter::from_fn(move || {
4890 let (next_ix, _) = iterators
4891 .iter_mut()
4892 .enumerate()
4893 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4894 .min_by(|(_, a), (_, b)| {
4895 let cmp = a
4896 .range
4897 .start
4898 .cmp(&b.range.start, self)
4899 // when range is equal, sort by diagnostic severity
4900 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4901 // and stabilize order with group_id
4902 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4903 if reversed { cmp.reverse() } else { cmp }
4904 })?;
4905 iterators[next_ix]
4906 .next()
4907 .map(
4908 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4909 diagnostic,
4910 range: FromAnchor::from_anchor(&range.start, self)
4911 ..FromAnchor::from_anchor(&range.end, self),
4912 },
4913 )
4914 })
4915 }
4916
4917 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4918 /// should be used instead.
4919 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4920 &self.diagnostics
4921 }
4922
    /// Returns all the diagnostic groups associated with the given
    /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4926 pub fn diagnostic_groups(
4927 &self,
4928 language_server_id: Option<LanguageServerId>,
4929 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4930 let mut groups = Vec::new();
4931
4932 if let Some(language_server_id) = language_server_id {
4933 if let Ok(ix) = self
4934 .diagnostics
4935 .binary_search_by_key(&language_server_id, |e| e.0)
4936 {
4937 self.diagnostics[ix]
4938 .1
4939 .groups(language_server_id, &mut groups, self);
4940 }
4941 } else {
4942 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4943 diagnostics.groups(*language_server_id, &mut groups, self);
4944 }
4945 }
4946
4947 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4948 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4949 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4950 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4951 });
4952
4953 groups
4954 }
4955
4956 /// Returns an iterator over the diagnostics for the given group.
4957 pub fn diagnostic_group<O>(
4958 &self,
4959 group_id: usize,
4960 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4961 where
4962 O: FromAnchor + 'static,
4963 {
4964 self.diagnostics
4965 .iter()
4966 .flat_map(move |(_, set)| set.group(group_id, self))
4967 }
4968
4969 /// An integer version number that accounts for all updates besides
4970 /// the buffer's text itself (which is versioned via a version vector).
4971 pub fn non_text_state_update_count(&self) -> usize {
4972 self.non_text_state_update_count
4973 }
4974
4975 /// An integer version that changes when the buffer's syntax changes.
4976 pub fn syntax_update_count(&self) -> usize {
4977 self.syntax.update_count()
4978 }
4979
    /// Returns a snapshot of the underlying file.
4981 pub fn file(&self) -> Option<&Arc<dyn File>> {
4982 self.file.as_ref()
4983 }
4984
4985 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4986 if let Some(file) = self.file() {
4987 if file.path().file_name().is_none() || include_root {
4988 Some(file.full_path(cx).to_string_lossy().into_owned())
4989 } else {
4990 Some(file.path().display(file.path_style(cx)).to_string())
4991 }
4992 } else {
4993 None
4994 }
4995 }
4996
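    /// Returns the words in the given offset range, keyed by their text and mapped to their
    /// anchor ranges, optionally fuzzy-filtered by [`WordsQuery::fuzzy_contents`].
    ///
    /// A usage sketch (not compiled as a doc test), assuming a `snapshot: BufferSnapshot` is in scope:
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```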
4997 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4998 let query_str = query.fuzzy_contents;
4999 if query_str.is_some_and(|query| query.is_empty()) {
5000 return BTreeMap::default();
5001 }
5002
5003 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5004 language,
5005 override_id: None,
5006 }));
5007
5008 let mut query_ix = 0;
5009 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5010 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5011
5012 let mut words = BTreeMap::default();
5013 let mut current_word_start_ix = None;
5014 let mut chunk_ix = query.range.start;
5015 for chunk in self.chunks(query.range, false) {
5016 for (i, c) in chunk.text.char_indices() {
5017 let ix = chunk_ix + i;
5018 if classifier.is_word(c) {
5019 if current_word_start_ix.is_none() {
5020 current_word_start_ix = Some(ix);
5021 }
5022
5023 if let Some(query_chars) = &query_chars
5024 && query_ix < query_len
5025 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5026 {
5027 query_ix += 1;
5028 }
5029 continue;
5030 } else if let Some(word_start) = current_word_start_ix.take()
5031 && query_ix == query_len
5032 {
5033 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5034 let mut word_text = self.text_for_range(word_start..ix).peekable();
5035 let first_char = word_text
5036 .peek()
5037 .and_then(|first_chunk| first_chunk.chars().next());
                    // Skip empty words and words starting with digits, as a heuristic to reduce useless completions.
5039 if !query.skip_digits
5040 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5041 {
5042 words.insert(word_text.collect(), word_range);
5043 }
5044 }
5045 query_ix = 0;
5046 }
5047 chunk_ix += chunk.text.len();
5048 }
5049
5050 words
5051 }
5052}
5053
5054pub struct WordsQuery<'a> {
    /// Only return words that contain every char of the fuzzy string.
5056 pub fuzzy_contents: Option<&'a str>,
5057 /// Skips words that start with a digit.
5058 pub skip_digits: bool,
    /// Buffer offset range in which to look for words.
5060 pub range: Range<usize>,
5061}
5062
5063fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5064 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5065}
5066
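/// Computes the leading indentation of a line from its characters: a run of spaces or tabs
/// whose kind is determined by the first indentation character.
///
/// A small illustration (not compiled as a doc test, since this function is private):
///
/// ```ignore
/// let size = indent_size_for_text("    let x = 1;".chars());
/// assert_eq!(size.len, 4); // size.kind is IndentKind::Space
/// ```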
5067fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5068 let mut result = IndentSize::spaces(0);
5069 for c in text {
5070 let kind = match c {
5071 ' ' => IndentKind::Space,
5072 '\t' => IndentKind::Tab,
5073 _ => break,
5074 };
5075 if result.len == 0 {
5076 result.kind = kind;
5077 }
5078 result.len += 1;
5079 }
5080 result
5081}
5082
5083impl Clone for BufferSnapshot {
5084 fn clone(&self) -> Self {
5085 Self {
5086 text: self.text.clone(),
5087 syntax: self.syntax.clone(),
5088 file: self.file.clone(),
5089 remote_selections: self.remote_selections.clone(),
5090 diagnostics: self.diagnostics.clone(),
5091 language: self.language.clone(),
5092 tree_sitter_data: self.tree_sitter_data.clone(),
5093 non_text_state_update_count: self.non_text_state_update_count,
5094 }
5095 }
5096}
5097
5098impl Deref for BufferSnapshot {
5099 type Target = text::BufferSnapshot;
5100
5101 fn deref(&self) -> &Self::Target {
5102 &self.text
5103 }
5104}
5105
5106unsafe impl Send for BufferChunks<'_> {}
5107
5108impl<'a> BufferChunks<'a> {
5109 pub(crate) fn new(
5110 text: &'a Rope,
5111 range: Range<usize>,
5112 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5113 diagnostics: bool,
5114 buffer_snapshot: Option<&'a BufferSnapshot>,
5115 ) -> Self {
5116 let mut highlights = None;
5117 if let Some((captures, highlight_maps)) = syntax {
5118 highlights = Some(BufferChunkHighlights {
5119 captures,
5120 next_capture: None,
5121 stack: Default::default(),
5122 highlight_maps,
5123 })
5124 }
5125
5126 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5127 let chunks = text.chunks_in_range(range.clone());
5128
5129 let mut this = BufferChunks {
5130 range,
5131 buffer_snapshot,
5132 chunks,
5133 diagnostic_endpoints,
5134 error_depth: 0,
5135 warning_depth: 0,
5136 information_depth: 0,
5137 hint_depth: 0,
5138 unnecessary_depth: 0,
5139 underline: true,
5140 highlights,
5141 };
5142 this.initialize_diagnostic_endpoints();
5143 this
5144 }
5145
    /// Seeks to the given byte range in the buffer.
5147 pub fn seek(&mut self, range: Range<usize>) {
5148 let old_range = std::mem::replace(&mut self.range, range.clone());
5149 self.chunks.set_range(self.range.clone());
5150 if let Some(highlights) = self.highlights.as_mut() {
5151 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5152 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5153 highlights
5154 .stack
5155 .retain(|(end_offset, _)| *end_offset > range.start);
5156 if let Some(capture) = &highlights.next_capture
5157 && range.start >= capture.node.start_byte()
5158 {
5159 let next_capture_end = capture.node.end_byte();
5160 if range.start < next_capture_end {
5161 highlights.stack.push((
5162 next_capture_end,
5163 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5164 ));
5165 }
5166 highlights.next_capture.take();
5167 }
5168 } else if let Some(snapshot) = self.buffer_snapshot {
5169 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5170 *highlights = BufferChunkHighlights {
5171 captures,
5172 next_capture: None,
5173 stack: Default::default(),
5174 highlight_maps,
5175 };
5176 } else {
5177 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5178 // Seeking such BufferChunks is not supported.
5179 debug_assert!(
5180 false,
5181 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5182 );
5183 }
5184
5185 highlights.captures.set_byte_range(self.range.clone());
5186 self.initialize_diagnostic_endpoints();
5187 }
5188 }
5189
5190 fn initialize_diagnostic_endpoints(&mut self) {
5191 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5192 && let Some(buffer) = self.buffer_snapshot
5193 {
5194 let mut diagnostic_endpoints = Vec::new();
5195 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5196 diagnostic_endpoints.push(DiagnosticEndpoint {
5197 offset: entry.range.start,
5198 is_start: true,
5199 severity: entry.diagnostic.severity,
5200 is_unnecessary: entry.diagnostic.is_unnecessary,
5201 underline: entry.diagnostic.underline,
5202 });
5203 diagnostic_endpoints.push(DiagnosticEndpoint {
5204 offset: entry.range.end,
5205 is_start: false,
5206 severity: entry.diagnostic.severity,
5207 is_unnecessary: entry.diagnostic.is_unnecessary,
5208 underline: entry.diagnostic.underline,
5209 });
5210 }
5211 diagnostic_endpoints
5212 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5213 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5214 self.hint_depth = 0;
5215 self.error_depth = 0;
5216 self.warning_depth = 0;
5217 self.information_depth = 0;
5218 }
5219 }
5220
5221 /// The current byte offset in the buffer.
5222 pub fn offset(&self) -> usize {
5223 self.range.start
5224 }
5225
5226 pub fn range(&self) -> Range<usize> {
5227 self.range.clone()
5228 }
5229
5230 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5231 let depth = match endpoint.severity {
5232 DiagnosticSeverity::ERROR => &mut self.error_depth,
5233 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5234 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5235 DiagnosticSeverity::HINT => &mut self.hint_depth,
5236 _ => return,
5237 };
5238 if endpoint.is_start {
5239 *depth += 1;
5240 } else {
5241 *depth -= 1;
5242 }
5243
5244 if endpoint.is_unnecessary {
5245 if endpoint.is_start {
5246 self.unnecessary_depth += 1;
5247 } else {
5248 self.unnecessary_depth -= 1;
5249 }
5250 }
5251 }
5252
5253 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5254 if self.error_depth > 0 {
5255 Some(DiagnosticSeverity::ERROR)
5256 } else if self.warning_depth > 0 {
5257 Some(DiagnosticSeverity::WARNING)
5258 } else if self.information_depth > 0 {
5259 Some(DiagnosticSeverity::INFORMATION)
5260 } else if self.hint_depth > 0 {
5261 Some(DiagnosticSeverity::HINT)
5262 } else {
5263 None
5264 }
5265 }
5266
5267 fn current_code_is_unnecessary(&self) -> bool {
5268 self.unnecessary_depth > 0
5269 }
5270}
5271
5272impl<'a> Iterator for BufferChunks<'a> {
5273 type Item = Chunk<'a>;
5274
5275 fn next(&mut self) -> Option<Self::Item> {
5276 let mut next_capture_start = usize::MAX;
5277 let mut next_diagnostic_endpoint = usize::MAX;
5278
5279 if let Some(highlights) = self.highlights.as_mut() {
5280 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5281 if *parent_capture_end <= self.range.start {
5282 highlights.stack.pop();
5283 } else {
5284 break;
5285 }
5286 }
5287
5288 if highlights.next_capture.is_none() {
5289 highlights.next_capture = highlights.captures.next();
5290 }
5291
5292 while let Some(capture) = highlights.next_capture.as_ref() {
5293 if self.range.start < capture.node.start_byte() {
5294 next_capture_start = capture.node.start_byte();
5295 break;
5296 } else {
5297 let highlight_id =
5298 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5299 highlights
5300 .stack
5301 .push((capture.node.end_byte(), highlight_id));
5302 highlights.next_capture = highlights.captures.next();
5303 }
5304 }
5305 }
5306
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
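            // Slice the chunk's per-byte `tabs` and `chars` bitmaps down to the
            // sub-range being returned, shifting and masking them so they stay
            // aligned with `slice`.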
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}
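
// A minimal usage sketch for the chunk iterator above (assuming a `BufferSnapshot`
// named `snapshot` and its `chunks(range, language_aware)` method): concatenating
// `chunk.text` for every yielded `Chunk` reproduces the text of the range, while
// each chunk carries its syntax highlight id, diagnostic severity, and tab/char
// bitmaps:
//
//     let highlighted: String = snapshot
//         .chunks(0..snapshot.len(), true)
//         .map(|chunk| chunk.text)
//         .collect();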

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been shrunk or enlarged by the given size along the given direction.
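    ///
    /// For example, growing a four-space indent by four more spaces yields an
    /// eight-space indent, growing it by a tab (a different kind) leaves it
    /// unchanged, and growing an empty indent adopts the given size outright.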
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

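    /// The number of columns this indent occupies when tabs are expanded to
    /// `tab_size` columns each.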
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

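/// A minimal [`File`] implementation used by tests and the `test-support`
/// feature; the methods that would touch the filesystem or the RPC layer are
/// left unimplemented.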
#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

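/// Coalesces an increasing sequence of `u32` values (typically buffer rows)
/// into contiguous ranges, starting a new range whenever the next value is not
/// exactly one past the current range's end or the range has already reached
/// `max_len` values. For example, `[1, 2, 3, 5, 6]` with a `max_len` of 8
/// yields `1..4` and `5..7`.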
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

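/// Classifies characters as word, whitespace, or punctuation characters,
/// consulting the language scope's configured word characters (or its
/// completion-query or linked-edit characters, depending on the context) in
/// addition to Unicode alphanumerics and `_`.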
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}
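
// A minimal usage sketch: with no language scope attached, classification falls
// back to Unicode alphanumerics plus '_' for words, so `CharClassifier::new(None)`
// reports `CharKind::Word` for 'a', `CharKind::Whitespace` for ' ', and
// `CharKind::Punctuation` for '-'; calling `.ignore_punctuation(true)` makes the
// classifier treat '-' as a word character instead.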

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}
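
// A small worked example: for a rope containing "fn main() {   \n}\t\t\n", the
// returned ranges cover the three trailing spaces after the opening brace and
// the two tabs before the final newline.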