1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff_with_offsets,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::channel::oneshot;
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TaskLabel, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::{Arc, LazyLock},
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
79/// A label for the background task spawned by the buffer to compute
80/// a diff against the contents of its file.
81pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
82
83/// Indicate whether a [`Buffer`] has permissions to edit.
84#[derive(PartialEq, Clone, Copy, Debug)]
85pub enum Capability {
86 /// The buffer is a mutable replica.
87 ReadWrite,
88 /// The buffer is a mutable replica, but toggled to read-only.
89 Read,
90 /// The buffer is a read-only replica.
91 ReadOnly,
92}
93
94impl Capability {
95 /// Returns `true` if the capability is `ReadWrite`.
96 pub fn editable(self) -> bool {
97 matches!(self, Capability::ReadWrite)
98 }
99}
100
101pub type BufferRow = u32;
102
103/// An in-memory representation of a source code file, including its text,
104/// syntax trees, git status, and diagnostics.
105pub struct Buffer {
106 text: TextBuffer,
107 branch_state: Option<BufferBranchState>,
108 /// Filesystem state, `None` when there is no path.
109 file: Option<Arc<dyn File>>,
110 /// The mtime of the file when this buffer was last loaded from
111 /// or saved to disk.
112 saved_mtime: Option<MTime>,
113 /// The version vector when this buffer was last loaded from
114 /// or saved to disk.
115 saved_version: clock::Global,
116 preview_version: clock::Global,
117 transaction_depth: usize,
118 was_dirty_before_starting_transaction: Option<bool>,
119 reload_task: Option<Task<Result<()>>>,
120 language: Option<Arc<Language>>,
121 autoindent_requests: Vec<Arc<AutoindentRequest>>,
122 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
123 pending_autoindent: Option<Task<()>>,
124 sync_parse_timeout: Duration,
125 syntax_map: Mutex<SyntaxMap>,
126 reparse: Option<Task<()>>,
127 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
128 non_text_state_update_count: usize,
129 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
130 remote_selections: TreeMap<ReplicaId, SelectionSet>,
131 diagnostics_timestamp: clock::Lamport,
132 completion_triggers: BTreeSet<String>,
133 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
134 completion_triggers_timestamp: clock::Lamport,
135 deferred_ops: OperationQueue<Operation>,
136 capability: Capability,
137 has_conflict: bool,
138 /// Memoize calls to has_changes_since(saved_version).
139 /// The contents of a cell are (self.version, has_changes) at the time of a last call.
140 has_unsaved_edits: Cell<(clock::Global, bool)>,
141 change_bits: Vec<rc::Weak<Cell<bool>>>,
142 _subscriptions: Vec<gpui::Subscription>,
143 tree_sitter_data: Arc<TreeSitterData>,
144 encoding: &'static Encoding,
145 has_bom: bool,
146}
147
148#[derive(Debug)]
149pub struct TreeSitterData {
150 chunks: RowChunks,
151 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
152}
153
154const MAX_ROWS_IN_A_CHUNK: u32 = 50;
155
156impl TreeSitterData {
157 fn clear(&mut self, snapshot: text::BufferSnapshot) {
158 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
159 self.brackets_by_chunks.get_mut().clear();
160 self.brackets_by_chunks
161 .get_mut()
162 .resize(self.chunks.len(), None);
163 }
164
165 fn new(snapshot: text::BufferSnapshot) -> Self {
166 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
167 Self {
168 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
169 chunks,
170 }
171 }
172
173 fn version(&self) -> &clock::Global {
174 self.chunks.version()
175 }
176}
177
178#[derive(Copy, Clone, Debug, PartialEq, Eq)]
179pub enum ParseStatus {
180 Idle,
181 Parsing,
182}
183
184struct BufferBranchState {
185 base_buffer: Entity<Buffer>,
186 merged_operations: Vec<Lamport>,
187}
188
189/// An immutable, cheaply cloneable representation of a fixed
190/// state of a buffer.
191pub struct BufferSnapshot {
192 pub text: text::BufferSnapshot,
193 pub syntax: SyntaxSnapshot,
194 file: Option<Arc<dyn File>>,
195 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
196 remote_selections: TreeMap<ReplicaId, SelectionSet>,
197 language: Option<Arc<Language>>,
198 non_text_state_update_count: usize,
199 tree_sitter_data: Arc<TreeSitterData>,
200 pub capability: Capability,
201}
202
203/// The kind and amount of indentation in a particular line. For now,
204/// assumes that indentation is all the same character.
205#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
206pub struct IndentSize {
207 /// The number of bytes that comprise the indentation.
208 pub len: u32,
209 /// The kind of whitespace used for indentation.
210 pub kind: IndentKind,
211}
212
213/// A whitespace character that's used for indentation.
214#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
215pub enum IndentKind {
216 /// An ASCII space character.
217 #[default]
218 Space,
219 /// An ASCII tab character.
220 Tab,
221}
222
223/// The shape of a selection cursor.
224#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
225pub enum CursorShape {
226 /// A vertical bar
227 #[default]
228 Bar,
229 /// A block that surrounds the following character
230 Block,
231 /// An underline that runs along the following character
232 Underline,
233 /// A box drawn around the following character
234 Hollow,
235}
236
237impl From<settings::CursorShape> for CursorShape {
238 fn from(shape: settings::CursorShape) -> Self {
239 match shape {
240 settings::CursorShape::Bar => CursorShape::Bar,
241 settings::CursorShape::Block => CursorShape::Block,
242 settings::CursorShape::Underline => CursorShape::Underline,
243 settings::CursorShape::Hollow => CursorShape::Hollow,
244 }
245 }
246}
247
248#[derive(Clone, Debug)]
249struct SelectionSet {
250 line_mode: bool,
251 cursor_shape: CursorShape,
252 selections: Arc<[Selection<Anchor>]>,
253 lamport_timestamp: clock::Lamport,
254}
255
256/// A diagnostic associated with a certain range of a buffer.
257#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
258pub struct Diagnostic {
259 /// The name of the service that produced this diagnostic.
260 pub source: Option<String>,
261 /// The ID provided by the dynamic registration that produced this diagnostic.
262 pub registration_id: Option<SharedString>,
263 /// A machine-readable code that identifies this diagnostic.
264 pub code: Option<NumberOrString>,
265 pub code_description: Option<lsp::Uri>,
266 /// Whether this diagnostic is a hint, warning, or error.
267 pub severity: DiagnosticSeverity,
268 /// The human-readable message associated with this diagnostic.
269 pub message: String,
270 /// The human-readable message (in markdown format)
271 pub markdown: Option<String>,
272 /// An id that identifies the group to which this diagnostic belongs.
273 ///
274 /// When a language server produces a diagnostic with
275 /// one or more associated diagnostics, those diagnostics are all
276 /// assigned a single group ID.
277 pub group_id: usize,
278 /// Whether this diagnostic is the primary diagnostic for its group.
279 ///
280 /// In a given group, the primary diagnostic is the top-level diagnostic
281 /// returned by the language server. The non-primary diagnostics are the
282 /// associated diagnostics.
283 pub is_primary: bool,
284 /// Whether this diagnostic is considered to originate from an analysis of
285 /// files on disk, as opposed to any unsaved buffer contents. This is a
286 /// property of a given diagnostic source, and is configured for a given
287 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
288 /// for the language server.
289 pub is_disk_based: bool,
290 /// Whether this diagnostic marks unnecessary code.
291 pub is_unnecessary: bool,
292 /// Quick separation of diagnostics groups based by their source.
293 pub source_kind: DiagnosticSourceKind,
294 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
295 pub data: Option<Value>,
296 /// Whether to underline the corresponding text range in the editor.
297 pub underline: bool,
298}
299
300#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
301pub enum DiagnosticSourceKind {
302 Pulled,
303 Pushed,
304 Other,
305}
306
307/// An operation used to synchronize this buffer with its other replicas.
308#[derive(Clone, Debug, PartialEq)]
309pub enum Operation {
310 /// A text operation.
311 Buffer(text::Operation),
312
313 /// An update to the buffer's diagnostics.
314 UpdateDiagnostics {
315 /// The id of the language server that produced the new diagnostics.
316 server_id: LanguageServerId,
317 /// The diagnostics.
318 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
319 /// The buffer's lamport timestamp.
320 lamport_timestamp: clock::Lamport,
321 },
322
323 /// An update to the most recent selections in this buffer.
324 UpdateSelections {
325 /// The selections.
326 selections: Arc<[Selection<Anchor>]>,
327 /// The buffer's lamport timestamp.
328 lamport_timestamp: clock::Lamport,
329 /// Whether the selections are in 'line mode'.
330 line_mode: bool,
331 /// The [`CursorShape`] associated with these selections.
332 cursor_shape: CursorShape,
333 },
334
335 /// An update to the characters that should trigger autocompletion
336 /// for this buffer.
337 UpdateCompletionTriggers {
338 /// The characters that trigger autocompletion.
339 triggers: Vec<String>,
340 /// The buffer's lamport timestamp.
341 lamport_timestamp: clock::Lamport,
342 /// The language server ID.
343 server_id: LanguageServerId,
344 },
345
346 /// An update to the line ending type of this buffer.
347 UpdateLineEnding {
348 /// The line ending type.
349 line_ending: LineEnding,
350 /// The buffer's lamport timestamp.
351 lamport_timestamp: clock::Lamport,
352 },
353}
354
355/// An event that occurs in a buffer.
356#[derive(Clone, Debug, PartialEq)]
357pub enum BufferEvent {
358 /// The buffer was changed in a way that must be
359 /// propagated to its other replicas.
360 Operation {
361 operation: Operation,
362 is_local: bool,
363 },
364 /// The buffer was edited.
365 Edited,
366 /// The buffer's `dirty` bit changed.
367 DirtyChanged,
368 /// The buffer was saved.
369 Saved,
370 /// The buffer's file was changed on disk.
371 FileHandleChanged,
372 /// The buffer was reloaded.
373 Reloaded,
374 /// The buffer is in need of a reload
375 ReloadNeeded,
376 /// The buffer's language was changed.
377 /// The boolean indicates whether this buffer did not have a language before, but does now.
378 LanguageChanged(bool),
379 /// The buffer's syntax trees were updated.
380 Reparsed,
381 /// The buffer's diagnostics were updated.
382 DiagnosticsUpdated,
383 /// The buffer gained or lost editing capabilities.
384 CapabilityChanged,
385}
386
387/// The file associated with a buffer.
388pub trait File: Send + Sync + Any {
389 /// Returns the [`LocalFile`] associated with this file, if the
390 /// file is local.
391 fn as_local(&self) -> Option<&dyn LocalFile>;
392
393 /// Returns whether this file is local.
394 fn is_local(&self) -> bool {
395 self.as_local().is_some()
396 }
397
398 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
399 /// only available in some states, such as modification time.
400 fn disk_state(&self) -> DiskState;
401
402 /// Returns the path of this file relative to the worktree's root directory.
403 fn path(&self) -> &Arc<RelPath>;
404
405 /// Returns the path of this file relative to the worktree's parent directory (this means it
406 /// includes the name of the worktree's root folder).
407 fn full_path(&self, cx: &App) -> PathBuf;
408
409 /// Returns the path style of this file.
410 fn path_style(&self, cx: &App) -> PathStyle;
411
412 /// Returns the last component of this handle's absolute path. If this handle refers to the root
413 /// of its worktree, then this method will return the name of the worktree itself.
414 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
415
416 /// Returns the id of the worktree to which this file belongs.
417 ///
418 /// This is needed for looking up project-specific settings.
419 fn worktree_id(&self, cx: &App) -> WorktreeId;
420
421 /// Converts this file into a protobuf message.
422 fn to_proto(&self, cx: &App) -> rpc::proto::File;
423
424 /// Return whether Zed considers this to be a private file.
425 fn is_private(&self) -> bool;
426}
427
428/// The file's storage status - whether it's stored (`Present`), and if so when it was last
429/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
430/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
431/// indicator for new files.
432#[derive(Copy, Clone, Debug, PartialEq)]
433pub enum DiskState {
434 /// File created in Zed that has not been saved.
435 New,
436 /// File present on the filesystem.
437 Present { mtime: MTime },
438 /// Deleted file that was previously present.
439 Deleted,
440 /// An old version of a file that was previously present
441 /// usually from a version control system. e.g. A git blob
442 Historic { was_deleted: bool },
443}
444
445impl DiskState {
446 /// Returns the file's last known modification time on disk.
447 pub fn mtime(self) -> Option<MTime> {
448 match self {
449 DiskState::New => None,
450 DiskState::Present { mtime } => Some(mtime),
451 DiskState::Deleted => None,
452 DiskState::Historic { .. } => None,
453 }
454 }
455
456 pub fn exists(&self) -> bool {
457 match self {
458 DiskState::New => false,
459 DiskState::Present { .. } => true,
460 DiskState::Deleted => false,
461 DiskState::Historic { .. } => false,
462 }
463 }
464
465 /// Returns true if this state represents a deleted file.
466 pub fn is_deleted(&self) -> bool {
467 match self {
468 DiskState::Deleted => true,
469 DiskState::Historic { was_deleted } => *was_deleted,
470 _ => false,
471 }
472 }
473}
474
475/// The file associated with a buffer, in the case where the file is on the local disk.
476pub trait LocalFile: File {
477 /// Returns the absolute path of this file
478 fn abs_path(&self, cx: &App) -> PathBuf;
479
480 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
481 fn load(&self, cx: &App) -> Task<Result<String>>;
482
483 /// Loads the file's contents from disk.
484 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
485}
486
487/// The auto-indent behavior associated with an editing operation.
488/// For some editing operations, each affected line of text has its
489/// indentation recomputed. For other operations, the entire block
490/// of edited text is adjusted uniformly.
491#[derive(Clone, Debug)]
492pub enum AutoindentMode {
493 /// Indent each line of inserted text.
494 EachLine,
495 /// Apply the same indentation adjustment to all of the lines
496 /// in a given insertion.
497 Block {
498 /// The original indentation column of the first line of each
499 /// insertion, if it has been copied.
500 ///
501 /// Knowing this makes it possible to preserve the relative indentation
502 /// of every line in the insertion from when it was copied.
503 ///
504 /// If the original indent column is `a`, and the first line of insertion
505 /// is then auto-indented to column `b`, then every other line of
506 /// the insertion will be auto-indented to column `b - a`
507 original_indent_columns: Vec<Option<u32>>,
508 },
509}
510
511#[derive(Clone)]
512struct AutoindentRequest {
513 before_edit: BufferSnapshot,
514 entries: Vec<AutoindentRequestEntry>,
515 is_block_mode: bool,
516 ignore_empty_lines: bool,
517}
518
519#[derive(Debug, Clone)]
520struct AutoindentRequestEntry {
521 /// A range of the buffer whose indentation should be adjusted.
522 range: Range<Anchor>,
523 /// Whether or not these lines should be considered brand new, for the
524 /// purpose of auto-indent. When text is not new, its indentation will
525 /// only be adjusted if the suggested indentation level has *changed*
526 /// since the edit was made.
527 first_line_is_new: bool,
528 indent_size: IndentSize,
529 original_indent_column: Option<u32>,
530}
531
532#[derive(Debug)]
533struct IndentSuggestion {
534 basis_row: u32,
535 delta: Ordering,
536 within_error: bool,
537}
538
539struct BufferChunkHighlights<'a> {
540 captures: SyntaxMapCaptures<'a>,
541 next_capture: Option<SyntaxMapCapture<'a>>,
542 stack: Vec<(usize, HighlightId)>,
543 highlight_maps: Vec<HighlightMap>,
544}
545
546/// An iterator that yields chunks of a buffer's text, along with their
547/// syntax highlights and diagnostic status.
548pub struct BufferChunks<'a> {
549 buffer_snapshot: Option<&'a BufferSnapshot>,
550 range: Range<usize>,
551 chunks: text::Chunks<'a>,
552 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
553 error_depth: usize,
554 warning_depth: usize,
555 information_depth: usize,
556 hint_depth: usize,
557 unnecessary_depth: usize,
558 underline: bool,
559 highlights: Option<BufferChunkHighlights<'a>>,
560}
561
562/// A chunk of a buffer's text, along with its syntax highlight and
563/// diagnostic status.
564#[derive(Clone, Debug, Default)]
565pub struct Chunk<'a> {
566 /// The text of the chunk.
567 pub text: &'a str,
568 /// The syntax highlighting style of the chunk.
569 pub syntax_highlight_id: Option<HighlightId>,
570 /// The highlight style that has been applied to this chunk in
571 /// the editor.
572 pub highlight_style: Option<HighlightStyle>,
573 /// The severity of diagnostic associated with this chunk, if any.
574 pub diagnostic_severity: Option<DiagnosticSeverity>,
575 /// A bitset of which characters are tabs in this string.
576 pub tabs: u128,
577 /// Bitmap of character indices in this chunk
578 pub chars: u128,
579 /// Whether this chunk of text is marked as unnecessary.
580 pub is_unnecessary: bool,
581 /// Whether this chunk of text was originally a tab character.
582 pub is_tab: bool,
583 /// Whether this chunk of text was originally an inlay.
584 pub is_inlay: bool,
585 /// Whether to underline the corresponding text range in the editor.
586 pub underline: bool,
587}
588
589/// A set of edits to a given version of a buffer, computed asynchronously.
590#[derive(Debug)]
591pub struct Diff {
592 pub base_version: clock::Global,
593 pub line_ending: LineEnding,
594 pub edits: Vec<(Range<usize>, Arc<str>)>,
595}
596
597#[derive(Debug, Clone, Copy)]
598pub(crate) struct DiagnosticEndpoint {
599 offset: usize,
600 is_start: bool,
601 underline: bool,
602 severity: DiagnosticSeverity,
603 is_unnecessary: bool,
604}
605
606/// A class of characters, used for characterizing a run of text.
607#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
608pub enum CharKind {
609 /// Whitespace.
610 Whitespace,
611 /// Punctuation.
612 Punctuation,
613 /// Word.
614 Word,
615}
616
617/// Context for character classification within a specific scope.
618#[derive(Copy, Clone, Eq, PartialEq, Debug)]
619pub enum CharScopeContext {
620 /// Character classification for completion queries.
621 ///
622 /// This context treats certain characters as word constituents that would
623 /// normally be considered punctuation, such as '-' in Tailwind classes
624 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
625 Completion,
626 /// Character classification for linked edits.
627 ///
628 /// This context handles characters that should be treated as part of
629 /// identifiers during linked editing operations, such as '.' in JSX
630 /// component names like `<Animated.View>`.
631 LinkedEdit,
632}
633
634/// A runnable is a set of data about a region that could be resolved into a task
635pub struct Runnable {
636 pub tags: SmallVec<[RunnableTag; 1]>,
637 pub language: Arc<Language>,
638 pub buffer: BufferId,
639}
640
641#[derive(Default, Clone, Debug)]
642pub struct HighlightedText {
643 pub text: SharedString,
644 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
645}
646
647#[derive(Default, Debug)]
648struct HighlightedTextBuilder {
649 pub text: String,
650 highlights: Vec<(Range<usize>, HighlightStyle)>,
651}
652
653impl HighlightedText {
654 pub fn from_buffer_range<T: ToOffset>(
655 range: Range<T>,
656 snapshot: &text::BufferSnapshot,
657 syntax_snapshot: &SyntaxSnapshot,
658 override_style: Option<HighlightStyle>,
659 syntax_theme: &SyntaxTheme,
660 ) -> Self {
661 let mut highlighted_text = HighlightedTextBuilder::default();
662 highlighted_text.add_text_from_buffer_range(
663 range,
664 snapshot,
665 syntax_snapshot,
666 override_style,
667 syntax_theme,
668 );
669 highlighted_text.build()
670 }
671
672 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
673 gpui::StyledText::new(self.text.clone())
674 .with_default_highlights(default_style, self.highlights.iter().cloned())
675 }
676
677 /// Returns the first line without leading whitespace unless highlighted
678 /// and a boolean indicating if there are more lines after
679 pub fn first_line_preview(self) -> (Self, bool) {
680 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
681 let first_line = &self.text[..newline_ix];
682
683 // Trim leading whitespace, unless an edit starts prior to it.
684 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
685 if let Some((first_highlight_range, _)) = self.highlights.first() {
686 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
687 }
688
689 let preview_text = &first_line[preview_start_ix..];
690 let preview_highlights = self
691 .highlights
692 .into_iter()
693 .skip_while(|(range, _)| range.end <= preview_start_ix)
694 .take_while(|(range, _)| range.start < newline_ix)
695 .filter_map(|(mut range, highlight)| {
696 range.start = range.start.saturating_sub(preview_start_ix);
697 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
698 if range.is_empty() {
699 None
700 } else {
701 Some((range, highlight))
702 }
703 });
704
705 let preview = Self {
706 text: SharedString::new(preview_text),
707 highlights: preview_highlights.collect(),
708 };
709
710 (preview, self.text.len() > newline_ix)
711 }
712}
713
714impl HighlightedTextBuilder {
715 pub fn build(self) -> HighlightedText {
716 HighlightedText {
717 text: self.text.into(),
718 highlights: self.highlights,
719 }
720 }
721
722 pub fn add_text_from_buffer_range<T: ToOffset>(
723 &mut self,
724 range: Range<T>,
725 snapshot: &text::BufferSnapshot,
726 syntax_snapshot: &SyntaxSnapshot,
727 override_style: Option<HighlightStyle>,
728 syntax_theme: &SyntaxTheme,
729 ) {
730 let range = range.to_offset(snapshot);
731 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
732 let start = self.text.len();
733 self.text.push_str(chunk.text);
734 let end = self.text.len();
735
736 if let Some(highlight_style) = chunk
737 .syntax_highlight_id
738 .and_then(|id| id.style(syntax_theme))
739 {
740 let highlight_style = override_style.map_or(highlight_style, |override_style| {
741 highlight_style.highlight(override_style)
742 });
743 self.highlights.push((start..end, highlight_style));
744 } else if let Some(override_style) = override_style {
745 self.highlights.push((start..end, override_style));
746 }
747 }
748 }
749
750 fn highlighted_chunks<'a>(
751 range: Range<usize>,
752 snapshot: &'a text::BufferSnapshot,
753 syntax_snapshot: &'a SyntaxSnapshot,
754 ) -> BufferChunks<'a> {
755 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
756 grammar
757 .highlights_config
758 .as_ref()
759 .map(|config| &config.query)
760 });
761
762 let highlight_maps = captures
763 .grammars()
764 .iter()
765 .map(|grammar| grammar.highlight_map())
766 .collect();
767
768 BufferChunks::new(
769 snapshot.as_rope(),
770 range,
771 Some((captures, highlight_maps)),
772 false,
773 None,
774 )
775 }
776}
777
778#[derive(Clone)]
779pub struct EditPreview {
780 old_snapshot: text::BufferSnapshot,
781 applied_edits_snapshot: text::BufferSnapshot,
782 syntax_snapshot: SyntaxSnapshot,
783}
784
785impl EditPreview {
786 pub fn as_unified_diff(
787 &self,
788 file: Option<&Arc<dyn File>>,
789 edits: &[(Range<Anchor>, impl AsRef<str>)],
790 ) -> Option<String> {
791 let (first, _) = edits.first()?;
792 let (last, _) = edits.last()?;
793
794 let start = first.start.to_point(&self.old_snapshot);
795 let old_end = last.end.to_point(&self.old_snapshot);
796 let new_end = last
797 .end
798 .bias_right(&self.old_snapshot)
799 .to_point(&self.applied_edits_snapshot);
800
801 let start = Point::new(start.row.saturating_sub(3), 0);
802 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
803 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
804
805 let diff_body = unified_diff_with_offsets(
806 &self
807 .old_snapshot
808 .text_for_range(start..old_end)
809 .collect::<String>(),
810 &self
811 .applied_edits_snapshot
812 .text_for_range(start..new_end)
813 .collect::<String>(),
814 start.row,
815 start.row,
816 );
817
818 let path = file.map(|f| f.path().as_unix_str());
819 let header = match path {
820 Some(p) => format!("--- a/{}\n+++ b/{}\n", p, p),
821 None => String::new(),
822 };
823
824 Some(format!("{}{}", header, diff_body))
825 }
826
827 pub fn highlight_edits(
828 &self,
829 current_snapshot: &BufferSnapshot,
830 edits: &[(Range<Anchor>, impl AsRef<str>)],
831 include_deletions: bool,
832 cx: &App,
833 ) -> HighlightedText {
834 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
835 return HighlightedText::default();
836 };
837
838 let mut highlighted_text = HighlightedTextBuilder::default();
839
840 let visible_range_in_preview_snapshot =
841 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
842 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
843
844 let insertion_highlight_style = HighlightStyle {
845 background_color: Some(cx.theme().status().created_background),
846 ..Default::default()
847 };
848 let deletion_highlight_style = HighlightStyle {
849 background_color: Some(cx.theme().status().deleted_background),
850 ..Default::default()
851 };
852 let syntax_theme = cx.theme().syntax();
853
854 for (range, edit_text) in edits {
855 let edit_new_end_in_preview_snapshot = range
856 .end
857 .bias_right(&self.old_snapshot)
858 .to_offset(&self.applied_edits_snapshot);
859 let edit_start_in_preview_snapshot =
860 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
861
862 let unchanged_range_in_preview_snapshot =
863 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
864 if !unchanged_range_in_preview_snapshot.is_empty() {
865 highlighted_text.add_text_from_buffer_range(
866 unchanged_range_in_preview_snapshot,
867 &self.applied_edits_snapshot,
868 &self.syntax_snapshot,
869 None,
870 syntax_theme,
871 );
872 }
873
874 let range_in_current_snapshot = range.to_offset(current_snapshot);
875 if include_deletions && !range_in_current_snapshot.is_empty() {
876 highlighted_text.add_text_from_buffer_range(
877 range_in_current_snapshot,
878 ¤t_snapshot.text,
879 ¤t_snapshot.syntax,
880 Some(deletion_highlight_style),
881 syntax_theme,
882 );
883 }
884
885 if !edit_text.as_ref().is_empty() {
886 highlighted_text.add_text_from_buffer_range(
887 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
888 &self.applied_edits_snapshot,
889 &self.syntax_snapshot,
890 Some(insertion_highlight_style),
891 syntax_theme,
892 );
893 }
894
895 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
896 }
897
898 highlighted_text.add_text_from_buffer_range(
899 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
900 &self.applied_edits_snapshot,
901 &self.syntax_snapshot,
902 None,
903 syntax_theme,
904 );
905
906 highlighted_text.build()
907 }
908
909 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
910 cx.new(|cx| {
911 let mut buffer = Buffer::local_normalized(
912 self.applied_edits_snapshot.as_rope().clone(),
913 self.applied_edits_snapshot.line_ending(),
914 cx,
915 );
916 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
917 buffer
918 })
919 }
920
921 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
922 let (first, _) = edits.first()?;
923 let (last, _) = edits.last()?;
924
925 let start = first
926 .start
927 .bias_left(&self.old_snapshot)
928 .to_point(&self.applied_edits_snapshot);
929 let end = last
930 .end
931 .bias_right(&self.old_snapshot)
932 .to_point(&self.applied_edits_snapshot);
933
934 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
935 let range = Point::new(start.row, 0)
936 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
937
938 Some(range)
939 }
940}
941
942#[derive(Clone, Debug, PartialEq, Eq)]
943pub struct BracketMatch<T> {
944 pub open_range: Range<T>,
945 pub close_range: Range<T>,
946 pub newline_only: bool,
947 pub syntax_layer_depth: usize,
948 pub color_index: Option<usize>,
949}
950
951impl<T> BracketMatch<T> {
952 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
953 (self.open_range, self.close_range)
954 }
955}
956
957impl Buffer {
958 /// Create a new buffer with the given base text.
959 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
960 Self::build(
961 TextBuffer::new(
962 ReplicaId::LOCAL,
963 cx.entity_id().as_non_zero_u64().into(),
964 base_text.into(),
965 ),
966 None,
967 Capability::ReadWrite,
968 )
969 }
970
971 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
972 pub fn local_normalized(
973 base_text_normalized: Rope,
974 line_ending: LineEnding,
975 cx: &Context<Self>,
976 ) -> Self {
977 Self::build(
978 TextBuffer::new_normalized(
979 ReplicaId::LOCAL,
980 cx.entity_id().as_non_zero_u64().into(),
981 line_ending,
982 base_text_normalized,
983 ),
984 None,
985 Capability::ReadWrite,
986 )
987 }
988
989 /// Create a new buffer that is a replica of a remote buffer.
990 pub fn remote(
991 remote_id: BufferId,
992 replica_id: ReplicaId,
993 capability: Capability,
994 base_text: impl Into<String>,
995 ) -> Self {
996 Self::build(
997 TextBuffer::new(replica_id, remote_id, base_text.into()),
998 None,
999 capability,
1000 )
1001 }
1002
1003 /// Create a new buffer that is a replica of a remote buffer, populating its
1004 /// state from the given protobuf message.
1005 pub fn from_proto(
1006 replica_id: ReplicaId,
1007 capability: Capability,
1008 message: proto::BufferState,
1009 file: Option<Arc<dyn File>>,
1010 ) -> Result<Self> {
1011 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
1012 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
1013 let mut this = Self::build(buffer, file, capability);
1014 this.text.set_line_ending(proto::deserialize_line_ending(
1015 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
1016 ));
1017 this.saved_version = proto::deserialize_version(&message.saved_version);
1018 this.saved_mtime = message.saved_mtime.map(|time| time.into());
1019 Ok(this)
1020 }
1021
1022 /// Serialize the buffer's state to a protobuf message.
1023 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
1024 proto::BufferState {
1025 id: self.remote_id().into(),
1026 file: self.file.as_ref().map(|f| f.to_proto(cx)),
1027 base_text: self.base_text().to_string(),
1028 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
1029 saved_version: proto::serialize_version(&self.saved_version),
1030 saved_mtime: self.saved_mtime.map(|time| time.into()),
1031 }
1032 }
1033
1034 /// Serialize as protobufs all of the changes to the buffer since the given version.
1035 pub fn serialize_ops(
1036 &self,
1037 since: Option<clock::Global>,
1038 cx: &App,
1039 ) -> Task<Vec<proto::Operation>> {
1040 let mut operations = Vec::new();
1041 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1042
1043 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1044 proto::serialize_operation(&Operation::UpdateSelections {
1045 selections: set.selections.clone(),
1046 lamport_timestamp: set.lamport_timestamp,
1047 line_mode: set.line_mode,
1048 cursor_shape: set.cursor_shape,
1049 })
1050 }));
1051
1052 for (server_id, diagnostics) in &self.diagnostics {
1053 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1054 lamport_timestamp: self.diagnostics_timestamp,
1055 server_id: *server_id,
1056 diagnostics: diagnostics.iter().cloned().collect(),
1057 }));
1058 }
1059
1060 for (server_id, completions) in &self.completion_triggers_per_language_server {
1061 operations.push(proto::serialize_operation(
1062 &Operation::UpdateCompletionTriggers {
1063 triggers: completions.iter().cloned().collect(),
1064 lamport_timestamp: self.completion_triggers_timestamp,
1065 server_id: *server_id,
1066 },
1067 ));
1068 }
1069
1070 let text_operations = self.text.operations().clone();
1071 cx.background_spawn(async move {
1072 let since = since.unwrap_or_default();
1073 operations.extend(
1074 text_operations
1075 .iter()
1076 .filter(|(_, op)| !since.observed(op.timestamp()))
1077 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1078 );
1079 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1080 operations
1081 })
1082 }
1083
1084 /// Assign a language to the buffer, returning the buffer.
1085 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1086 self.set_language_async(Some(language), cx);
1087 self
1088 }
1089
1090 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
1091 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1092 self.set_language(Some(language), cx);
1093 self
1094 }
1095
1096 /// Returns the [`Capability`] of this buffer.
1097 pub fn capability(&self) -> Capability {
1098 self.capability
1099 }
1100
1101 /// Whether this buffer can only be read.
1102 pub fn read_only(&self) -> bool {
1103 !self.capability.editable()
1104 }
1105
1106 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
1107 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1108 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1109 let snapshot = buffer.snapshot();
1110 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1111 let tree_sitter_data = TreeSitterData::new(snapshot);
1112 Self {
1113 saved_mtime,
1114 tree_sitter_data: Arc::new(tree_sitter_data),
1115 saved_version: buffer.version(),
1116 preview_version: buffer.version(),
1117 reload_task: None,
1118 transaction_depth: 0,
1119 was_dirty_before_starting_transaction: None,
1120 has_unsaved_edits: Cell::new((buffer.version(), false)),
1121 text: buffer,
1122 branch_state: None,
1123 file,
1124 capability,
1125 syntax_map,
1126 reparse: None,
1127 non_text_state_update_count: 0,
1128 sync_parse_timeout: Duration::from_millis(1),
1129 parse_status: watch::channel(ParseStatus::Idle),
1130 autoindent_requests: Default::default(),
1131 wait_for_autoindent_txs: Default::default(),
1132 pending_autoindent: Default::default(),
1133 language: None,
1134 remote_selections: Default::default(),
1135 diagnostics: Default::default(),
1136 diagnostics_timestamp: Lamport::MIN,
1137 completion_triggers: Default::default(),
1138 completion_triggers_per_language_server: Default::default(),
1139 completion_triggers_timestamp: Lamport::MIN,
1140 deferred_ops: OperationQueue::new(),
1141 has_conflict: false,
1142 change_bits: Default::default(),
1143 _subscriptions: Vec::new(),
1144 encoding: encoding_rs::UTF_8,
1145 has_bom: false,
1146 }
1147 }
1148
1149 pub fn build_snapshot(
1150 text: Rope,
1151 language: Option<Arc<Language>>,
1152 language_registry: Option<Arc<LanguageRegistry>>,
1153 cx: &mut App,
1154 ) -> impl Future<Output = BufferSnapshot> + use<> {
1155 let entity_id = cx.reserve_entity::<Self>().entity_id();
1156 let buffer_id = entity_id.as_non_zero_u64().into();
1157 async move {
1158 let text =
1159 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1160 .snapshot();
1161 let mut syntax = SyntaxMap::new(&text).snapshot();
1162 if let Some(language) = language.clone() {
1163 let language_registry = language_registry.clone();
1164 syntax.reparse(&text, language_registry, language);
1165 }
1166 let tree_sitter_data = TreeSitterData::new(text.clone());
1167 BufferSnapshot {
1168 text,
1169 syntax,
1170 file: None,
1171 diagnostics: Default::default(),
1172 remote_selections: Default::default(),
1173 tree_sitter_data: Arc::new(tree_sitter_data),
1174 language,
1175 non_text_state_update_count: 0,
1176 capability: Capability::ReadOnly,
1177 }
1178 }
1179 }
1180
1181 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1182 let entity_id = cx.reserve_entity::<Self>().entity_id();
1183 let buffer_id = entity_id.as_non_zero_u64().into();
1184 let text = TextBuffer::new_normalized(
1185 ReplicaId::LOCAL,
1186 buffer_id,
1187 Default::default(),
1188 Rope::new(),
1189 )
1190 .snapshot();
1191 let syntax = SyntaxMap::new(&text).snapshot();
1192 let tree_sitter_data = TreeSitterData::new(text.clone());
1193 BufferSnapshot {
1194 text,
1195 syntax,
1196 tree_sitter_data: Arc::new(tree_sitter_data),
1197 file: None,
1198 diagnostics: Default::default(),
1199 remote_selections: Default::default(),
1200 language: None,
1201 non_text_state_update_count: 0,
1202 capability: Capability::ReadOnly,
1203 }
1204 }
1205
1206 #[cfg(any(test, feature = "test-support"))]
1207 pub fn build_snapshot_sync(
1208 text: Rope,
1209 language: Option<Arc<Language>>,
1210 language_registry: Option<Arc<LanguageRegistry>>,
1211 cx: &mut App,
1212 ) -> BufferSnapshot {
1213 let entity_id = cx.reserve_entity::<Self>().entity_id();
1214 let buffer_id = entity_id.as_non_zero_u64().into();
1215 let text =
1216 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1217 .snapshot();
1218 let mut syntax = SyntaxMap::new(&text).snapshot();
1219 if let Some(language) = language.clone() {
1220 syntax.reparse(&text, language_registry, language);
1221 }
1222 let tree_sitter_data = TreeSitterData::new(text.clone());
1223 BufferSnapshot {
1224 text,
1225 syntax,
1226 tree_sitter_data: Arc::new(tree_sitter_data),
1227 file: None,
1228 diagnostics: Default::default(),
1229 remote_selections: Default::default(),
1230 language,
1231 non_text_state_update_count: 0,
1232 capability: Capability::ReadOnly,
1233 }
1234 }
1235
1236 /// Retrieve a snapshot of the buffer's current state. This is computationally
1237 /// cheap, and allows reading from the buffer on a background thread.
1238 pub fn snapshot(&self) -> BufferSnapshot {
1239 let text = self.text.snapshot();
1240 let mut syntax_map = self.syntax_map.lock();
1241 syntax_map.interpolate(&text);
1242 let syntax = syntax_map.snapshot();
1243
1244 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1245 Arc::new(TreeSitterData::new(text.clone()))
1246 } else {
1247 self.tree_sitter_data.clone()
1248 };
1249
1250 BufferSnapshot {
1251 text,
1252 syntax,
1253 tree_sitter_data,
1254 file: self.file.clone(),
1255 remote_selections: self.remote_selections.clone(),
1256 diagnostics: self.diagnostics.clone(),
1257 language: self.language.clone(),
1258 non_text_state_update_count: self.non_text_state_update_count,
1259 capability: self.capability,
1260 }
1261 }
1262
1263 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1264 let this = cx.entity();
1265 cx.new(|cx| {
1266 let mut branch = Self {
1267 branch_state: Some(BufferBranchState {
1268 base_buffer: this.clone(),
1269 merged_operations: Default::default(),
1270 }),
1271 language: self.language.clone(),
1272 has_conflict: self.has_conflict,
1273 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1274 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1275 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1276 };
1277 if let Some(language_registry) = self.language_registry() {
1278 branch.set_language_registry(language_registry);
1279 }
1280
1281 // Reparse the branch buffer so that we get syntax highlighting immediately.
1282 branch.reparse(cx, true);
1283
1284 branch
1285 })
1286 }
1287
1288 pub fn preview_edits(
1289 &self,
1290 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1291 cx: &App,
1292 ) -> Task<EditPreview> {
1293 let registry = self.language_registry();
1294 let language = self.language().cloned();
1295 let old_snapshot = self.text.snapshot();
1296 let mut branch_buffer = self.text.branch();
1297 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1298 cx.background_spawn(async move {
1299 if !edits.is_empty() {
1300 if let Some(language) = language.clone() {
1301 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1302 }
1303
1304 branch_buffer.edit(edits.iter().cloned());
1305 let snapshot = branch_buffer.snapshot();
1306 syntax_snapshot.interpolate(&snapshot);
1307
1308 if let Some(language) = language {
1309 syntax_snapshot.reparse(&snapshot, registry, language);
1310 }
1311 }
1312 EditPreview {
1313 old_snapshot,
1314 applied_edits_snapshot: branch_buffer.snapshot(),
1315 syntax_snapshot,
1316 }
1317 })
1318 }
1319
1320 /// Applies all of the changes in this buffer that intersect any of the
1321 /// given `ranges` to its base buffer.
1322 ///
1323 /// If `ranges` is empty, then all changes will be applied. This buffer must
1324 /// be a branch buffer to call this method.
1325 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1326 let Some(base_buffer) = self.base_buffer() else {
1327 debug_panic!("not a branch buffer");
1328 return;
1329 };
1330
1331 let mut ranges = if ranges.is_empty() {
1332 &[0..usize::MAX]
1333 } else {
1334 ranges.as_slice()
1335 }
1336 .iter()
1337 .peekable();
1338
1339 let mut edits = Vec::new();
1340 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1341 let mut is_included = false;
1342 while let Some(range) = ranges.peek() {
1343 if range.end < edit.new.start {
1344 ranges.next().unwrap();
1345 } else {
1346 if range.start <= edit.new.end {
1347 is_included = true;
1348 }
1349 break;
1350 }
1351 }
1352
1353 if is_included {
1354 edits.push((
1355 edit.old.clone(),
1356 self.text_for_range(edit.new.clone()).collect::<String>(),
1357 ));
1358 }
1359 }
1360
1361 let operation = base_buffer.update(cx, |base_buffer, cx| {
1362 // cx.emit(BufferEvent::DiffBaseChanged);
1363 base_buffer.edit(edits, None, cx)
1364 });
1365
1366 if let Some(operation) = operation
1367 && let Some(BufferBranchState {
1368 merged_operations, ..
1369 }) = &mut self.branch_state
1370 {
1371 merged_operations.push(operation);
1372 }
1373 }
1374
1375 fn on_base_buffer_event(
1376 &mut self,
1377 _: Entity<Buffer>,
1378 event: &BufferEvent,
1379 cx: &mut Context<Self>,
1380 ) {
1381 let BufferEvent::Operation { operation, .. } = event else {
1382 return;
1383 };
1384 let Some(BufferBranchState {
1385 merged_operations, ..
1386 }) = &mut self.branch_state
1387 else {
1388 return;
1389 };
1390
1391 let mut operation_to_undo = None;
1392 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1393 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1394 {
1395 merged_operations.remove(ix);
1396 operation_to_undo = Some(operation.timestamp);
1397 }
1398
1399 self.apply_ops([operation.clone()], cx);
1400
1401 if let Some(timestamp) = operation_to_undo {
1402 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1403 self.undo_operations(counts, cx);
1404 }
1405 }
1406
1407 #[cfg(test)]
1408 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1409 &self.text
1410 }
1411
1412 /// Retrieve a snapshot of the buffer's raw text, without any
1413 /// language-related state like the syntax tree or diagnostics.
1414 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1415 self.text.snapshot()
1416 }
1417
1418 /// The file associated with the buffer, if any.
1419 pub fn file(&self) -> Option<&Arc<dyn File>> {
1420 self.file.as_ref()
1421 }
1422
1423 /// The version of the buffer that was last saved or reloaded from disk.
1424 pub fn saved_version(&self) -> &clock::Global {
1425 &self.saved_version
1426 }
1427
1428 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1429 pub fn saved_mtime(&self) -> Option<MTime> {
1430 self.saved_mtime
1431 }
1432
1433 /// Returns the character encoding of the buffer's file.
1434 pub fn encoding(&self) -> &'static Encoding {
1435 self.encoding
1436 }
1437
1438 /// Sets the character encoding of the buffer.
1439 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1440 self.encoding = encoding;
1441 }
1442
1443 /// Returns whether the buffer has a Byte Order Mark.
1444 pub fn has_bom(&self) -> bool {
1445 self.has_bom
1446 }
1447
1448 /// Sets whether the buffer has a Byte Order Mark.
1449 pub fn set_has_bom(&mut self, has_bom: bool) {
1450 self.has_bom = has_bom;
1451 }
1452
1453 /// Assign a language to the buffer.
1454 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1455 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1456 }
1457
1458 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1459 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1460 self.set_language_(language, true, cx);
1461 }
1462
1463 fn set_language_(
1464 &mut self,
1465 language: Option<Arc<Language>>,
1466 may_block: bool,
1467 cx: &mut Context<Self>,
1468 ) {
1469 self.non_text_state_update_count += 1;
1470 self.syntax_map.lock().clear(&self.text);
1471 let old_language = std::mem::replace(&mut self.language, language);
1472 self.was_changed();
1473 self.reparse(cx, may_block);
1474 let has_fresh_language =
1475 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1476 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1477 }
1478
1479 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1480 /// other languages if parts of the buffer are written in different languages.
1481 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1482 self.syntax_map
1483 .lock()
1484 .set_language_registry(language_registry);
1485 }
1486
1487 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1488 self.syntax_map.lock().language_registry()
1489 }
1490
1491 /// Assign the line ending type to the buffer.
1492 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1493 self.text.set_line_ending(line_ending);
1494
1495 let lamport_timestamp = self.text.lamport_clock.tick();
1496 self.send_operation(
1497 Operation::UpdateLineEnding {
1498 line_ending,
1499 lamport_timestamp,
1500 },
1501 true,
1502 cx,
1503 );
1504 }
1505
1506 /// Assign the buffer a new [`Capability`].
1507 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1508 if self.capability != capability {
1509 self.capability = capability;
1510 cx.emit(BufferEvent::CapabilityChanged)
1511 }
1512 }
1513
1514 /// This method is called to signal that the buffer has been saved.
1515 pub fn did_save(
1516 &mut self,
1517 version: clock::Global,
1518 mtime: Option<MTime>,
1519 cx: &mut Context<Self>,
1520 ) {
1521 self.saved_version = version.clone();
1522 self.has_unsaved_edits.set((version, false));
1523 self.has_conflict = false;
1524 self.saved_mtime = mtime;
1525 self.was_changed();
1526 cx.emit(BufferEvent::Saved);
1527 cx.notify();
1528 }
1529
1530 /// Reloads the contents of the buffer from disk.
1531 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1532 let (tx, rx) = futures::channel::oneshot::channel();
1533 let prev_version = self.text.version();
1534 self.reload_task = Some(cx.spawn(async move |this, cx| {
1535 let Some((new_mtime, load_bytes_task, encoding)) = this.update(cx, |this, cx| {
1536 let file = this.file.as_ref()?.as_local()?;
1537 Some((
1538 file.disk_state().mtime(),
1539 file.load_bytes(cx),
1540 this.encoding,
1541 ))
1542 })?
1543 else {
1544 return Ok(());
1545 };
1546
1547 let bytes = load_bytes_task.await?;
1548 let (cow, _encoding_used, _has_errors) = encoding.decode(&bytes);
1549 let new_text = cow.into_owned();
1550
1551 let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await;
1552 this.update(cx, |this, cx| {
1553 if this.version() == diff.base_version {
1554 this.finalize_last_transaction();
1555 this.apply_diff(diff, cx);
1556 tx.send(this.finalize_last_transaction().cloned()).ok();
1557 this.has_conflict = false;
1558 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1559 } else {
1560 if !diff.edits.is_empty()
1561 || this
1562 .edits_since::<usize>(&diff.base_version)
1563 .next()
1564 .is_some()
1565 {
1566 this.has_conflict = true;
1567 }
1568
1569 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1570 }
1571
1572 this.reload_task.take();
1573 })
1574 }));
1575 rx
1576 }
1577
1578 /// This method is called to signal that the buffer has been reloaded.
1579 pub fn did_reload(
1580 &mut self,
1581 version: clock::Global,
1582 line_ending: LineEnding,
1583 mtime: Option<MTime>,
1584 cx: &mut Context<Self>,
1585 ) {
1586 self.saved_version = version;
1587 self.has_unsaved_edits
1588 .set((self.saved_version.clone(), false));
1589 self.text.set_line_ending(line_ending);
1590 self.saved_mtime = mtime;
1591 cx.emit(BufferEvent::Reloaded);
1592 cx.notify();
1593 }
1594
1595 /// Updates the [`File`] backing this buffer. This should be called when
1596 /// the file has changed or has been deleted.
1597 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1598 let was_dirty = self.is_dirty();
1599 let mut file_changed = false;
1600
1601 if let Some(old_file) = self.file.as_ref() {
1602 if new_file.path() != old_file.path() {
1603 file_changed = true;
1604 }
1605
1606 let old_state = old_file.disk_state();
1607 let new_state = new_file.disk_state();
1608 if old_state != new_state {
1609 file_changed = true;
1610 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1611 cx.emit(BufferEvent::ReloadNeeded)
1612 }
1613 }
1614 } else {
1615 file_changed = true;
1616 };
1617
1618 self.file = Some(new_file);
1619 if file_changed {
1620 self.was_changed();
1621 self.non_text_state_update_count += 1;
1622 if was_dirty != self.is_dirty() {
1623 cx.emit(BufferEvent::DirtyChanged);
1624 }
1625 cx.emit(BufferEvent::FileHandleChanged);
1626 cx.notify();
1627 }
1628 }
1629
1630 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1631 Some(self.branch_state.as_ref()?.base_buffer.clone())
1632 }
1633
1634 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1635 pub fn language(&self) -> Option<&Arc<Language>> {
1636 self.language.as_ref()
1637 }
1638
1639 /// Returns the [`Language`] at the given location.
1640 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1641 let offset = position.to_offset(self);
1642 let mut is_first = true;
1643 let start_anchor = self.anchor_before(offset);
1644 let end_anchor = self.anchor_after(offset);
1645 self.syntax_map
1646 .lock()
1647 .layers_for_range(offset..offset, &self.text, false)
1648 .filter(|layer| {
1649 if is_first {
1650 is_first = false;
1651 return true;
1652 }
1653
1654 layer
1655 .included_sub_ranges
1656 .map(|sub_ranges| {
1657 sub_ranges.iter().any(|sub_range| {
1658 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1659 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1660 !is_before_start && !is_after_end
1661 })
1662 })
1663 .unwrap_or(true)
1664 })
1665 .last()
1666 .map(|info| info.language.clone())
1667 .or_else(|| self.language.clone())
1668 }
1669
1670 /// Returns each [`Language`] for the active syntax layers at the given location.
1671 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1672 let offset = position.to_offset(self);
1673 let mut languages: Vec<Arc<Language>> = self
1674 .syntax_map
1675 .lock()
1676 .layers_for_range(offset..offset, &self.text, false)
1677 .map(|info| info.language.clone())
1678 .collect();
1679
1680 if languages.is_empty()
1681 && let Some(buffer_language) = self.language()
1682 {
1683 languages.push(buffer_language.clone());
1684 }
1685
1686 languages
1687 }
1688
1689 /// An integer version number that accounts for all updates besides
1690 /// the buffer's text itself (which is versioned via a version vector).
1691 pub fn non_text_state_update_count(&self) -> usize {
1692 self.non_text_state_update_count
1693 }
1694
1695 /// Whether the buffer is being parsed in the background.
1696 #[cfg(any(test, feature = "test-support"))]
1697 pub fn is_parsing(&self) -> bool {
1698 self.reparse.is_some()
1699 }
1700
1701 /// Indicates whether the buffer contains any regions that may be
1702 /// written in a language that hasn't been loaded yet.
1703 pub fn contains_unknown_injections(&self) -> bool {
1704 self.syntax_map.lock().contains_unknown_injections()
1705 }
1706
1707 #[cfg(any(test, feature = "test-support"))]
1708 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1709 self.sync_parse_timeout = timeout;
1710 }
1711
1712 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1713 match Arc::get_mut(&mut self.tree_sitter_data) {
1714 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1715 None => {
1716 let tree_sitter_data = TreeSitterData::new(snapshot);
1717 self.tree_sitter_data = Arc::new(tree_sitter_data)
1718 }
1719 }
1720 }
1721
1722 /// Called after an edit to synchronize the buffer's main parse tree with
1723 /// the buffer's new underlying state.
1724 ///
1725 /// Locks the syntax map and interpolates the edits since the last reparse
1726 /// into the foreground syntax tree.
1727 ///
1728 /// Then takes a stable snapshot of the syntax map before unlocking it.
1729 /// The snapshot with the interpolated edits is sent to a background thread,
1730 /// where we ask Tree-sitter to perform an incremental parse.
1731 ///
1732 /// Meanwhile, in the foreground if `may_block` is true, we block the main
1733 /// thread for up to 1ms waiting on the parse to complete. As soon as it
1734 /// completes, we proceed synchronously, unless a 1ms timeout elapses.
1735 ///
1736 /// If we time out waiting on the parse, we spawn a second task waiting
1737 /// until the parse does complete and return with the interpolated tree still
1738 /// in the foreground. When the background parse completes, call back into
1739 /// the main thread and assign the foreground parse state.
1740 ///
1741 /// If the buffer or grammar changed since the start of the background parse,
1742 /// initiate an additional reparse recursively. To avoid concurrent parses
1743 /// for the same buffer, we only initiate a new parse if we are not already
1744 /// parsing in the background.
1745 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1746 if self.text.version() != *self.tree_sitter_data.version() {
1747 self.invalidate_tree_sitter_data(self.text.snapshot());
1748 }
1749 if self.reparse.is_some() {
1750 return;
1751 }
1752 let language = if let Some(language) = self.language.clone() {
1753 language
1754 } else {
1755 return;
1756 };
1757
1758 let text = self.text_snapshot();
1759 let parsed_version = self.version();
1760
1761 let mut syntax_map = self.syntax_map.lock();
1762 syntax_map.interpolate(&text);
1763 let language_registry = syntax_map.language_registry();
1764 let mut syntax_snapshot = syntax_map.snapshot();
1765 drop(syntax_map);
1766
1767 let parse_task = cx.background_spawn({
1768 let language = language.clone();
1769 let language_registry = language_registry.clone();
1770 async move {
1771 syntax_snapshot.reparse(&text, language_registry, language);
1772 syntax_snapshot
1773 }
1774 });
1775
1776 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1777 if may_block {
1778 match cx
1779 .background_executor()
1780 .block_with_timeout(self.sync_parse_timeout, parse_task)
1781 {
1782 Ok(new_syntax_snapshot) => {
1783 self.did_finish_parsing(new_syntax_snapshot, cx);
1784 self.reparse = None;
1785 }
1786 Err(parse_task) => {
1787 self.reparse = Some(cx.spawn(async move |this, cx| {
1788 let new_syntax_map = cx.background_spawn(parse_task).await;
1789 this.update(cx, move |this, cx| {
1790 let grammar_changed = || {
1791 this.language.as_ref().is_none_or(|current_language| {
1792 !Arc::ptr_eq(&language, current_language)
1793 })
1794 };
1795 let language_registry_changed = || {
1796 new_syntax_map.contains_unknown_injections()
1797 && language_registry.is_some_and(|registry| {
1798 registry.version()
1799 != new_syntax_map.language_registry_version()
1800 })
1801 };
1802 let parse_again = this.version.changed_since(&parsed_version)
1803 || language_registry_changed()
1804 || grammar_changed();
1805 this.did_finish_parsing(new_syntax_map, cx);
1806 this.reparse = None;
1807 if parse_again {
1808 this.reparse(cx, false);
1809 }
1810 })
1811 .ok();
1812 }));
1813 }
1814 }
1815 } else {
1816 self.reparse = Some(cx.spawn(async move |this, cx| {
1817 let new_syntax_map = cx.background_spawn(parse_task).await;
1818 this.update(cx, move |this, cx| {
1819 let grammar_changed = || {
1820 this.language.as_ref().is_none_or(|current_language| {
1821 !Arc::ptr_eq(&language, current_language)
1822 })
1823 };
1824 let language_registry_changed = || {
1825 new_syntax_map.contains_unknown_injections()
1826 && language_registry.is_some_and(|registry| {
1827 registry.version() != new_syntax_map.language_registry_version()
1828 })
1829 };
1830 let parse_again = this.version.changed_since(&parsed_version)
1831 || language_registry_changed()
1832 || grammar_changed();
1833 this.did_finish_parsing(new_syntax_map, cx);
1834 this.reparse = None;
1835 if parse_again {
1836 this.reparse(cx, false);
1837 }
1838 })
1839 .ok();
1840 }));
1841 }
1842 }
1843
1844 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1845 self.was_changed();
1846 self.non_text_state_update_count += 1;
1847 self.syntax_map.lock().did_parse(syntax_snapshot);
1848 self.request_autoindent(cx);
1849 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1850 self.invalidate_tree_sitter_data(self.text.snapshot());
1851 cx.emit(BufferEvent::Reparsed);
1852 cx.notify();
1853 }
1854
1855 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1856 self.parse_status.1.clone()
1857 }
1858
1859 /// Wait until the buffer is no longer parsing
1860 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1861 let mut parse_status = self.parse_status();
1862 async move {
1863 while *parse_status.borrow() != ParseStatus::Idle {
1864 if parse_status.changed().await.is_err() {
1865 break;
1866 }
1867 }
1868 }
1869 }
1870
1871 /// Assign to the buffer a set of diagnostics created by a given language server.
1872 pub fn update_diagnostics(
1873 &mut self,
1874 server_id: LanguageServerId,
1875 diagnostics: DiagnosticSet,
1876 cx: &mut Context<Self>,
1877 ) {
1878 let lamport_timestamp = self.text.lamport_clock.tick();
1879 let op = Operation::UpdateDiagnostics {
1880 server_id,
1881 diagnostics: diagnostics.iter().cloned().collect(),
1882 lamport_timestamp,
1883 };
1884
1885 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1886 self.send_operation(op, true, cx);
1887 }
1888
1889 pub fn buffer_diagnostics(
1890 &self,
1891 for_server: Option<LanguageServerId>,
1892 ) -> Vec<&DiagnosticEntry<Anchor>> {
1893 match for_server {
1894 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1895 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1896 Err(_) => Vec::new(),
1897 },
1898 None => self
1899 .diagnostics
1900 .iter()
1901 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1902 .collect(),
1903 }
1904 }
1905
1906 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1907 if let Some(indent_sizes) = self.compute_autoindents() {
1908 let indent_sizes = cx.background_spawn(indent_sizes);
1909 match cx
1910 .background_executor()
1911 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1912 {
1913 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1914 Err(indent_sizes) => {
1915 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1916 let indent_sizes = indent_sizes.await;
1917 this.update(cx, |this, cx| {
1918 this.apply_autoindents(indent_sizes, cx);
1919 })
1920 .ok();
1921 }));
1922 }
1923 }
1924 } else {
1925 self.autoindent_requests.clear();
1926 for tx in self.wait_for_autoindent_txs.drain(..) {
1927 tx.send(()).ok();
1928 }
1929 }
1930 }
1931
1932 fn compute_autoindents(
1933 &self,
1934 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1935 let max_rows_between_yields = 100;
1936 let snapshot = self.snapshot();
1937 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1938 return None;
1939 }
1940
1941 let autoindent_requests = self.autoindent_requests.clone();
1942 Some(async move {
1943 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1944 for request in autoindent_requests {
1945 // Resolve each edited range to its row in the current buffer and in the
1946 // buffer before this batch of edits.
1947 let mut row_ranges = Vec::new();
1948 let mut old_to_new_rows = BTreeMap::new();
1949 let mut language_indent_sizes_by_new_row = Vec::new();
1950 for entry in &request.entries {
1951 let position = entry.range.start;
1952 let new_row = position.to_point(&snapshot).row;
1953 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1954 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1955
1956 if !entry.first_line_is_new {
1957 let old_row = position.to_point(&request.before_edit).row;
1958 old_to_new_rows.insert(old_row, new_row);
1959 }
1960 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1961 }
1962
1963 // Build a map containing the suggested indentation for each of the edited lines
1964 // with respect to the state of the buffer before these edits. This map is keyed
1965 // by the rows for these lines in the current state of the buffer.
1966 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1967 let old_edited_ranges =
1968 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1969 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1970 let mut language_indent_size = IndentSize::default();
1971 for old_edited_range in old_edited_ranges {
1972 let suggestions = request
1973 .before_edit
1974 .suggest_autoindents(old_edited_range.clone())
1975 .into_iter()
1976 .flatten();
1977 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1978 if let Some(suggestion) = suggestion {
1979 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1980
1981 // Find the indent size based on the language for this row.
1982 while let Some((row, size)) = language_indent_sizes.peek() {
1983 if *row > new_row {
1984 break;
1985 }
1986 language_indent_size = *size;
1987 language_indent_sizes.next();
1988 }
1989
1990 let suggested_indent = old_to_new_rows
1991 .get(&suggestion.basis_row)
1992 .and_then(|from_row| {
1993 Some(old_suggestions.get(from_row).copied()?.0)
1994 })
1995 .unwrap_or_else(|| {
1996 request
1997 .before_edit
1998 .indent_size_for_line(suggestion.basis_row)
1999 })
2000 .with_delta(suggestion.delta, language_indent_size);
2001 old_suggestions
2002 .insert(new_row, (suggested_indent, suggestion.within_error));
2003 }
2004 }
2005 yield_now().await;
2006 }
2007
2008 // Compute new suggestions for each line, but only include them in the result
2009 // if they differ from the old suggestion for that line.
2010 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
2011 let mut language_indent_size = IndentSize::default();
2012 for (row_range, original_indent_column) in row_ranges {
2013 let new_edited_row_range = if request.is_block_mode {
2014 row_range.start..row_range.start + 1
2015 } else {
2016 row_range.clone()
2017 };
2018
2019 let suggestions = snapshot
2020 .suggest_autoindents(new_edited_row_range.clone())
2021 .into_iter()
2022 .flatten();
2023 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
2024 if let Some(suggestion) = suggestion {
2025 // Find the indent size based on the language for this row.
2026 while let Some((row, size)) = language_indent_sizes.peek() {
2027 if *row > new_row {
2028 break;
2029 }
2030 language_indent_size = *size;
2031 language_indent_sizes.next();
2032 }
2033
2034 let suggested_indent = indent_sizes
2035 .get(&suggestion.basis_row)
2036 .copied()
2037 .map(|e| e.0)
2038 .unwrap_or_else(|| {
2039 snapshot.indent_size_for_line(suggestion.basis_row)
2040 })
2041 .with_delta(suggestion.delta, language_indent_size);
2042
2043 if old_suggestions.get(&new_row).is_none_or(
2044 |(old_indentation, was_within_error)| {
2045 suggested_indent != *old_indentation
2046 && (!suggestion.within_error || *was_within_error)
2047 },
2048 ) {
2049 indent_sizes.insert(
2050 new_row,
2051 (suggested_indent, request.ignore_empty_lines),
2052 );
2053 }
2054 }
2055 }
2056
2057 if let (true, Some(original_indent_column)) =
2058 (request.is_block_mode, original_indent_column)
2059 {
2060 let new_indent =
2061 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2062 *indent
2063 } else {
2064 snapshot.indent_size_for_line(row_range.start)
2065 };
2066 let delta = new_indent.len as i64 - original_indent_column as i64;
2067 if delta != 0 {
2068 for row in row_range.skip(1) {
2069 indent_sizes.entry(row).or_insert_with(|| {
2070 let mut size = snapshot.indent_size_for_line(row);
2071 if size.kind == new_indent.kind {
2072 match delta.cmp(&0) {
2073 Ordering::Greater => size.len += delta as u32,
2074 Ordering::Less => {
2075 size.len = size.len.saturating_sub(-delta as u32)
2076 }
2077 Ordering::Equal => {}
2078 }
2079 }
2080 (size, request.ignore_empty_lines)
2081 });
2082 }
2083 }
2084 }
2085
2086 yield_now().await;
2087 }
2088 }
2089
2090 indent_sizes
2091 .into_iter()
2092 .filter_map(|(row, (indent, ignore_empty_lines))| {
2093 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2094 None
2095 } else {
2096 Some((row, indent))
2097 }
2098 })
2099 .collect()
2100 })
2101 }
2102
2103 fn apply_autoindents(
2104 &mut self,
2105 indent_sizes: BTreeMap<u32, IndentSize>,
2106 cx: &mut Context<Self>,
2107 ) {
2108 self.autoindent_requests.clear();
2109 for tx in self.wait_for_autoindent_txs.drain(..) {
2110 tx.send(()).ok();
2111 }
2112
2113 let edits: Vec<_> = indent_sizes
2114 .into_iter()
2115 .filter_map(|(row, indent_size)| {
2116 let current_size = indent_size_for_line(self, row);
2117 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2118 })
2119 .collect();
2120
2121 let preserve_preview = self.preserve_preview();
2122 self.edit(edits, None, cx);
2123 if preserve_preview {
2124 self.refresh_preview();
2125 }
2126 }
2127
2128 /// Create a minimal edit that will cause the given row to be indented
2129 /// with the given size. After applying this edit, the length of the line
2130 /// will always be at least `new_size.len`.
2131 pub fn edit_for_indent_size_adjustment(
2132 row: u32,
2133 current_size: IndentSize,
2134 new_size: IndentSize,
2135 ) -> Option<(Range<Point>, String)> {
2136 if new_size.kind == current_size.kind {
2137 match new_size.len.cmp(¤t_size.len) {
2138 Ordering::Greater => {
2139 let point = Point::new(row, 0);
2140 Some((
2141 point..point,
2142 iter::repeat(new_size.char())
2143 .take((new_size.len - current_size.len) as usize)
2144 .collect::<String>(),
2145 ))
2146 }
2147
2148 Ordering::Less => Some((
2149 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2150 String::new(),
2151 )),
2152
2153 Ordering::Equal => None,
2154 }
2155 } else {
2156 Some((
2157 Point::new(row, 0)..Point::new(row, current_size.len),
2158 iter::repeat(new_size.char())
2159 .take(new_size.len as usize)
2160 .collect::<String>(),
2161 ))
2162 }
2163 }
2164
2165 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2166 /// and the given new text.
2167 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2168 let old_text = self.as_rope().clone();
2169 let base_version = self.version();
2170 cx.background_executor()
2171 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2172 let old_text = old_text.to_string();
2173 let line_ending = LineEnding::detect(&new_text);
2174 LineEnding::normalize(&mut new_text);
2175 let edits = text_diff(&old_text, &new_text);
2176 Diff {
2177 base_version,
2178 line_ending,
2179 edits,
2180 }
2181 })
2182 }
2183
2184 /// Spawns a background task that searches the buffer for any whitespace
2185 /// at the ends of a lines, and returns a `Diff` that removes that whitespace.
2186 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2187 let old_text = self.as_rope().clone();
2188 let line_ending = self.line_ending();
2189 let base_version = self.version();
2190 cx.background_spawn(async move {
2191 let ranges = trailing_whitespace_ranges(&old_text);
2192 let empty = Arc::<str>::from("");
2193 Diff {
2194 base_version,
2195 line_ending,
2196 edits: ranges
2197 .into_iter()
2198 .map(|range| (range, empty.clone()))
2199 .collect(),
2200 }
2201 })
2202 }
2203
2204 /// Ensures that the buffer ends with a single newline character, and
2205 /// no other whitespace. Skips if the buffer is empty.
2206 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2207 let len = self.len();
2208 if len == 0 {
2209 return;
2210 }
2211 let mut offset = len;
2212 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2213 let non_whitespace_len = chunk
2214 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2215 .len();
2216 offset -= chunk.len();
2217 offset += non_whitespace_len;
2218 if non_whitespace_len != 0 {
2219 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2220 return;
2221 }
2222 break;
2223 }
2224 }
2225 self.edit([(offset..len, "\n")], None, cx);
2226 }
2227
2228 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2229 /// calculated, then adjust the diff to account for those changes, and discard any
2230 /// parts of the diff that conflict with those changes.
2231 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2232 let snapshot = self.snapshot();
2233 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2234 let mut delta = 0;
2235 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2236 while let Some(edit_since) = edits_since.peek() {
2237 // If the edit occurs after a diff hunk, then it does not
2238 // affect that hunk.
2239 if edit_since.old.start > range.end {
2240 break;
2241 }
2242 // If the edit precedes the diff hunk, then adjust the hunk
2243 // to reflect the edit.
2244 else if edit_since.old.end < range.start {
2245 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2246 edits_since.next();
2247 }
2248 // If the edit intersects a diff hunk, then discard that hunk.
2249 else {
2250 return None;
2251 }
2252 }
2253
2254 let start = (range.start as i64 + delta) as usize;
2255 let end = (range.end as i64 + delta) as usize;
2256 Some((start..end, new_text))
2257 });
2258
2259 self.start_transaction();
2260 self.text.set_line_ending(diff.line_ending);
2261 self.edit(adjusted_edits, None, cx);
2262 self.end_transaction(cx)
2263 }
2264
2265 pub fn has_unsaved_edits(&self) -> bool {
2266 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2267
2268 if last_version == self.version {
2269 self.has_unsaved_edits
2270 .set((last_version, has_unsaved_edits));
2271 return has_unsaved_edits;
2272 }
2273
2274 let has_edits = self.has_edits_since(&self.saved_version);
2275 self.has_unsaved_edits
2276 .set((self.version.clone(), has_edits));
2277 has_edits
2278 }
2279
2280 /// Checks if the buffer has unsaved changes.
2281 pub fn is_dirty(&self) -> bool {
2282 if self.capability == Capability::ReadOnly {
2283 return false;
2284 }
2285 if self.has_conflict {
2286 return true;
2287 }
2288 match self.file.as_ref().map(|f| f.disk_state()) {
2289 Some(DiskState::New) | Some(DiskState::Deleted) => {
2290 !self.is_empty() && self.has_unsaved_edits()
2291 }
2292 _ => self.has_unsaved_edits(),
2293 }
2294 }
2295
2296 /// Marks the buffer as having a conflict regardless of current buffer state.
2297 pub fn set_conflict(&mut self) {
2298 self.has_conflict = true;
2299 }
2300
2301 /// Checks if the buffer and its file have both changed since the buffer
2302 /// was last saved or reloaded.
2303 pub fn has_conflict(&self) -> bool {
2304 if self.has_conflict {
2305 return true;
2306 }
2307 let Some(file) = self.file.as_ref() else {
2308 return false;
2309 };
2310 match file.disk_state() {
2311 DiskState::New => false,
2312 DiskState::Present { mtime } => match self.saved_mtime {
2313 Some(saved_mtime) => {
2314 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2315 }
2316 None => true,
2317 },
2318 DiskState::Deleted => false,
2319 DiskState::Historic { .. } => false,
2320 }
2321 }
2322
2323 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2324 pub fn subscribe(&mut self) -> Subscription<usize> {
2325 self.text.subscribe()
2326 }
2327
2328 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2329 ///
2330 /// This allows downstream code to check if the buffer's text has changed without
2331 /// waiting for an effect cycle, which would be required if using eents.
2332 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2333 if let Err(ix) = self
2334 .change_bits
2335 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2336 {
2337 self.change_bits.insert(ix, bit);
2338 }
2339 }
2340
2341 /// Set the change bit for all "listeners".
2342 fn was_changed(&mut self) {
2343 self.change_bits.retain(|change_bit| {
2344 change_bit
2345 .upgrade()
2346 .inspect(|bit| {
2347 _ = bit.replace(true);
2348 })
2349 .is_some()
2350 });
2351 }
2352
2353 /// Starts a transaction, if one is not already in-progress. When undoing or
2354 /// redoing edits, all of the edits performed within a transaction are undone
2355 /// or redone together.
2356 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2357 self.start_transaction_at(Instant::now())
2358 }
2359
2360 /// Starts a transaction, providing the current time. Subsequent transactions
2361 /// that occur within a short period of time will be grouped together. This
2362 /// is controlled by the buffer's undo grouping duration.
2363 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2364 self.transaction_depth += 1;
2365 if self.was_dirty_before_starting_transaction.is_none() {
2366 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2367 }
2368 self.text.start_transaction_at(now)
2369 }
2370
2371 /// Terminates the current transaction, if this is the outermost transaction.
2372 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2373 self.end_transaction_at(Instant::now(), cx)
2374 }
2375
2376 /// Terminates the current transaction, providing the current time. Subsequent transactions
2377 /// that occur within a short period of time will be grouped together. This
2378 /// is controlled by the buffer's undo grouping duration.
2379 pub fn end_transaction_at(
2380 &mut self,
2381 now: Instant,
2382 cx: &mut Context<Self>,
2383 ) -> Option<TransactionId> {
2384 assert!(self.transaction_depth > 0);
2385 self.transaction_depth -= 1;
2386 let was_dirty = if self.transaction_depth == 0 {
2387 self.was_dirty_before_starting_transaction.take().unwrap()
2388 } else {
2389 false
2390 };
2391 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2392 self.did_edit(&start_version, was_dirty, cx);
2393 Some(transaction_id)
2394 } else {
2395 None
2396 }
2397 }
2398
2399 /// Manually add a transaction to the buffer's undo history.
2400 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2401 self.text.push_transaction(transaction, now);
2402 }
2403
2404 /// Differs from `push_transaction` in that it does not clear the redo
2405 /// stack. Intended to be used to create a parent transaction to merge
2406 /// potential child transactions into.
2407 ///
2408 /// The caller is responsible for removing it from the undo history using
2409 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2410 /// are merged into this transaction, the caller is responsible for ensuring
2411 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2412 /// cleared is to create transactions with the usual `start_transaction` and
2413 /// `end_transaction` methods and merging the resulting transactions into
2414 /// the transaction created by this method
2415 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2416 self.text.push_empty_transaction(now)
2417 }
2418
2419 /// Prevent the last transaction from being grouped with any subsequent transactions,
2420 /// even if they occur with the buffer's undo grouping duration.
2421 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2422 self.text.finalize_last_transaction()
2423 }
2424
2425 /// Manually group all changes since a given transaction.
2426 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2427 self.text.group_until_transaction(transaction_id);
2428 }
2429
2430 /// Manually remove a transaction from the buffer's undo history
2431 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2432 self.text.forget_transaction(transaction_id)
2433 }
2434
2435 /// Retrieve a transaction from the buffer's undo history
2436 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2437 self.text.get_transaction(transaction_id)
2438 }
2439
2440 /// Manually merge two transactions in the buffer's undo history.
2441 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2442 self.text.merge_transactions(transaction, destination);
2443 }
2444
2445 /// Waits for the buffer to receive operations with the given timestamps.
2446 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2447 &mut self,
2448 edit_ids: It,
2449 ) -> impl Future<Output = Result<()>> + use<It> {
2450 self.text.wait_for_edits(edit_ids)
2451 }
2452
2453 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2454 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2455 &mut self,
2456 anchors: It,
2457 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2458 self.text.wait_for_anchors(anchors)
2459 }
2460
2461 /// Waits for the buffer to receive operations up to the given version.
2462 pub fn wait_for_version(
2463 &mut self,
2464 version: clock::Global,
2465 ) -> impl Future<Output = Result<()>> + use<> {
2466 self.text.wait_for_version(version)
2467 }
2468
2469 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2470 /// [`Buffer::wait_for_version`] to resolve with an error.
2471 pub fn give_up_waiting(&mut self) {
2472 self.text.give_up_waiting();
2473 }
2474
2475 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2476 let mut rx = None;
2477 if !self.autoindent_requests.is_empty() {
2478 let channel = oneshot::channel();
2479 self.wait_for_autoindent_txs.push(channel.0);
2480 rx = Some(channel.1);
2481 }
2482 rx
2483 }
2484
2485 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2486 pub fn set_active_selections(
2487 &mut self,
2488 selections: Arc<[Selection<Anchor>]>,
2489 line_mode: bool,
2490 cursor_shape: CursorShape,
2491 cx: &mut Context<Self>,
2492 ) {
2493 let lamport_timestamp = self.text.lamport_clock.tick();
2494 self.remote_selections.insert(
2495 self.text.replica_id(),
2496 SelectionSet {
2497 selections: selections.clone(),
2498 lamport_timestamp,
2499 line_mode,
2500 cursor_shape,
2501 },
2502 );
2503 self.send_operation(
2504 Operation::UpdateSelections {
2505 selections,
2506 line_mode,
2507 lamport_timestamp,
2508 cursor_shape,
2509 },
2510 true,
2511 cx,
2512 );
2513 self.non_text_state_update_count += 1;
2514 cx.notify();
2515 }
2516
2517 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2518 /// this replica.
2519 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2520 if self
2521 .remote_selections
2522 .get(&self.text.replica_id())
2523 .is_none_or(|set| !set.selections.is_empty())
2524 {
2525 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2526 }
2527 }
2528
2529 pub fn set_agent_selections(
2530 &mut self,
2531 selections: Arc<[Selection<Anchor>]>,
2532 line_mode: bool,
2533 cursor_shape: CursorShape,
2534 cx: &mut Context<Self>,
2535 ) {
2536 let lamport_timestamp = self.text.lamport_clock.tick();
2537 self.remote_selections.insert(
2538 ReplicaId::AGENT,
2539 SelectionSet {
2540 selections,
2541 lamport_timestamp,
2542 line_mode,
2543 cursor_shape,
2544 },
2545 );
2546 self.non_text_state_update_count += 1;
2547 cx.notify();
2548 }
2549
2550 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2551 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2552 }
2553
2554 /// Replaces the buffer's entire text.
2555 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2556 where
2557 T: Into<Arc<str>>,
2558 {
2559 self.autoindent_requests.clear();
2560 self.edit([(0..self.len(), text)], None, cx)
2561 }
2562
2563 /// Appends the given text to the end of the buffer.
2564 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2565 where
2566 T: Into<Arc<str>>,
2567 {
2568 self.edit([(self.len()..self.len(), text)], None, cx)
2569 }
2570
2571 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2572 /// delete, and a string of text to insert at that location.
2573 ///
2574 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2575 /// request for the edited ranges, which will be processed when the buffer finishes
2576 /// parsing.
2577 ///
2578 /// Parsing takes place at the end of a transaction, and may compute synchronously
2579 /// or asynchronously, depending on the changes.
2580 pub fn edit<I, S, T>(
2581 &mut self,
2582 edits_iter: I,
2583 autoindent_mode: Option<AutoindentMode>,
2584 cx: &mut Context<Self>,
2585 ) -> Option<clock::Lamport>
2586 where
2587 I: IntoIterator<Item = (Range<S>, T)>,
2588 S: ToOffset,
2589 T: Into<Arc<str>>,
2590 {
2591 // Skip invalid edits and coalesce contiguous ones.
2592 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2593
2594 for (range, new_text) in edits_iter {
2595 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2596
2597 if range.start > range.end {
2598 mem::swap(&mut range.start, &mut range.end);
2599 }
2600 let new_text = new_text.into();
2601 if !new_text.is_empty() || !range.is_empty() {
2602 if let Some((prev_range, prev_text)) = edits.last_mut()
2603 && prev_range.end >= range.start
2604 {
2605 prev_range.end = cmp::max(prev_range.end, range.end);
2606 *prev_text = format!("{prev_text}{new_text}").into();
2607 } else {
2608 edits.push((range, new_text));
2609 }
2610 }
2611 }
2612 if edits.is_empty() {
2613 return None;
2614 }
2615
2616 self.start_transaction();
2617 self.pending_autoindent.take();
2618 let autoindent_request = autoindent_mode
2619 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2620
2621 let edit_operation = self.text.edit(edits.iter().cloned());
2622 let edit_id = edit_operation.timestamp();
2623
2624 if let Some((before_edit, mode)) = autoindent_request {
2625 let mut delta = 0isize;
2626 let mut previous_setting = None;
2627 let entries: Vec<_> = edits
2628 .into_iter()
2629 .enumerate()
2630 .zip(&edit_operation.as_edit().unwrap().new_text)
2631 .filter(|((_, (range, _)), _)| {
2632 let language = before_edit.language_at(range.start);
2633 let language_id = language.map(|l| l.id());
2634 if let Some((cached_language_id, auto_indent)) = previous_setting
2635 && cached_language_id == language_id
2636 {
2637 auto_indent
2638 } else {
2639 // The auto-indent setting is not present in editorconfigs, hence
2640 // we can avoid passing the file here.
2641 let auto_indent =
2642 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2643 previous_setting = Some((language_id, auto_indent));
2644 auto_indent
2645 }
2646 })
2647 .map(|((ix, (range, _)), new_text)| {
2648 let new_text_length = new_text.len();
2649 let old_start = range.start.to_point(&before_edit);
2650 let new_start = (delta + range.start as isize) as usize;
2651 let range_len = range.end - range.start;
2652 delta += new_text_length as isize - range_len as isize;
2653
2654 // Decide what range of the insertion to auto-indent, and whether
2655 // the first line of the insertion should be considered a newly-inserted line
2656 // or an edit to an existing line.
2657 let mut range_of_insertion_to_indent = 0..new_text_length;
2658 let mut first_line_is_new = true;
2659
2660 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2661 let old_line_end = before_edit.line_len(old_start.row);
2662
2663 if old_start.column > old_line_start {
2664 first_line_is_new = false;
2665 }
2666
2667 if !new_text.contains('\n')
2668 && (old_start.column + (range_len as u32) < old_line_end
2669 || old_line_end == old_line_start)
2670 {
2671 first_line_is_new = false;
2672 }
2673
2674 // When inserting text starting with a newline, avoid auto-indenting the
2675 // previous line.
2676 if new_text.starts_with('\n') {
2677 range_of_insertion_to_indent.start += 1;
2678 first_line_is_new = true;
2679 }
2680
2681 let mut original_indent_column = None;
2682 if let AutoindentMode::Block {
2683 original_indent_columns,
2684 } = &mode
2685 {
2686 original_indent_column = Some(if new_text.starts_with('\n') {
2687 indent_size_for_text(
2688 new_text[range_of_insertion_to_indent.clone()].chars(),
2689 )
2690 .len
2691 } else {
2692 original_indent_columns
2693 .get(ix)
2694 .copied()
2695 .flatten()
2696 .unwrap_or_else(|| {
2697 indent_size_for_text(
2698 new_text[range_of_insertion_to_indent.clone()].chars(),
2699 )
2700 .len
2701 })
2702 });
2703
2704 // Avoid auto-indenting the line after the edit.
2705 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2706 range_of_insertion_to_indent.end -= 1;
2707 }
2708 }
2709
2710 AutoindentRequestEntry {
2711 first_line_is_new,
2712 original_indent_column,
2713 indent_size: before_edit.language_indent_size_at(range.start, cx),
2714 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2715 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2716 }
2717 })
2718 .collect();
2719
2720 if !entries.is_empty() {
2721 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2722 before_edit,
2723 entries,
2724 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2725 ignore_empty_lines: false,
2726 }));
2727 }
2728 }
2729
2730 self.end_transaction(cx);
2731 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2732 Some(edit_id)
2733 }
2734
2735 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2736 self.was_changed();
2737
2738 if self.edits_since::<usize>(old_version).next().is_none() {
2739 return;
2740 }
2741
2742 self.reparse(cx, true);
2743 cx.emit(BufferEvent::Edited);
2744 if was_dirty != self.is_dirty() {
2745 cx.emit(BufferEvent::DirtyChanged);
2746 }
2747 cx.notify();
2748 }
2749
2750 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2751 where
2752 I: IntoIterator<Item = Range<T>>,
2753 T: ToOffset + Copy,
2754 {
2755 let before_edit = self.snapshot();
2756 let entries = ranges
2757 .into_iter()
2758 .map(|range| AutoindentRequestEntry {
2759 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2760 first_line_is_new: true,
2761 indent_size: before_edit.language_indent_size_at(range.start, cx),
2762 original_indent_column: None,
2763 })
2764 .collect();
2765 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2766 before_edit,
2767 entries,
2768 is_block_mode: false,
2769 ignore_empty_lines: true,
2770 }));
2771 self.request_autoindent(cx);
2772 }
2773
2774 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2775 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2776 pub fn insert_empty_line(
2777 &mut self,
2778 position: impl ToPoint,
2779 space_above: bool,
2780 space_below: bool,
2781 cx: &mut Context<Self>,
2782 ) -> Point {
2783 let mut position = position.to_point(self);
2784
2785 self.start_transaction();
2786
2787 self.edit(
2788 [(position..position, "\n")],
2789 Some(AutoindentMode::EachLine),
2790 cx,
2791 );
2792
2793 if position.column > 0 {
2794 position += Point::new(1, 0);
2795 }
2796
2797 if !self.is_line_blank(position.row) {
2798 self.edit(
2799 [(position..position, "\n")],
2800 Some(AutoindentMode::EachLine),
2801 cx,
2802 );
2803 }
2804
2805 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2806 self.edit(
2807 [(position..position, "\n")],
2808 Some(AutoindentMode::EachLine),
2809 cx,
2810 );
2811 position.row += 1;
2812 }
2813
2814 if space_below
2815 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2816 {
2817 self.edit(
2818 [(position..position, "\n")],
2819 Some(AutoindentMode::EachLine),
2820 cx,
2821 );
2822 }
2823
2824 self.end_transaction(cx);
2825
2826 position
2827 }
2828
2829 /// Applies the given remote operations to the buffer.
2830 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2831 self.pending_autoindent.take();
2832 let was_dirty = self.is_dirty();
2833 let old_version = self.version.clone();
2834 let mut deferred_ops = Vec::new();
2835 let buffer_ops = ops
2836 .into_iter()
2837 .filter_map(|op| match op {
2838 Operation::Buffer(op) => Some(op),
2839 _ => {
2840 if self.can_apply_op(&op) {
2841 self.apply_op(op, cx);
2842 } else {
2843 deferred_ops.push(op);
2844 }
2845 None
2846 }
2847 })
2848 .collect::<Vec<_>>();
2849 for operation in buffer_ops.iter() {
2850 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2851 }
2852 self.text.apply_ops(buffer_ops);
2853 self.deferred_ops.insert(deferred_ops);
2854 self.flush_deferred_ops(cx);
2855 self.did_edit(&old_version, was_dirty, cx);
2856 // Notify independently of whether the buffer was edited as the operations could include a
2857 // selection update.
2858 cx.notify();
2859 }
2860
2861 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2862 let mut deferred_ops = Vec::new();
2863 for op in self.deferred_ops.drain().iter().cloned() {
2864 if self.can_apply_op(&op) {
2865 self.apply_op(op, cx);
2866 } else {
2867 deferred_ops.push(op);
2868 }
2869 }
2870 self.deferred_ops.insert(deferred_ops);
2871 }
2872
2873 pub fn has_deferred_ops(&self) -> bool {
2874 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2875 }
2876
2877 fn can_apply_op(&self, operation: &Operation) -> bool {
2878 match operation {
2879 Operation::Buffer(_) => {
2880 unreachable!("buffer operations should never be applied at this layer")
2881 }
2882 Operation::UpdateDiagnostics {
2883 diagnostics: diagnostic_set,
2884 ..
2885 } => diagnostic_set.iter().all(|diagnostic| {
2886 self.text.can_resolve(&diagnostic.range.start)
2887 && self.text.can_resolve(&diagnostic.range.end)
2888 }),
2889 Operation::UpdateSelections { selections, .. } => selections
2890 .iter()
2891 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2892 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2893 }
2894 }
2895
2896 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2897 match operation {
2898 Operation::Buffer(_) => {
2899 unreachable!("buffer operations should never be applied at this layer")
2900 }
2901 Operation::UpdateDiagnostics {
2902 server_id,
2903 diagnostics: diagnostic_set,
2904 lamport_timestamp,
2905 } => {
2906 let snapshot = self.snapshot();
2907 self.apply_diagnostic_update(
2908 server_id,
2909 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2910 lamport_timestamp,
2911 cx,
2912 );
2913 }
2914 Operation::UpdateSelections {
2915 selections,
2916 lamport_timestamp,
2917 line_mode,
2918 cursor_shape,
2919 } => {
2920 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2921 && set.lamport_timestamp > lamport_timestamp
2922 {
2923 return;
2924 }
2925
2926 self.remote_selections.insert(
2927 lamport_timestamp.replica_id,
2928 SelectionSet {
2929 selections,
2930 lamport_timestamp,
2931 line_mode,
2932 cursor_shape,
2933 },
2934 );
2935 self.text.lamport_clock.observe(lamport_timestamp);
2936 self.non_text_state_update_count += 1;
2937 }
2938 Operation::UpdateCompletionTriggers {
2939 triggers,
2940 lamport_timestamp,
2941 server_id,
2942 } => {
2943 if triggers.is_empty() {
2944 self.completion_triggers_per_language_server
2945 .remove(&server_id);
2946 self.completion_triggers = self
2947 .completion_triggers_per_language_server
2948 .values()
2949 .flat_map(|triggers| triggers.iter().cloned())
2950 .collect();
2951 } else {
2952 self.completion_triggers_per_language_server
2953 .insert(server_id, triggers.iter().cloned().collect());
2954 self.completion_triggers.extend(triggers);
2955 }
2956 self.text.lamport_clock.observe(lamport_timestamp);
2957 }
2958 Operation::UpdateLineEnding {
2959 line_ending,
2960 lamport_timestamp,
2961 } => {
2962 self.text.set_line_ending(line_ending);
2963 self.text.lamport_clock.observe(lamport_timestamp);
2964 }
2965 }
2966 }
2967
2968 fn apply_diagnostic_update(
2969 &mut self,
2970 server_id: LanguageServerId,
2971 diagnostics: DiagnosticSet,
2972 lamport_timestamp: clock::Lamport,
2973 cx: &mut Context<Self>,
2974 ) {
2975 if lamport_timestamp > self.diagnostics_timestamp {
2976 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2977 if diagnostics.is_empty() {
2978 if let Ok(ix) = ix {
2979 self.diagnostics.remove(ix);
2980 }
2981 } else {
2982 match ix {
2983 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2984 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2985 };
2986 }
2987 self.diagnostics_timestamp = lamport_timestamp;
2988 self.non_text_state_update_count += 1;
2989 self.text.lamport_clock.observe(lamport_timestamp);
2990 cx.notify();
2991 cx.emit(BufferEvent::DiagnosticsUpdated);
2992 }
2993 }
2994
2995 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2996 self.was_changed();
2997 cx.emit(BufferEvent::Operation {
2998 operation,
2999 is_local,
3000 });
3001 }
3002
3003 /// Removes the selections for a given peer.
3004 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
3005 self.remote_selections.remove(&replica_id);
3006 cx.notify();
3007 }
3008
3009 /// Undoes the most recent transaction.
3010 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3011 let was_dirty = self.is_dirty();
3012 let old_version = self.version.clone();
3013
3014 if let Some((transaction_id, operation)) = self.text.undo() {
3015 self.send_operation(Operation::Buffer(operation), true, cx);
3016 self.did_edit(&old_version, was_dirty, cx);
3017 Some(transaction_id)
3018 } else {
3019 None
3020 }
3021 }
3022
3023 /// Manually undoes a specific transaction in the buffer's undo history.
3024 pub fn undo_transaction(
3025 &mut self,
3026 transaction_id: TransactionId,
3027 cx: &mut Context<Self>,
3028 ) -> bool {
3029 let was_dirty = self.is_dirty();
3030 let old_version = self.version.clone();
3031 if let Some(operation) = self.text.undo_transaction(transaction_id) {
3032 self.send_operation(Operation::Buffer(operation), true, cx);
3033 self.did_edit(&old_version, was_dirty, cx);
3034 true
3035 } else {
3036 false
3037 }
3038 }
3039
3040 /// Manually undoes all changes after a given transaction in the buffer's undo history.
3041 pub fn undo_to_transaction(
3042 &mut self,
3043 transaction_id: TransactionId,
3044 cx: &mut Context<Self>,
3045 ) -> bool {
3046 let was_dirty = self.is_dirty();
3047 let old_version = self.version.clone();
3048
3049 let operations = self.text.undo_to_transaction(transaction_id);
3050 let undone = !operations.is_empty();
3051 for operation in operations {
3052 self.send_operation(Operation::Buffer(operation), true, cx);
3053 }
3054 if undone {
3055 self.did_edit(&old_version, was_dirty, cx)
3056 }
3057 undone
3058 }
3059
3060 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3061 let was_dirty = self.is_dirty();
3062 let operation = self.text.undo_operations(counts);
3063 let old_version = self.version.clone();
3064 self.send_operation(Operation::Buffer(operation), true, cx);
3065 self.did_edit(&old_version, was_dirty, cx);
3066 }
3067
3068 /// Manually redoes a specific transaction in the buffer's redo history.
3069 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3070 let was_dirty = self.is_dirty();
3071 let old_version = self.version.clone();
3072
3073 if let Some((transaction_id, operation)) = self.text.redo() {
3074 self.send_operation(Operation::Buffer(operation), true, cx);
3075 self.did_edit(&old_version, was_dirty, cx);
3076 Some(transaction_id)
3077 } else {
3078 None
3079 }
3080 }
3081
3082 /// Manually undoes all changes until a given transaction in the buffer's redo history.
3083 pub fn redo_to_transaction(
3084 &mut self,
3085 transaction_id: TransactionId,
3086 cx: &mut Context<Self>,
3087 ) -> bool {
3088 let was_dirty = self.is_dirty();
3089 let old_version = self.version.clone();
3090
3091 let operations = self.text.redo_to_transaction(transaction_id);
3092 let redone = !operations.is_empty();
3093 for operation in operations {
3094 self.send_operation(Operation::Buffer(operation), true, cx);
3095 }
3096 if redone {
3097 self.did_edit(&old_version, was_dirty, cx)
3098 }
3099 redone
3100 }
3101
3102 /// Override current completion triggers with the user-provided completion triggers.
3103 pub fn set_completion_triggers(
3104 &mut self,
3105 server_id: LanguageServerId,
3106 triggers: BTreeSet<String>,
3107 cx: &mut Context<Self>,
3108 ) {
3109 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3110 if triggers.is_empty() {
3111 self.completion_triggers_per_language_server
3112 .remove(&server_id);
3113 self.completion_triggers = self
3114 .completion_triggers_per_language_server
3115 .values()
3116 .flat_map(|triggers| triggers.iter().cloned())
3117 .collect();
3118 } else {
3119 self.completion_triggers_per_language_server
3120 .insert(server_id, triggers.clone());
3121 self.completion_triggers.extend(triggers.iter().cloned());
3122 }
3123 self.send_operation(
3124 Operation::UpdateCompletionTriggers {
3125 triggers: triggers.into_iter().collect(),
3126 lamport_timestamp: self.completion_triggers_timestamp,
3127 server_id,
3128 },
3129 true,
3130 cx,
3131 );
3132 cx.notify();
3133 }
3134
3135 /// Returns a list of strings which trigger a completion menu for this language.
3136 /// Usually this is driven by LSP server which returns a list of trigger characters for completions.
3137 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3138 &self.completion_triggers
3139 }
3140
3141 /// Call this directly after performing edits to prevent the preview tab
3142 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3143 /// to return false until there are additional edits.
3144 pub fn refresh_preview(&mut self) {
3145 self.preview_version = self.version.clone();
3146 }
3147
3148 /// Whether we should preserve the preview status of a tab containing this buffer.
3149 pub fn preserve_preview(&self) -> bool {
3150 !self.has_edits_since(&self.preview_version)
3151 }
3152}
3153
3154#[doc(hidden)]
3155#[cfg(any(test, feature = "test-support"))]
3156impl Buffer {
3157 pub fn edit_via_marked_text(
3158 &mut self,
3159 marked_string: &str,
3160 autoindent_mode: Option<AutoindentMode>,
3161 cx: &mut Context<Self>,
3162 ) {
3163 let edits = self.edits_for_marked_text(marked_string);
3164 self.edit(edits, autoindent_mode, cx);
3165 }
3166
3167 pub fn set_group_interval(&mut self, group_interval: Duration) {
3168 self.text.set_group_interval(group_interval);
3169 }
3170
3171 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3172 where
3173 T: rand::Rng,
3174 {
3175 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3176 let mut last_end = None;
3177 for _ in 0..old_range_count {
3178 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3179 break;
3180 }
3181
3182 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3183 let mut range = self.random_byte_range(new_start, rng);
3184 if rng.random_bool(0.2) {
3185 mem::swap(&mut range.start, &mut range.end);
3186 }
3187 last_end = Some(range.end);
3188
3189 let new_text_len = rng.random_range(0..10);
3190 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3191 new_text = new_text.to_uppercase();
3192
3193 edits.push((range, new_text));
3194 }
3195 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3196 self.edit(edits, None, cx);
3197 }
3198
3199 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3200 let was_dirty = self.is_dirty();
3201 let old_version = self.version.clone();
3202
3203 let ops = self.text.randomly_undo_redo(rng);
3204 if !ops.is_empty() {
3205 for op in ops {
3206 self.send_operation(Operation::Buffer(op), true, cx);
3207 self.did_edit(&old_version, was_dirty, cx);
3208 }
3209 }
3210 }
3211}
3212
3213impl EventEmitter<BufferEvent> for Buffer {}
3214
3215impl Deref for Buffer {
3216 type Target = TextBuffer;
3217
3218 fn deref(&self) -> &Self::Target {
3219 &self.text
3220 }
3221}
3222
3223impl BufferSnapshot {
3224 /// Returns [`IndentSize`] for a given line that respects user settings and
3225 /// language preferences.
3226 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3227 indent_size_for_line(self, row)
3228 }
3229
3230 /// Returns [`IndentSize`] for a given position that respects user settings
3231 /// and language preferences.
3232 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3233 let settings = language_settings(
3234 self.language_at(position).map(|l| l.name()),
3235 self.file(),
3236 cx,
3237 );
3238 if settings.hard_tabs {
3239 IndentSize::tab()
3240 } else {
3241 IndentSize::spaces(settings.tab_size.get())
3242 }
3243 }
3244
3245 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3246 /// is passed in as `single_indent_size`.
3247 pub fn suggested_indents(
3248 &self,
3249 rows: impl Iterator<Item = u32>,
3250 single_indent_size: IndentSize,
3251 ) -> BTreeMap<u32, IndentSize> {
3252 let mut result = BTreeMap::new();
3253
3254 for row_range in contiguous_ranges(rows, 10) {
3255 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3256 Some(suggestions) => suggestions,
3257 _ => break,
3258 };
3259
3260 for (row, suggestion) in row_range.zip(suggestions) {
3261 let indent_size = if let Some(suggestion) = suggestion {
3262 result
3263 .get(&suggestion.basis_row)
3264 .copied()
3265 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3266 .with_delta(suggestion.delta, single_indent_size)
3267 } else {
3268 self.indent_size_for_line(row)
3269 };
3270
3271 result.insert(row, indent_size);
3272 }
3273 }
3274
3275 result
3276 }
3277
3278 fn suggest_autoindents(
3279 &self,
3280 row_range: Range<u32>,
3281 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3282 let config = &self.language.as_ref()?.config;
3283 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3284
3285 #[derive(Debug, Clone)]
3286 struct StartPosition {
3287 start: Point,
3288 suffix: SharedString,
3289 language: Arc<Language>,
3290 }
3291
3292 // Find the suggested indentation ranges based on the syntax tree.
3293 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3294 let end = Point::new(row_range.end, 0);
3295 let range = (start..end).to_offset(&self.text);
3296 let mut matches = self.syntax.matches_with_options(
3297 range.clone(),
3298 &self.text,
3299 TreeSitterOptions {
3300 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3301 max_start_depth: None,
3302 },
3303 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3304 );
3305 let indent_configs = matches
3306 .grammars()
3307 .iter()
3308 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3309 .collect::<Vec<_>>();
3310
3311 let mut indent_ranges = Vec::<Range<Point>>::new();
3312 let mut start_positions = Vec::<StartPosition>::new();
3313 let mut outdent_positions = Vec::<Point>::new();
3314 while let Some(mat) = matches.peek() {
3315 let mut start: Option<Point> = None;
3316 let mut end: Option<Point> = None;
3317
3318 let config = indent_configs[mat.grammar_index];
3319 for capture in mat.captures {
3320 if capture.index == config.indent_capture_ix {
3321 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3322 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3323 } else if Some(capture.index) == config.start_capture_ix {
3324 start = Some(Point::from_ts_point(capture.node.end_position()));
3325 } else if Some(capture.index) == config.end_capture_ix {
3326 end = Some(Point::from_ts_point(capture.node.start_position()));
3327 } else if Some(capture.index) == config.outdent_capture_ix {
3328 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3329 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3330 start_positions.push(StartPosition {
3331 start: Point::from_ts_point(capture.node.start_position()),
3332 suffix: suffix.clone(),
3333 language: mat.language.clone(),
3334 });
3335 }
3336 }
3337
3338 matches.advance();
3339 if let Some((start, end)) = start.zip(end) {
3340 if start.row == end.row {
3341 continue;
3342 }
3343 let range = start..end;
3344 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3345 Err(ix) => indent_ranges.insert(ix, range),
3346 Ok(ix) => {
3347 let prev_range = &mut indent_ranges[ix];
3348 prev_range.end = prev_range.end.max(range.end);
3349 }
3350 }
3351 }
3352 }
3353
3354 let mut error_ranges = Vec::<Range<Point>>::new();
3355 let mut matches = self
3356 .syntax
3357 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3358 while let Some(mat) = matches.peek() {
3359 let node = mat.captures[0].node;
3360 let start = Point::from_ts_point(node.start_position());
3361 let end = Point::from_ts_point(node.end_position());
3362 let range = start..end;
3363 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3364 Ok(ix) | Err(ix) => ix,
3365 };
3366 let mut end_ix = ix;
3367 while let Some(existing_range) = error_ranges.get(end_ix) {
3368 if existing_range.end < end {
3369 end_ix += 1;
3370 } else {
3371 break;
3372 }
3373 }
3374 error_ranges.splice(ix..end_ix, [range]);
3375 matches.advance();
3376 }
3377
3378 outdent_positions.sort();
3379 for outdent_position in outdent_positions {
3380 // find the innermost indent range containing this outdent_position
3381 // set its end to the outdent position
3382 if let Some(range_to_truncate) = indent_ranges
3383 .iter_mut()
3384 .rfind(|indent_range| indent_range.contains(&outdent_position))
3385 {
3386 range_to_truncate.end = outdent_position;
3387 }
3388 }
3389
3390 start_positions.sort_by_key(|b| b.start);
3391
3392 // Find the suggested indentation increases and decreased based on regexes.
3393 let mut regex_outdent_map = HashMap::default();
3394 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3395 let mut start_positions_iter = start_positions.iter().peekable();
3396
3397 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3398 self.for_each_line(
3399 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3400 ..Point::new(row_range.end, 0),
3401 |row, line| {
3402 let indent_len = self.indent_size_for_line(row).len;
3403 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3404 let row_language_config = row_language
3405 .as_ref()
3406 .map(|lang| lang.config())
3407 .unwrap_or(config);
3408
3409 if row_language_config
3410 .decrease_indent_pattern
3411 .as_ref()
3412 .is_some_and(|regex| regex.is_match(line))
3413 {
3414 indent_change_rows.push((row, Ordering::Less));
3415 }
3416 if row_language_config
3417 .increase_indent_pattern
3418 .as_ref()
3419 .is_some_and(|regex| regex.is_match(line))
3420 {
3421 indent_change_rows.push((row + 1, Ordering::Greater));
3422 }
3423 while let Some(pos) = start_positions_iter.peek() {
3424 if pos.start.row < row {
3425 let pos = start_positions_iter.next().unwrap().clone();
3426 last_seen_suffix
3427 .entry(pos.suffix.to_string())
3428 .or_default()
3429 .push(pos);
3430 } else {
3431 break;
3432 }
3433 }
3434 for rule in &row_language_config.decrease_indent_patterns {
3435 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3436 let row_start_column = self.indent_size_for_line(row).len;
3437 let basis_row = rule
3438 .valid_after
3439 .iter()
3440 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3441 .flatten()
3442 .filter(|pos| {
3443 row_language
3444 .as_ref()
3445 .or(self.language.as_ref())
3446 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3447 })
3448 .filter(|pos| pos.start.column <= row_start_column)
3449 .max_by_key(|pos| pos.start.row);
3450 if let Some(outdent_to) = basis_row {
3451 regex_outdent_map.insert(row, outdent_to.start.row);
3452 }
3453 break;
3454 }
3455 }
3456 },
3457 );
3458
3459 let mut indent_changes = indent_change_rows.into_iter().peekable();
3460 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3461 prev_non_blank_row.unwrap_or(0)
3462 } else {
3463 row_range.start.saturating_sub(1)
3464 };
3465
3466 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3467 Some(row_range.map(move |row| {
3468 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3469
3470 let mut indent_from_prev_row = false;
3471 let mut outdent_from_prev_row = false;
3472 let mut outdent_to_row = u32::MAX;
3473 let mut from_regex = false;
3474
3475 while let Some((indent_row, delta)) = indent_changes.peek() {
3476 match indent_row.cmp(&row) {
3477 Ordering::Equal => match delta {
3478 Ordering::Less => {
3479 from_regex = true;
3480 outdent_from_prev_row = true
3481 }
3482 Ordering::Greater => {
3483 indent_from_prev_row = true;
3484 from_regex = true
3485 }
3486 _ => {}
3487 },
3488
3489 Ordering::Greater => break,
3490 Ordering::Less => {}
3491 }
3492
3493 indent_changes.next();
3494 }
3495
3496 for range in &indent_ranges {
3497 if range.start.row >= row {
3498 break;
3499 }
3500 if range.start.row == prev_row && range.end > row_start {
3501 indent_from_prev_row = true;
3502 }
3503 if range.end > prev_row_start && range.end <= row_start {
3504 outdent_to_row = outdent_to_row.min(range.start.row);
3505 }
3506 }
3507
3508 if let Some(basis_row) = regex_outdent_map.get(&row) {
3509 indent_from_prev_row = false;
3510 outdent_to_row = *basis_row;
3511 from_regex = true;
3512 }
3513
3514 let within_error = error_ranges
3515 .iter()
3516 .any(|e| e.start.row < row && e.end > row_start);
3517
3518 let suggestion = if outdent_to_row == prev_row
3519 || (outdent_from_prev_row && indent_from_prev_row)
3520 {
3521 Some(IndentSuggestion {
3522 basis_row: prev_row,
3523 delta: Ordering::Equal,
3524 within_error: within_error && !from_regex,
3525 })
3526 } else if indent_from_prev_row {
3527 Some(IndentSuggestion {
3528 basis_row: prev_row,
3529 delta: Ordering::Greater,
3530 within_error: within_error && !from_regex,
3531 })
3532 } else if outdent_to_row < prev_row {
3533 Some(IndentSuggestion {
3534 basis_row: outdent_to_row,
3535 delta: Ordering::Equal,
3536 within_error: within_error && !from_regex,
3537 })
3538 } else if outdent_from_prev_row {
3539 Some(IndentSuggestion {
3540 basis_row: prev_row,
3541 delta: Ordering::Less,
3542 within_error: within_error && !from_regex,
3543 })
3544 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3545 {
3546 Some(IndentSuggestion {
3547 basis_row: prev_row,
3548 delta: Ordering::Equal,
3549 within_error: within_error && !from_regex,
3550 })
3551 } else {
3552 None
3553 };
3554
3555 prev_row = row;
3556 prev_row_start = row_start;
3557 suggestion
3558 }))
3559 }
3560
3561 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3562 while row > 0 {
3563 row -= 1;
3564 if !self.is_line_blank(row) {
3565 return Some(row);
3566 }
3567 }
3568 None
3569 }
3570
3571 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3572 let captures = self.syntax.captures(range, &self.text, |grammar| {
3573 grammar
3574 .highlights_config
3575 .as_ref()
3576 .map(|config| &config.query)
3577 });
3578 let highlight_maps = captures
3579 .grammars()
3580 .iter()
3581 .map(|grammar| grammar.highlight_map())
3582 .collect();
3583 (captures, highlight_maps)
3584 }
3585
3586 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3587 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3588 /// returned in chunks where each chunk has a single syntax highlighting style and
3589 /// diagnostic status.
3590 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3591 let range = range.start.to_offset(self)..range.end.to_offset(self);
3592
3593 let mut syntax = None;
3594 if language_aware {
3595 syntax = Some(self.get_highlights(range.clone()));
3596 }
3597 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3598 let diagnostics = language_aware;
3599 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3600 }
3601
3602 pub fn highlighted_text_for_range<T: ToOffset>(
3603 &self,
3604 range: Range<T>,
3605 override_style: Option<HighlightStyle>,
3606 syntax_theme: &SyntaxTheme,
3607 ) -> HighlightedText {
3608 HighlightedText::from_buffer_range(
3609 range,
3610 &self.text,
3611 &self.syntax,
3612 override_style,
3613 syntax_theme,
3614 )
3615 }
3616
3617 /// Invokes the given callback for each line of text in the given range of the buffer.
3618 /// Uses callback to avoid allocating a string for each line.
3619 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3620 let mut line = String::new();
3621 let mut row = range.start.row;
3622 for chunk in self
3623 .as_rope()
3624 .chunks_in_range(range.to_offset(self))
3625 .chain(["\n"])
3626 {
3627 for (newline_ix, text) in chunk.split('\n').enumerate() {
3628 if newline_ix > 0 {
3629 callback(row, &line);
3630 row += 1;
3631 line.clear();
3632 }
3633 line.push_str(text);
3634 }
3635 }
3636 }
3637
3638 /// Iterates over every [`SyntaxLayer`] in the buffer.
3639 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3640 self.syntax_layers_for_range(0..self.len(), true)
3641 }
3642
3643 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3644 let offset = position.to_offset(self);
3645 self.syntax_layers_for_range(offset..offset, false)
3646 .filter(|l| {
3647 if let Some(ranges) = l.included_sub_ranges {
3648 ranges.iter().any(|range| {
3649 let start = range.start.to_offset(self);
3650 start <= offset && {
3651 let end = range.end.to_offset(self);
3652 offset < end
3653 }
3654 })
3655 } else {
3656 l.node().start_byte() <= offset && l.node().end_byte() > offset
3657 }
3658 })
3659 .last()
3660 }
3661
3662 pub fn syntax_layers_for_range<D: ToOffset>(
3663 &self,
3664 range: Range<D>,
3665 include_hidden: bool,
3666 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3667 self.syntax
3668 .layers_for_range(range, &self.text, include_hidden)
3669 }
3670
3671 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3672 &self,
3673 range: Range<D>,
3674 ) -> Option<SyntaxLayer<'_>> {
3675 let range = range.to_offset(self);
3676 self.syntax
3677 .layers_for_range(range, &self.text, false)
3678 .max_by(|a, b| {
3679 if a.depth != b.depth {
3680 a.depth.cmp(&b.depth)
3681 } else if a.offset.0 != b.offset.0 {
3682 a.offset.0.cmp(&b.offset.0)
3683 } else {
3684 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3685 }
3686 })
3687 }
3688
3689 /// Returns the main [`Language`].
3690 pub fn language(&self) -> Option<&Arc<Language>> {
3691 self.language.as_ref()
3692 }
3693
3694 /// Returns the [`Language`] at the given location.
3695 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3696 self.syntax_layer_at(position)
3697 .map(|info| info.language)
3698 .or(self.language.as_ref())
3699 }
3700
3701 /// Returns the settings for the language at the given location.
3702 pub fn settings_at<'a, D: ToOffset>(
3703 &'a self,
3704 position: D,
3705 cx: &'a App,
3706 ) -> Cow<'a, LanguageSettings> {
3707 language_settings(
3708 self.language_at(position).map(|l| l.name()),
3709 self.file.as_ref(),
3710 cx,
3711 )
3712 }
3713
3714 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3715 CharClassifier::new(self.language_scope_at(point))
3716 }
3717
3718 /// Returns the [`LanguageScope`] at the given location.
3719 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3720 let offset = position.to_offset(self);
3721 let mut scope = None;
3722 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3723
3724 // Use the layer that has the smallest node intersecting the given point.
3725 for layer in self
3726 .syntax
3727 .layers_for_range(offset..offset, &self.text, false)
3728 {
3729 let mut cursor = layer.node().walk();
3730
3731 let mut range = None;
3732 loop {
3733 let child_range = cursor.node().byte_range();
3734 if !child_range.contains(&offset) {
3735 break;
3736 }
3737
3738 range = Some(child_range);
3739 if cursor.goto_first_child_for_byte(offset).is_none() {
3740 break;
3741 }
3742 }
3743
3744 if let Some(range) = range
3745 && smallest_range_and_depth.as_ref().is_none_or(
3746 |(smallest_range, smallest_range_depth)| {
3747 if layer.depth > *smallest_range_depth {
3748 true
3749 } else if layer.depth == *smallest_range_depth {
3750 range.len() < smallest_range.len()
3751 } else {
3752 false
3753 }
3754 },
3755 )
3756 {
3757 smallest_range_and_depth = Some((range, layer.depth));
3758 scope = Some(LanguageScope {
3759 language: layer.language.clone(),
3760 override_id: layer.override_id(offset, &self.text),
3761 });
3762 }
3763 }
3764
3765 scope.or_else(|| {
3766 self.language.clone().map(|language| LanguageScope {
3767 language,
3768 override_id: None,
3769 })
3770 })
3771 }
3772
3773 /// Returns a tuple of the range and character kind of the word
3774 /// surrounding the given position.
3775 pub fn surrounding_word<T: ToOffset>(
3776 &self,
3777 start: T,
3778 scope_context: Option<CharScopeContext>,
3779 ) -> (Range<usize>, Option<CharKind>) {
3780 let mut start = start.to_offset(self);
3781 let mut end = start;
3782 let mut next_chars = self.chars_at(start).take(128).peekable();
3783 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3784
3785 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3786 let word_kind = cmp::max(
3787 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3788 next_chars.peek().copied().map(|c| classifier.kind(c)),
3789 );
3790
3791 for ch in prev_chars {
3792 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3793 start -= ch.len_utf8();
3794 } else {
3795 break;
3796 }
3797 }
3798
3799 for ch in next_chars {
3800 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3801 end += ch.len_utf8();
3802 } else {
3803 break;
3804 }
3805 }
3806
3807 (start..end, word_kind)
3808 }
3809
3810 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3811 /// range. When `require_larger` is true, the node found must be larger than the query range.
3812 ///
3813 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3814 /// be moved to the root of the tree.
3815 fn goto_node_enclosing_range(
3816 cursor: &mut tree_sitter::TreeCursor,
3817 query_range: &Range<usize>,
3818 require_larger: bool,
3819 ) -> bool {
3820 let mut ascending = false;
3821 loop {
3822 let mut range = cursor.node().byte_range();
3823 if query_range.is_empty() {
3824 // When the query range is empty and the current node starts after it, move to the
3825 // previous sibling to find the node the containing node.
3826 if range.start > query_range.start {
3827 cursor.goto_previous_sibling();
3828 range = cursor.node().byte_range();
3829 }
3830 } else {
3831 // When the query range is non-empty and the current node ends exactly at the start,
3832 // move to the next sibling to find a node that extends beyond the start.
3833 if range.end == query_range.start {
3834 cursor.goto_next_sibling();
3835 range = cursor.node().byte_range();
3836 }
3837 }
3838
3839 let encloses = range.contains_inclusive(query_range)
3840 && (!require_larger || range.len() > query_range.len());
3841 if !encloses {
3842 ascending = true;
3843 if !cursor.goto_parent() {
3844 return false;
3845 }
3846 continue;
3847 } else if ascending {
3848 return true;
3849 }
3850
3851 // Descend into the current node.
3852 if cursor
3853 .goto_first_child_for_byte(query_range.start)
3854 .is_none()
3855 {
3856 return true;
3857 }
3858 }
3859 }
3860
3861 pub fn syntax_ancestor<'a, T: ToOffset>(
3862 &'a self,
3863 range: Range<T>,
3864 ) -> Option<tree_sitter::Node<'a>> {
3865 let range = range.start.to_offset(self)..range.end.to_offset(self);
3866 let mut result: Option<tree_sitter::Node<'a>> = None;
3867 for layer in self
3868 .syntax
3869 .layers_for_range(range.clone(), &self.text, true)
3870 {
3871 let mut cursor = layer.node().walk();
3872
3873 // Find the node that both contains the range and is larger than it.
3874 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3875 continue;
3876 }
3877
3878 let left_node = cursor.node();
3879 let mut layer_result = left_node;
3880
3881 // For an empty range, try to find another node immediately to the right of the range.
3882 if left_node.end_byte() == range.start {
3883 let mut right_node = None;
3884 while !cursor.goto_next_sibling() {
3885 if !cursor.goto_parent() {
3886 break;
3887 }
3888 }
3889
3890 while cursor.node().start_byte() == range.start {
3891 right_node = Some(cursor.node());
3892 if !cursor.goto_first_child() {
3893 break;
3894 }
3895 }
3896
3897 // If there is a candidate node on both sides of the (empty) range, then
3898 // decide between the two by favoring a named node over an anonymous token.
3899 // If both nodes are the same in that regard, favor the right one.
3900 if let Some(right_node) = right_node
3901 && (right_node.is_named() || !left_node.is_named())
3902 {
3903 layer_result = right_node;
3904 }
3905 }
3906
3907 if let Some(previous_result) = &result
3908 && previous_result.byte_range().len() < layer_result.byte_range().len()
3909 {
3910 continue;
3911 }
3912 result = Some(layer_result);
3913 }
3914
3915 result
3916 }
3917
3918 /// Find the previous sibling syntax node at the given range.
3919 ///
3920 /// This function locates the syntax node that precedes the node containing
3921 /// the given range. It searches hierarchically by:
3922 /// 1. Finding the node that contains the given range
3923 /// 2. Looking for the previous sibling at the same tree level
3924 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3925 ///
3926 /// Returns `None` if there is no previous sibling at any ancestor level.
3927 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3928 &'a self,
3929 range: Range<T>,
3930 ) -> Option<tree_sitter::Node<'a>> {
3931 let range = range.start.to_offset(self)..range.end.to_offset(self);
3932 let mut result: Option<tree_sitter::Node<'a>> = None;
3933
3934 for layer in self
3935 .syntax
3936 .layers_for_range(range.clone(), &self.text, true)
3937 {
3938 let mut cursor = layer.node().walk();
3939
3940 // Find the node that contains the range
3941 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3942 continue;
3943 }
3944
3945 // Look for the previous sibling, moving up ancestor levels if needed
3946 loop {
3947 if cursor.goto_previous_sibling() {
3948 let layer_result = cursor.node();
3949
3950 if let Some(previous_result) = &result {
3951 if previous_result.byte_range().end < layer_result.byte_range().end {
3952 continue;
3953 }
3954 }
3955 result = Some(layer_result);
3956 break;
3957 }
3958
3959 // No sibling found at this level, try moving up to parent
3960 if !cursor.goto_parent() {
3961 break;
3962 }
3963 }
3964 }
3965
3966 result
3967 }
3968
3969 /// Find the next sibling syntax node at the given range.
3970 ///
3971 /// This function locates the syntax node that follows the node containing
3972 /// the given range. It searches hierarchically by:
3973 /// 1. Finding the node that contains the given range
3974 /// 2. Looking for the next sibling at the same tree level
3975 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3976 ///
3977 /// Returns `None` if there is no next sibling at any ancestor level.
3978 pub fn syntax_next_sibling<'a, T: ToOffset>(
3979 &'a self,
3980 range: Range<T>,
3981 ) -> Option<tree_sitter::Node<'a>> {
3982 let range = range.start.to_offset(self)..range.end.to_offset(self);
3983 let mut result: Option<tree_sitter::Node<'a>> = None;
3984
3985 for layer in self
3986 .syntax
3987 .layers_for_range(range.clone(), &self.text, true)
3988 {
3989 let mut cursor = layer.node().walk();
3990
3991 // Find the node that contains the range
3992 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3993 continue;
3994 }
3995
3996 // Look for the next sibling, moving up ancestor levels if needed
3997 loop {
3998 if cursor.goto_next_sibling() {
3999 let layer_result = cursor.node();
4000
4001 if let Some(previous_result) = &result {
4002 if previous_result.byte_range().start > layer_result.byte_range().start {
4003 continue;
4004 }
4005 }
4006 result = Some(layer_result);
4007 break;
4008 }
4009
4010 // No sibling found at this level, try moving up to parent
4011 if !cursor.goto_parent() {
4012 break;
4013 }
4014 }
4015 }
4016
4017 result
4018 }
4019
4020 /// Returns the root syntax node within the given row
4021 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
4022 let start_offset = position.to_offset(self);
4023
4024 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
4025
4026 let layer = self
4027 .syntax
4028 .layers_for_range(start_offset..start_offset, &self.text, true)
4029 .next()?;
4030
4031 let mut cursor = layer.node().walk();
4032
4033 // Descend to the first leaf that touches the start of the range.
4034 while cursor.goto_first_child_for_byte(start_offset).is_some() {
4035 if cursor.node().end_byte() == start_offset {
4036 cursor.goto_next_sibling();
4037 }
4038 }
4039
4040 // Ascend to the root node within the same row.
4041 while cursor.goto_parent() {
4042 if cursor.node().start_position().row != row {
4043 break;
4044 }
4045 }
4046
4047 Some(cursor.node())
4048 }
4049
4050 /// Returns the outline for the buffer.
4051 ///
4052 /// This method allows passing an optional [`SyntaxTheme`] to
4053 /// syntax-highlight the returned symbols.
4054 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4055 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4056 }
4057
4058 /// Returns all the symbols that contain the given position.
4059 ///
4060 /// This method allows passing an optional [`SyntaxTheme`] to
4061 /// syntax-highlight the returned symbols.
4062 pub fn symbols_containing<T: ToOffset>(
4063 &self,
4064 position: T,
4065 theme: Option<&SyntaxTheme>,
4066 ) -> Vec<OutlineItem<Anchor>> {
4067 let position = position.to_offset(self);
4068 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4069 let end = self.clip_offset(position + 1, Bias::Right);
4070 let mut items = self.outline_items_containing(start..end, false, theme);
4071 let mut prev_depth = None;
4072 items.retain(|item| {
4073 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4074 prev_depth = Some(item.depth);
4075 result
4076 });
4077 items
4078 }
4079
4080 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4081 let range = range.to_offset(self);
4082 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4083 grammar.outline_config.as_ref().map(|c| &c.query)
4084 });
4085 let configs = matches
4086 .grammars()
4087 .iter()
4088 .map(|g| g.outline_config.as_ref().unwrap())
4089 .collect::<Vec<_>>();
4090
4091 while let Some(mat) = matches.peek() {
4092 let config = &configs[mat.grammar_index];
4093 let containing_item_node = maybe!({
4094 let item_node = mat.captures.iter().find_map(|cap| {
4095 if cap.index == config.item_capture_ix {
4096 Some(cap.node)
4097 } else {
4098 None
4099 }
4100 })?;
4101
4102 let item_byte_range = item_node.byte_range();
4103 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4104 None
4105 } else {
4106 Some(item_node)
4107 }
4108 });
4109
4110 if let Some(item_node) = containing_item_node {
4111 return Some(
4112 Point::from_ts_point(item_node.start_position())
4113 ..Point::from_ts_point(item_node.end_position()),
4114 );
4115 }
4116
4117 matches.advance();
4118 }
4119 None
4120 }
4121
4122 pub fn outline_items_containing<T: ToOffset>(
4123 &self,
4124 range: Range<T>,
4125 include_extra_context: bool,
4126 theme: Option<&SyntaxTheme>,
4127 ) -> Vec<OutlineItem<Anchor>> {
4128 self.outline_items_containing_internal(
4129 range,
4130 include_extra_context,
4131 theme,
4132 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4133 )
4134 }
4135
4136 pub fn outline_items_as_points_containing<T: ToOffset>(
4137 &self,
4138 range: Range<T>,
4139 include_extra_context: bool,
4140 theme: Option<&SyntaxTheme>,
4141 ) -> Vec<OutlineItem<Point>> {
4142 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4143 range
4144 })
4145 }
4146
4147 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4148 &self,
4149 range: Range<T>,
4150 include_extra_context: bool,
4151 theme: Option<&SyntaxTheme>,
4152 ) -> Vec<OutlineItem<usize>> {
4153 self.outline_items_containing_internal(
4154 range,
4155 include_extra_context,
4156 theme,
4157 |buffer, range| range.to_offset(buffer),
4158 )
4159 }
4160
4161 fn outline_items_containing_internal<T: ToOffset, U>(
4162 &self,
4163 range: Range<T>,
4164 include_extra_context: bool,
4165 theme: Option<&SyntaxTheme>,
4166 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4167 ) -> Vec<OutlineItem<U>> {
4168 let range = range.to_offset(self);
4169 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4170 grammar.outline_config.as_ref().map(|c| &c.query)
4171 });
4172
4173 let mut items = Vec::new();
4174 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4175 while let Some(mat) = matches.peek() {
4176 let config = matches.grammars()[mat.grammar_index]
4177 .outline_config
4178 .as_ref()
4179 .unwrap();
4180 if let Some(item) =
4181 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4182 {
4183 items.push(item);
4184 } else if let Some(capture) = mat
4185 .captures
4186 .iter()
4187 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4188 {
4189 let capture_range = capture.node.start_position()..capture.node.end_position();
4190 let mut capture_row_range =
4191 capture_range.start.row as u32..capture_range.end.row as u32;
4192 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4193 {
4194 capture_row_range.end -= 1;
4195 }
4196 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4197 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4198 last_row_range.end = capture_row_range.end;
4199 } else {
4200 annotation_row_ranges.push(capture_row_range);
4201 }
4202 } else {
4203 annotation_row_ranges.push(capture_row_range);
4204 }
4205 }
4206 matches.advance();
4207 }
4208
4209 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4210
4211 // Assign depths based on containment relationships and convert to anchors.
4212 let mut item_ends_stack = Vec::<Point>::new();
4213 let mut anchor_items = Vec::new();
4214 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4215 for item in items {
4216 while let Some(last_end) = item_ends_stack.last().copied() {
4217 if last_end < item.range.end {
4218 item_ends_stack.pop();
4219 } else {
4220 break;
4221 }
4222 }
4223
4224 let mut annotation_row_range = None;
4225 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4226 let row_preceding_item = item.range.start.row.saturating_sub(1);
4227 if next_annotation_row_range.end < row_preceding_item {
4228 annotation_row_ranges.next();
4229 } else {
4230 if next_annotation_row_range.end == row_preceding_item {
4231 annotation_row_range = Some(next_annotation_row_range.clone());
4232 annotation_row_ranges.next();
4233 }
4234 break;
4235 }
4236 }
4237
4238 anchor_items.push(OutlineItem {
4239 depth: item_ends_stack.len(),
4240 range: range_callback(self, item.range.clone()),
4241 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4242 text: item.text,
4243 highlight_ranges: item.highlight_ranges,
4244 name_ranges: item.name_ranges,
4245 body_range: item.body_range.map(|r| range_callback(self, r)),
4246 annotation_range: annotation_row_range.map(|annotation_range| {
4247 let point_range = Point::new(annotation_range.start, 0)
4248 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4249 range_callback(self, point_range)
4250 }),
4251 });
4252 item_ends_stack.push(item.range.end);
4253 }
4254
4255 anchor_items
4256 }
4257
4258 fn next_outline_item(
4259 &self,
4260 config: &OutlineConfig,
4261 mat: &SyntaxMapMatch,
4262 range: &Range<usize>,
4263 include_extra_context: bool,
4264 theme: Option<&SyntaxTheme>,
4265 ) -> Option<OutlineItem<Point>> {
4266 let item_node = mat.captures.iter().find_map(|cap| {
4267 if cap.index == config.item_capture_ix {
4268 Some(cap.node)
4269 } else {
4270 None
4271 }
4272 })?;
4273
4274 let item_byte_range = item_node.byte_range();
4275 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4276 return None;
4277 }
4278 let item_point_range = Point::from_ts_point(item_node.start_position())
4279 ..Point::from_ts_point(item_node.end_position());
4280
4281 let mut open_point = None;
4282 let mut close_point = None;
4283
4284 let mut buffer_ranges = Vec::new();
4285 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4286 let mut range = node.start_byte()..node.end_byte();
4287 let start = node.start_position();
4288 if node.end_position().row > start.row {
4289 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4290 }
4291
4292 if !range.is_empty() {
4293 buffer_ranges.push((range, node_is_name));
4294 }
4295 };
4296
4297 for capture in mat.captures {
4298 if capture.index == config.name_capture_ix {
4299 add_to_buffer_ranges(capture.node, true);
4300 } else if Some(capture.index) == config.context_capture_ix
4301 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4302 {
4303 add_to_buffer_ranges(capture.node, false);
4304 } else {
4305 if Some(capture.index) == config.open_capture_ix {
4306 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4307 } else if Some(capture.index) == config.close_capture_ix {
4308 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4309 }
4310 }
4311 }
4312
4313 if buffer_ranges.is_empty() {
4314 return None;
4315 }
4316 let source_range_for_text =
4317 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4318
4319 let mut text = String::new();
4320 let mut highlight_ranges = Vec::new();
4321 let mut name_ranges = Vec::new();
4322 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4323 let mut last_buffer_range_end = 0;
4324 for (buffer_range, is_name) in buffer_ranges {
4325 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4326 if space_added {
4327 text.push(' ');
4328 }
4329 let before_append_len = text.len();
4330 let mut offset = buffer_range.start;
4331 chunks.seek(buffer_range.clone());
4332 for mut chunk in chunks.by_ref() {
4333 if chunk.text.len() > buffer_range.end - offset {
4334 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4335 offset = buffer_range.end;
4336 } else {
4337 offset += chunk.text.len();
4338 }
4339 let style = chunk
4340 .syntax_highlight_id
4341 .zip(theme)
4342 .and_then(|(highlight, theme)| highlight.style(theme));
4343 if let Some(style) = style {
4344 let start = text.len();
4345 let end = start + chunk.text.len();
4346 highlight_ranges.push((start..end, style));
4347 }
4348 text.push_str(chunk.text);
4349 if offset >= buffer_range.end {
4350 break;
4351 }
4352 }
4353 if is_name {
4354 let after_append_len = text.len();
4355 let start = if space_added && !name_ranges.is_empty() {
4356 before_append_len - 1
4357 } else {
4358 before_append_len
4359 };
4360 name_ranges.push(start..after_append_len);
4361 }
4362 last_buffer_range_end = buffer_range.end;
4363 }
4364
4365 Some(OutlineItem {
4366 depth: 0, // We'll calculate the depth later
4367 range: item_point_range,
4368 source_range_for_text: source_range_for_text.to_point(self),
4369 text,
4370 highlight_ranges,
4371 name_ranges,
4372 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4373 annotation_range: None,
4374 })
4375 }
4376
4377 pub fn function_body_fold_ranges<T: ToOffset>(
4378 &self,
4379 within: Range<T>,
4380 ) -> impl Iterator<Item = Range<usize>> + '_ {
4381 self.text_object_ranges(within, TreeSitterOptions::default())
4382 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4383 }
4384
4385 /// For each grammar in the language, runs the provided
4386 /// [`tree_sitter::Query`] against the given range.
4387 pub fn matches(
4388 &self,
4389 range: Range<usize>,
4390 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4391 ) -> SyntaxMapMatches<'_> {
4392 self.syntax.matches(range, self, query)
4393 }
4394
4395 /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
4396 /// Hence, may return more bracket pairs than the range contains.
4397 ///
4398 /// Will omit known chunks.
4399 /// The resulting bracket match collections are not ordered.
4400 pub fn fetch_bracket_ranges(
4401 &self,
4402 range: Range<usize>,
4403 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4404 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4405 let mut all_bracket_matches = HashMap::default();
4406
4407 for chunk in self
4408 .tree_sitter_data
4409 .chunks
4410 .applicable_chunks(&[range.to_point(self)])
4411 {
4412 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4413 continue;
4414 }
4415 let chunk_range = chunk.anchor_range();
4416 let chunk_range = chunk_range.to_offset(&self);
4417
4418 if let Some(cached_brackets) =
4419 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4420 {
4421 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4422 continue;
4423 }
4424
4425 let mut all_brackets = Vec::new();
4426 let mut opens = Vec::new();
4427 let mut color_pairs = Vec::new();
4428
4429 let mut matches = self.syntax.matches_with_options(
4430 chunk_range.clone(),
4431 &self.text,
4432 TreeSitterOptions {
4433 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4434 max_start_depth: None,
4435 },
4436 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4437 );
4438 let configs = matches
4439 .grammars()
4440 .iter()
4441 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4442 .collect::<Vec<_>>();
4443
4444 while let Some(mat) = matches.peek() {
4445 let mut open = None;
4446 let mut close = None;
4447 let syntax_layer_depth = mat.depth;
4448 let config = configs[mat.grammar_index];
4449 let pattern = &config.patterns[mat.pattern_index];
4450 for capture in mat.captures {
4451 if capture.index == config.open_capture_ix {
4452 open = Some(capture.node.byte_range());
4453 } else if capture.index == config.close_capture_ix {
4454 close = Some(capture.node.byte_range());
4455 }
4456 }
4457
4458 matches.advance();
4459
4460 let Some((open_range, close_range)) = open.zip(close) else {
4461 continue;
4462 };
4463
4464 let bracket_range = open_range.start..=close_range.end;
4465 if !bracket_range.overlaps(&chunk_range) {
4466 continue;
4467 }
4468
4469 let index = all_brackets.len();
4470 all_brackets.push(BracketMatch {
4471 open_range: open_range.clone(),
4472 close_range: close_range.clone(),
4473 newline_only: pattern.newline_only,
4474 syntax_layer_depth,
4475 color_index: None,
4476 });
4477
4478 // Certain languages have "brackets" that are not brackets, e.g. tags. and such
4479 // bracket will match the entire tag with all text inside.
4480 // For now, avoid highlighting any pair that has more than single char in each bracket.
4481 // We need to colorize `<Element/>` bracket pairs, so cannot make this check stricter.
4482 let should_color =
4483 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4484 if should_color {
4485 opens.push(open_range.clone());
4486 color_pairs.push((open_range, close_range, index));
4487 }
4488 }
4489
4490 opens.sort_by_key(|r| (r.start, r.end));
4491 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4492 color_pairs.sort_by_key(|(_, close, _)| close.end);
4493
4494 let mut open_stack = Vec::new();
4495 let mut open_index = 0;
4496 for (open, close, index) in color_pairs {
4497 while open_index < opens.len() && opens[open_index].start < close.start {
4498 open_stack.push(opens[open_index].clone());
4499 open_index += 1;
4500 }
4501
4502 if open_stack.last() == Some(&open) {
4503 let depth_index = open_stack.len() - 1;
4504 all_brackets[index].color_index = Some(depth_index);
4505 open_stack.pop();
4506 }
4507 }
4508
4509 all_brackets.sort_by_key(|bracket_match| {
4510 (bracket_match.open_range.start, bracket_match.open_range.end)
4511 });
4512
4513 if let empty_slot @ None =
4514 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4515 {
4516 *empty_slot = Some(all_brackets.clone());
4517 }
4518 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4519 }
4520
4521 all_bracket_matches
4522 }
4523
4524 pub fn all_bracket_ranges(
4525 &self,
4526 range: Range<usize>,
4527 ) -> impl Iterator<Item = BracketMatch<usize>> {
4528 self.fetch_bracket_ranges(range.clone(), None)
4529 .into_values()
4530 .flatten()
4531 .filter(move |bracket_match| {
4532 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4533 bracket_range.overlaps(&range)
4534 })
4535 }
4536
4537 /// Returns bracket range pairs overlapping or adjacent to `range`
4538 pub fn bracket_ranges<T: ToOffset>(
4539 &self,
4540 range: Range<T>,
4541 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4542 // Find bracket pairs that *inclusively* contain the given range.
4543 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4544 self.all_bracket_ranges(range)
4545 .filter(|pair| !pair.newline_only)
4546 }
4547
4548 pub fn debug_variables_query<T: ToOffset>(
4549 &self,
4550 range: Range<T>,
4551 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4552 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4553
4554 let mut matches = self.syntax.matches_with_options(
4555 range.clone(),
4556 &self.text,
4557 TreeSitterOptions::default(),
4558 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4559 );
4560
4561 let configs = matches
4562 .grammars()
4563 .iter()
4564 .map(|grammar| grammar.debug_variables_config.as_ref())
4565 .collect::<Vec<_>>();
4566
4567 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4568
4569 iter::from_fn(move || {
4570 loop {
4571 while let Some(capture) = captures.pop() {
4572 if capture.0.overlaps(&range) {
4573 return Some(capture);
4574 }
4575 }
4576
4577 let mat = matches.peek()?;
4578
4579 let Some(config) = configs[mat.grammar_index].as_ref() else {
4580 matches.advance();
4581 continue;
4582 };
4583
4584 for capture in mat.captures {
4585 let Some(ix) = config
4586 .objects_by_capture_ix
4587 .binary_search_by_key(&capture.index, |e| e.0)
4588 .ok()
4589 else {
4590 continue;
4591 };
4592 let text_object = config.objects_by_capture_ix[ix].1;
4593 let byte_range = capture.node.byte_range();
4594
4595 let mut found = false;
4596 for (range, existing) in captures.iter_mut() {
4597 if existing == &text_object {
4598 range.start = range.start.min(byte_range.start);
4599 range.end = range.end.max(byte_range.end);
4600 found = true;
4601 break;
4602 }
4603 }
4604
4605 if !found {
4606 captures.push((byte_range, text_object));
4607 }
4608 }
4609
4610 matches.advance();
4611 }
4612 })
4613 }
4614
4615 pub fn text_object_ranges<T: ToOffset>(
4616 &self,
4617 range: Range<T>,
4618 options: TreeSitterOptions,
4619 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4620 let range =
4621 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4622
4623 let mut matches =
4624 self.syntax
4625 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4626 grammar.text_object_config.as_ref().map(|c| &c.query)
4627 });
4628
4629 let configs = matches
4630 .grammars()
4631 .iter()
4632 .map(|grammar| grammar.text_object_config.as_ref())
4633 .collect::<Vec<_>>();
4634
4635 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4636
4637 iter::from_fn(move || {
4638 loop {
4639 while let Some(capture) = captures.pop() {
4640 if capture.0.overlaps(&range) {
4641 return Some(capture);
4642 }
4643 }
4644
4645 let mat = matches.peek()?;
4646
4647 let Some(config) = configs[mat.grammar_index].as_ref() else {
4648 matches.advance();
4649 continue;
4650 };
4651
4652 for capture in mat.captures {
4653 let Some(ix) = config
4654 .text_objects_by_capture_ix
4655 .binary_search_by_key(&capture.index, |e| e.0)
4656 .ok()
4657 else {
4658 continue;
4659 };
4660 let text_object = config.text_objects_by_capture_ix[ix].1;
4661 let byte_range = capture.node.byte_range();
4662
4663 let mut found = false;
4664 for (range, existing) in captures.iter_mut() {
4665 if existing == &text_object {
4666 range.start = range.start.min(byte_range.start);
4667 range.end = range.end.max(byte_range.end);
4668 found = true;
4669 break;
4670 }
4671 }
4672
4673 if !found {
4674 captures.push((byte_range, text_object));
4675 }
4676 }
4677
4678 matches.advance();
4679 }
4680 })
4681 }
4682
4683 /// Returns enclosing bracket ranges containing the given range
4684 pub fn enclosing_bracket_ranges<T: ToOffset>(
4685 &self,
4686 range: Range<T>,
4687 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4688 let range = range.start.to_offset(self)..range.end.to_offset(self);
4689
4690 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4691 let max_depth = result
4692 .iter()
4693 .map(|mat| mat.syntax_layer_depth)
4694 .max()
4695 .unwrap_or(0);
4696 result.into_iter().filter(move |pair| {
4697 pair.open_range.start <= range.start
4698 && pair.close_range.end >= range.end
4699 && pair.syntax_layer_depth == max_depth
4700 })
4701 }
4702
4703 /// Returns the smallest enclosing bracket ranges containing the given range or None if no brackets contain range
4704 ///
4705 /// Can optionally pass a range_filter to filter the ranges of brackets to consider
4706 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4707 &self,
4708 range: Range<T>,
4709 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4710 ) -> Option<(Range<usize>, Range<usize>)> {
4711 let range = range.start.to_offset(self)..range.end.to_offset(self);
4712
4713 // Get the ranges of the innermost pair of brackets.
4714 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4715
4716 for pair in self.enclosing_bracket_ranges(range) {
4717 if let Some(range_filter) = range_filter
4718 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4719 {
4720 continue;
4721 }
4722
4723 let len = pair.close_range.end - pair.open_range.start;
4724
4725 if let Some((existing_open, existing_close)) = &result {
4726 let existing_len = existing_close.end - existing_open.start;
4727 if len > existing_len {
4728 continue;
4729 }
4730 }
4731
4732 result = Some((pair.open_range, pair.close_range));
4733 }
4734
4735 result
4736 }
4737
4738 /// Returns anchor ranges for any matches of the redaction query.
4739 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4740 /// will be run on the relevant section of the buffer.
4741 pub fn redacted_ranges<T: ToOffset>(
4742 &self,
4743 range: Range<T>,
4744 ) -> impl Iterator<Item = Range<usize>> + '_ {
4745 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4746 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4747 grammar
4748 .redactions_config
4749 .as_ref()
4750 .map(|config| &config.query)
4751 });
4752
4753 let configs = syntax_matches
4754 .grammars()
4755 .iter()
4756 .map(|grammar| grammar.redactions_config.as_ref())
4757 .collect::<Vec<_>>();
4758
4759 iter::from_fn(move || {
4760 let redacted_range = syntax_matches
4761 .peek()
4762 .and_then(|mat| {
4763 configs[mat.grammar_index].and_then(|config| {
4764 mat.captures
4765 .iter()
4766 .find(|capture| capture.index == config.redaction_capture_ix)
4767 })
4768 })
4769 .map(|mat| mat.node.byte_range());
4770 syntax_matches.advance();
4771 redacted_range
4772 })
4773 }
4774
4775 pub fn injections_intersecting_range<T: ToOffset>(
4776 &self,
4777 range: Range<T>,
4778 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4779 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4780
4781 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4782 grammar
4783 .injection_config
4784 .as_ref()
4785 .map(|config| &config.query)
4786 });
4787
4788 let configs = syntax_matches
4789 .grammars()
4790 .iter()
4791 .map(|grammar| grammar.injection_config.as_ref())
4792 .collect::<Vec<_>>();
4793
4794 iter::from_fn(move || {
4795 let ranges = syntax_matches.peek().and_then(|mat| {
4796 let config = &configs[mat.grammar_index]?;
4797 let content_capture_range = mat.captures.iter().find_map(|capture| {
4798 if capture.index == config.content_capture_ix {
4799 Some(capture.node.byte_range())
4800 } else {
4801 None
4802 }
4803 })?;
4804 let language = self.language_at(content_capture_range.start)?;
4805 Some((content_capture_range, language))
4806 });
4807 syntax_matches.advance();
4808 ranges
4809 })
4810 }
4811
4812 pub fn runnable_ranges(
4813 &self,
4814 offset_range: Range<usize>,
4815 ) -> impl Iterator<Item = RunnableRange> + '_ {
4816 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4817 grammar.runnable_config.as_ref().map(|config| &config.query)
4818 });
4819
4820 let test_configs = syntax_matches
4821 .grammars()
4822 .iter()
4823 .map(|grammar| grammar.runnable_config.as_ref())
4824 .collect::<Vec<_>>();
4825
4826 iter::from_fn(move || {
4827 loop {
4828 let mat = syntax_matches.peek()?;
4829
4830 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4831 let mut run_range = None;
4832 let full_range = mat.captures.iter().fold(
4833 Range {
4834 start: usize::MAX,
4835 end: 0,
4836 },
4837 |mut acc, next| {
4838 let byte_range = next.node.byte_range();
4839 if acc.start > byte_range.start {
4840 acc.start = byte_range.start;
4841 }
4842 if acc.end < byte_range.end {
4843 acc.end = byte_range.end;
4844 }
4845 acc
4846 },
4847 );
4848 if full_range.start > full_range.end {
4849 // We did not find a full spanning range of this match.
4850 return None;
4851 }
4852 let extra_captures: SmallVec<[_; 1]> =
4853 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4854 test_configs
4855 .extra_captures
4856 .get(capture.index as usize)
4857 .cloned()
4858 .and_then(|tag_name| match tag_name {
4859 RunnableCapture::Named(name) => {
4860 Some((capture.node.byte_range(), name))
4861 }
4862 RunnableCapture::Run => {
4863 let _ = run_range.insert(capture.node.byte_range());
4864 None
4865 }
4866 })
4867 }));
4868 let run_range = run_range?;
4869 let tags = test_configs
4870 .query
4871 .property_settings(mat.pattern_index)
4872 .iter()
4873 .filter_map(|property| {
4874 if *property.key == *"tag" {
4875 property
4876 .value
4877 .as_ref()
4878 .map(|value| RunnableTag(value.to_string().into()))
4879 } else {
4880 None
4881 }
4882 })
4883 .collect();
4884 let extra_captures = extra_captures
4885 .into_iter()
4886 .map(|(range, name)| {
4887 (
4888 name.to_string(),
4889 self.text_for_range(range).collect::<String>(),
4890 )
4891 })
4892 .collect();
4893 // All tags should have the same range.
4894 Some(RunnableRange {
4895 run_range,
4896 full_range,
4897 runnable: Runnable {
4898 tags,
4899 language: mat.language,
4900 buffer: self.remote_id(),
4901 },
4902 extra_captures,
4903 buffer_id: self.remote_id(),
4904 })
4905 });
4906
4907 syntax_matches.advance();
4908 if test_range.is_some() {
4909 // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
4910 // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
4911 return test_range;
4912 }
4913 }
4914 })
4915 }
4916
4917 /// Returns selections for remote peers intersecting the given range.
4918 #[allow(clippy::type_complexity)]
4919 pub fn selections_in_range(
4920 &self,
4921 range: Range<Anchor>,
4922 include_local: bool,
4923 ) -> impl Iterator<
4924 Item = (
4925 ReplicaId,
4926 bool,
4927 CursorShape,
4928 impl Iterator<Item = &Selection<Anchor>> + '_,
4929 ),
4930 > + '_ {
4931 self.remote_selections
4932 .iter()
4933 .filter(move |(replica_id, set)| {
4934 (include_local || **replica_id != self.text.replica_id())
4935 && !set.selections.is_empty()
4936 })
4937 .map(move |(replica_id, set)| {
4938 let start_ix = match set.selections.binary_search_by(|probe| {
4939 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4940 }) {
4941 Ok(ix) | Err(ix) => ix,
4942 };
4943 let end_ix = match set.selections.binary_search_by(|probe| {
4944 probe.start.cmp(&range.end, self).then(Ordering::Less)
4945 }) {
4946 Ok(ix) | Err(ix) => ix,
4947 };
4948
4949 (
4950 *replica_id,
4951 set.line_mode,
4952 set.cursor_shape,
4953 set.selections[start_ix..end_ix].iter(),
4954 )
4955 })
4956 }
4957
4958 /// Returns if the buffer contains any diagnostics.
4959 pub fn has_diagnostics(&self) -> bool {
4960 !self.diagnostics.is_empty()
4961 }
4962
4963 /// Returns all the diagnostics intersecting the given range.
4964 pub fn diagnostics_in_range<'a, T, O>(
4965 &'a self,
4966 search_range: Range<T>,
4967 reversed: bool,
4968 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4969 where
4970 T: 'a + Clone + ToOffset,
4971 O: 'a + FromAnchor,
4972 {
4973 let mut iterators: Vec<_> = self
4974 .diagnostics
4975 .iter()
4976 .map(|(_, collection)| {
4977 collection
4978 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4979 .peekable()
4980 })
4981 .collect();
4982
4983 std::iter::from_fn(move || {
4984 let (next_ix, _) = iterators
4985 .iter_mut()
4986 .enumerate()
4987 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4988 .min_by(|(_, a), (_, b)| {
4989 let cmp = a
4990 .range
4991 .start
4992 .cmp(&b.range.start, self)
4993 // when range is equal, sort by diagnostic severity
4994 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4995 // and stabilize order with group_id
4996 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4997 if reversed { cmp.reverse() } else { cmp }
4998 })?;
4999 iterators[next_ix]
5000 .next()
5001 .map(
5002 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
5003 diagnostic,
5004 range: FromAnchor::from_anchor(&range.start, self)
5005 ..FromAnchor::from_anchor(&range.end, self),
5006 },
5007 )
5008 })
5009 }
5010
5011 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
5012 /// should be used instead.
5013 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
5014 &self.diagnostics
5015 }
5016
5017 /// Returns all the diagnostic groups associated with the given
5018 /// language server ID. If no language server ID is provided,
5019 /// all diagnostics groups are returned.
5020 pub fn diagnostic_groups(
5021 &self,
5022 language_server_id: Option<LanguageServerId>,
5023 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
5024 let mut groups = Vec::new();
5025
5026 if let Some(language_server_id) = language_server_id {
5027 if let Ok(ix) = self
5028 .diagnostics
5029 .binary_search_by_key(&language_server_id, |e| e.0)
5030 {
5031 self.diagnostics[ix]
5032 .1
5033 .groups(language_server_id, &mut groups, self);
5034 }
5035 } else {
5036 for (language_server_id, diagnostics) in self.diagnostics.iter() {
5037 diagnostics.groups(*language_server_id, &mut groups, self);
5038 }
5039 }
5040
5041 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
5042 let a_start = &group_a.entries[group_a.primary_ix].range.start;
5043 let b_start = &group_b.entries[group_b.primary_ix].range.start;
5044 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5045 });
5046
5047 groups
5048 }
5049
5050 /// Returns an iterator over the diagnostics for the given group.
5051 pub fn diagnostic_group<O>(
5052 &self,
5053 group_id: usize,
5054 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5055 where
5056 O: FromAnchor + 'static,
5057 {
5058 self.diagnostics
5059 .iter()
5060 .flat_map(move |(_, set)| set.group(group_id, self))
5061 }
5062
5063 /// An integer version number that accounts for all updates besides
5064 /// the buffer's text itself (which is versioned via a version vector).
5065 pub fn non_text_state_update_count(&self) -> usize {
5066 self.non_text_state_update_count
5067 }
5068
5069 /// An integer version that changes when the buffer's syntax changes.
5070 pub fn syntax_update_count(&self) -> usize {
5071 self.syntax.update_count()
5072 }
5073
5074 /// Returns a snapshot of underlying file.
5075 pub fn file(&self) -> Option<&Arc<dyn File>> {
5076 self.file.as_ref()
5077 }
5078
5079 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5080 if let Some(file) = self.file() {
5081 if file.path().file_name().is_none() || include_root {
5082 Some(file.full_path(cx).to_string_lossy().into_owned())
5083 } else {
5084 Some(file.path().display(file.path_style(cx)).to_string())
5085 }
5086 } else {
5087 None
5088 }
5089 }
5090
5091 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5092 let query_str = query.fuzzy_contents;
5093 if query_str.is_some_and(|query| query.is_empty()) {
5094 return BTreeMap::default();
5095 }
5096
5097 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5098 language,
5099 override_id: None,
5100 }));
5101
5102 let mut query_ix = 0;
5103 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5104 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5105
5106 let mut words = BTreeMap::default();
5107 let mut current_word_start_ix = None;
5108 let mut chunk_ix = query.range.start;
5109 for chunk in self.chunks(query.range, false) {
5110 for (i, c) in chunk.text.char_indices() {
5111 let ix = chunk_ix + i;
5112 if classifier.is_word(c) {
5113 if current_word_start_ix.is_none() {
5114 current_word_start_ix = Some(ix);
5115 }
5116
5117 if let Some(query_chars) = &query_chars
5118 && query_ix < query_len
5119 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5120 {
5121 query_ix += 1;
5122 }
5123 continue;
5124 } else if let Some(word_start) = current_word_start_ix.take()
5125 && query_ix == query_len
5126 {
5127 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5128 let mut word_text = self.text_for_range(word_start..ix).peekable();
5129 let first_char = word_text
5130 .peek()
5131 .and_then(|first_chunk| first_chunk.chars().next());
5132 // Skip empty and "words" starting with digits as a heuristic to reduce useless completions
5133 if !query.skip_digits
5134 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5135 {
5136 words.insert(word_text.collect(), word_range);
5137 }
5138 }
5139 query_ix = 0;
5140 }
5141 chunk_ix += chunk.text.len();
5142 }
5143
5144 words
5145 }
5146}
5147
5148pub struct WordsQuery<'a> {
5149 /// Only returns words with all chars from the fuzzy string in them.
5150 pub fuzzy_contents: Option<&'a str>,
5151 /// Skips words that start with a digit.
5152 pub skip_digits: bool,
5153 /// Buffer offset range, to look for words.
5154 pub range: Range<usize>,
5155}
5156
5157fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5158 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5159}
5160
5161fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5162 let mut result = IndentSize::spaces(0);
5163 for c in text {
5164 let kind = match c {
5165 ' ' => IndentKind::Space,
5166 '\t' => IndentKind::Tab,
5167 _ => break,
5168 };
5169 if result.len == 0 {
5170 result.kind = kind;
5171 }
5172 result.len += 1;
5173 }
5174 result
5175}
5176
5177impl Clone for BufferSnapshot {
5178 fn clone(&self) -> Self {
5179 Self {
5180 text: self.text.clone(),
5181 syntax: self.syntax.clone(),
5182 file: self.file.clone(),
5183 remote_selections: self.remote_selections.clone(),
5184 diagnostics: self.diagnostics.clone(),
5185 language: self.language.clone(),
5186 tree_sitter_data: self.tree_sitter_data.clone(),
5187 non_text_state_update_count: self.non_text_state_update_count,
5188 capability: self.capability,
5189 }
5190 }
5191}
5192
5193impl Deref for BufferSnapshot {
5194 type Target = text::BufferSnapshot;
5195
5196 fn deref(&self) -> &Self::Target {
5197 &self.text
5198 }
5199}
5200
5201unsafe impl Send for BufferChunks<'_> {}
5202
5203impl<'a> BufferChunks<'a> {
5204 pub(crate) fn new(
5205 text: &'a Rope,
5206 range: Range<usize>,
5207 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5208 diagnostics: bool,
5209 buffer_snapshot: Option<&'a BufferSnapshot>,
5210 ) -> Self {
5211 let mut highlights = None;
5212 if let Some((captures, highlight_maps)) = syntax {
5213 highlights = Some(BufferChunkHighlights {
5214 captures,
5215 next_capture: None,
5216 stack: Default::default(),
5217 highlight_maps,
5218 })
5219 }
5220
5221 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5222 let chunks = text.chunks_in_range(range.clone());
5223
5224 let mut this = BufferChunks {
5225 range,
5226 buffer_snapshot,
5227 chunks,
5228 diagnostic_endpoints,
5229 error_depth: 0,
5230 warning_depth: 0,
5231 information_depth: 0,
5232 hint_depth: 0,
5233 unnecessary_depth: 0,
5234 underline: true,
5235 highlights,
5236 };
5237 this.initialize_diagnostic_endpoints();
5238 this
5239 }
5240
5241 /// Seeks to the given byte offset in the buffer.
5242 pub fn seek(&mut self, range: Range<usize>) {
5243 let old_range = std::mem::replace(&mut self.range, range.clone());
5244 self.chunks.set_range(self.range.clone());
5245 if let Some(highlights) = self.highlights.as_mut() {
5246 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5247 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5248 highlights
5249 .stack
5250 .retain(|(end_offset, _)| *end_offset > range.start);
5251 if let Some(capture) = &highlights.next_capture
5252 && range.start >= capture.node.start_byte()
5253 {
5254 let next_capture_end = capture.node.end_byte();
5255 if range.start < next_capture_end {
5256 highlights.stack.push((
5257 next_capture_end,
5258 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5259 ));
5260 }
5261 highlights.next_capture.take();
5262 }
5263 } else if let Some(snapshot) = self.buffer_snapshot {
5264 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5265 *highlights = BufferChunkHighlights {
5266 captures,
5267 next_capture: None,
5268 stack: Default::default(),
5269 highlight_maps,
5270 };
5271 } else {
5272 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5273 // Seeking such BufferChunks is not supported.
5274 debug_assert!(
5275 false,
5276 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5277 );
5278 }
5279
5280 highlights.captures.set_byte_range(self.range.clone());
5281 self.initialize_diagnostic_endpoints();
5282 }
5283 }
5284
5285 fn initialize_diagnostic_endpoints(&mut self) {
5286 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5287 && let Some(buffer) = self.buffer_snapshot
5288 {
5289 let mut diagnostic_endpoints = Vec::new();
5290 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5291 diagnostic_endpoints.push(DiagnosticEndpoint {
5292 offset: entry.range.start,
5293 is_start: true,
5294 severity: entry.diagnostic.severity,
5295 is_unnecessary: entry.diagnostic.is_unnecessary,
5296 underline: entry.diagnostic.underline,
5297 });
5298 diagnostic_endpoints.push(DiagnosticEndpoint {
5299 offset: entry.range.end,
5300 is_start: false,
5301 severity: entry.diagnostic.severity,
5302 is_unnecessary: entry.diagnostic.is_unnecessary,
5303 underline: entry.diagnostic.underline,
5304 });
5305 }
5306 diagnostic_endpoints
5307 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5308 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5309 self.hint_depth = 0;
5310 self.error_depth = 0;
5311 self.warning_depth = 0;
5312 self.information_depth = 0;
5313 }
5314 }
5315
5316 /// The current byte offset in the buffer.
5317 pub fn offset(&self) -> usize {
5318 self.range.start
5319 }
5320
5321 pub fn range(&self) -> Range<usize> {
5322 self.range.clone()
5323 }
5324
5325 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5326 let depth = match endpoint.severity {
5327 DiagnosticSeverity::ERROR => &mut self.error_depth,
5328 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5329 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5330 DiagnosticSeverity::HINT => &mut self.hint_depth,
5331 _ => return,
5332 };
5333 if endpoint.is_start {
5334 *depth += 1;
5335 } else {
5336 *depth -= 1;
5337 }
5338
5339 if endpoint.is_unnecessary {
5340 if endpoint.is_start {
5341 self.unnecessary_depth += 1;
5342 } else {
5343 self.unnecessary_depth -= 1;
5344 }
5345 }
5346 }
5347
5348 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5349 if self.error_depth > 0 {
5350 Some(DiagnosticSeverity::ERROR)
5351 } else if self.warning_depth > 0 {
5352 Some(DiagnosticSeverity::WARNING)
5353 } else if self.information_depth > 0 {
5354 Some(DiagnosticSeverity::INFORMATION)
5355 } else if self.hint_depth > 0 {
5356 Some(DiagnosticSeverity::HINT)
5357 } else {
5358 None
5359 }
5360 }
5361
5362 fn current_code_is_unnecessary(&self) -> bool {
5363 self.unnecessary_depth > 0
5364 }
5365}
5366
5367impl<'a> Iterator for BufferChunks<'a> {
5368 type Item = Chunk<'a>;
5369
5370 fn next(&mut self) -> Option<Self::Item> {
5371 let mut next_capture_start = usize::MAX;
5372 let mut next_diagnostic_endpoint = usize::MAX;
5373
5374 if let Some(highlights) = self.highlights.as_mut() {
5375 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5376 if *parent_capture_end <= self.range.start {
5377 highlights.stack.pop();
5378 } else {
5379 break;
5380 }
5381 }
5382
5383 if highlights.next_capture.is_none() {
5384 highlights.next_capture = highlights.captures.next();
5385 }
5386
5387 while let Some(capture) = highlights.next_capture.as_ref() {
5388 if self.range.start < capture.node.start_byte() {
5389 next_capture_start = capture.node.start_byte();
5390 break;
5391 } else {
5392 let highlight_id =
5393 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5394 highlights
5395 .stack
5396 .push((capture.node.end_byte(), highlight_id));
5397 highlights.next_capture = highlights.captures.next();
5398 }
5399 }
5400 }
5401
5402 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5403 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5404 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5405 if endpoint.offset <= self.range.start {
5406 self.update_diagnostic_depths(endpoint);
5407 diagnostic_endpoints.next();
5408 self.underline = endpoint.underline;
5409 } else {
5410 next_diagnostic_endpoint = endpoint.offset;
5411 break;
5412 }
5413 }
5414 }
5415 self.diagnostic_endpoints = diagnostic_endpoints;
5416
5417 if let Some(ChunkBitmaps {
5418 text: chunk,
5419 chars: chars_map,
5420 tabs,
5421 }) = self.chunks.peek_with_bitmaps()
5422 {
5423 let chunk_start = self.range.start;
5424 let mut chunk_end = (self.chunks.offset() + chunk.len())
5425 .min(next_capture_start)
5426 .min(next_diagnostic_endpoint);
5427 let mut highlight_id = None;
5428 if let Some(highlights) = self.highlights.as_ref()
5429 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5430 {
5431 chunk_end = chunk_end.min(*parent_capture_end);
5432 highlight_id = Some(*parent_highlight_id);
5433 }
5434 let bit_start = chunk_start - self.chunks.offset();
5435 let bit_end = chunk_end - self.chunks.offset();
5436
5437 let slice = &chunk[bit_start..bit_end];
5438
5439 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5440 let tabs = (tabs >> bit_start) & mask;
5441 let chars = (chars_map >> bit_start) & mask;
5442
5443 self.range.start = chunk_end;
5444 if self.range.start == self.chunks.offset() + chunk.len() {
5445 self.chunks.next().unwrap();
5446 }
5447
5448 Some(Chunk {
5449 text: slice,
5450 syntax_highlight_id: highlight_id,
5451 underline: self.underline,
5452 diagnostic_severity: self.current_diagnostic_severity(),
5453 is_unnecessary: self.current_code_is_unnecessary(),
5454 tabs,
5455 chars,
5456 ..Chunk::default()
5457 })
5458 } else {
5459 None
5460 }
5461 }
5462}
5463
5464impl operation_queue::Operation for Operation {
5465 fn lamport_timestamp(&self) -> clock::Lamport {
5466 match self {
5467 Operation::Buffer(_) => {
5468 unreachable!("buffer operations should never be deferred at this layer")
5469 }
5470 Operation::UpdateDiagnostics {
5471 lamport_timestamp, ..
5472 }
5473 | Operation::UpdateSelections {
5474 lamport_timestamp, ..
5475 }
5476 | Operation::UpdateCompletionTriggers {
5477 lamport_timestamp, ..
5478 }
5479 | Operation::UpdateLineEnding {
5480 lamport_timestamp, ..
5481 } => *lamport_timestamp,
5482 }
5483 }
5484}
5485
5486impl Default for Diagnostic {
5487 fn default() -> Self {
5488 Self {
5489 source: Default::default(),
5490 source_kind: DiagnosticSourceKind::Other,
5491 code: None,
5492 code_description: None,
5493 severity: DiagnosticSeverity::ERROR,
5494 message: Default::default(),
5495 markdown: None,
5496 group_id: 0,
5497 is_primary: false,
5498 is_disk_based: false,
5499 is_unnecessary: false,
5500 underline: true,
5501 data: None,
5502 registration_id: None,
5503 }
5504 }
5505}
5506
5507impl IndentSize {
5508 /// Returns an [`IndentSize`] representing the given spaces.
5509 pub fn spaces(len: u32) -> Self {
5510 Self {
5511 len,
5512 kind: IndentKind::Space,
5513 }
5514 }
5515
5516 /// Returns an [`IndentSize`] representing a tab.
5517 pub fn tab() -> Self {
5518 Self {
5519 len: 1,
5520 kind: IndentKind::Tab,
5521 }
5522 }
5523
5524 /// An iterator over the characters represented by this [`IndentSize`].
5525 pub fn chars(&self) -> impl Iterator<Item = char> {
5526 iter::repeat(self.char()).take(self.len as usize)
5527 }
5528
5529 /// The character representation of this [`IndentSize`].
5530 pub fn char(&self) -> char {
5531 match self.kind {
5532 IndentKind::Space => ' ',
5533 IndentKind::Tab => '\t',
5534 }
5535 }
5536
5537 /// Consumes the current [`IndentSize`] and returns a new one that has
5538 /// been shrunk or enlarged by the given size along the given direction.
5539 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5540 match direction {
5541 Ordering::Less => {
5542 if self.kind == size.kind && self.len >= size.len {
5543 self.len -= size.len;
5544 }
5545 }
5546 Ordering::Equal => {}
5547 Ordering::Greater => {
5548 if self.len == 0 {
5549 self = size;
5550 } else if self.kind == size.kind {
5551 self.len += size.len;
5552 }
5553 }
5554 }
5555 self
5556 }
5557
5558 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5559 match self.kind {
5560 IndentKind::Space => self.len as usize,
5561 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5562 }
5563 }
5564}
5565
5566#[cfg(any(test, feature = "test-support"))]
5567pub struct TestFile {
5568 pub path: Arc<RelPath>,
5569 pub root_name: String,
5570 pub local_root: Option<PathBuf>,
5571}
5572
5573#[cfg(any(test, feature = "test-support"))]
5574impl File for TestFile {
5575 fn path(&self) -> &Arc<RelPath> {
5576 &self.path
5577 }
5578
5579 fn full_path(&self, _: &gpui::App) -> PathBuf {
5580 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5581 }
5582
5583 fn as_local(&self) -> Option<&dyn LocalFile> {
5584 if self.local_root.is_some() {
5585 Some(self)
5586 } else {
5587 None
5588 }
5589 }
5590
5591 fn disk_state(&self) -> DiskState {
5592 unimplemented!()
5593 }
5594
5595 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5596 self.path().file_name().unwrap_or(self.root_name.as_ref())
5597 }
5598
5599 fn worktree_id(&self, _: &App) -> WorktreeId {
5600 WorktreeId::from_usize(0)
5601 }
5602
5603 fn to_proto(&self, _: &App) -> rpc::proto::File {
5604 unimplemented!()
5605 }
5606
5607 fn is_private(&self) -> bool {
5608 false
5609 }
5610
5611 fn path_style(&self, _cx: &App) -> PathStyle {
5612 PathStyle::local()
5613 }
5614}
5615
5616#[cfg(any(test, feature = "test-support"))]
5617impl LocalFile for TestFile {
5618 fn abs_path(&self, _cx: &App) -> PathBuf {
5619 PathBuf::from(self.local_root.as_ref().unwrap())
5620 .join(&self.root_name)
5621 .join(self.path.as_std_path())
5622 }
5623
5624 fn load(&self, _cx: &App) -> Task<Result<String>> {
5625 unimplemented!()
5626 }
5627
5628 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5629 unimplemented!()
5630 }
5631}
5632
5633pub(crate) fn contiguous_ranges(
5634 values: impl Iterator<Item = u32>,
5635 max_len: usize,
5636) -> impl Iterator<Item = Range<u32>> {
5637 let mut values = values;
5638 let mut current_range: Option<Range<u32>> = None;
5639 std::iter::from_fn(move || {
5640 loop {
5641 if let Some(value) = values.next() {
5642 if let Some(range) = &mut current_range
5643 && value == range.end
5644 && range.len() < max_len
5645 {
5646 range.end += 1;
5647 continue;
5648 }
5649
5650 let prev_range = current_range.clone();
5651 current_range = Some(value..(value + 1));
5652 if prev_range.is_some() {
5653 return prev_range;
5654 }
5655 } else {
5656 return current_range.take();
5657 }
5658 }
5659 })
5660}
5661
5662#[derive(Default, Debug)]
5663pub struct CharClassifier {
5664 scope: Option<LanguageScope>,
5665 scope_context: Option<CharScopeContext>,
5666 ignore_punctuation: bool,
5667}
5668
5669impl CharClassifier {
5670 pub fn new(scope: Option<LanguageScope>) -> Self {
5671 Self {
5672 scope,
5673 scope_context: None,
5674 ignore_punctuation: false,
5675 }
5676 }
5677
5678 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5679 Self {
5680 scope_context,
5681 ..self
5682 }
5683 }
5684
5685 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5686 Self {
5687 ignore_punctuation,
5688 ..self
5689 }
5690 }
5691
5692 pub fn is_whitespace(&self, c: char) -> bool {
5693 self.kind(c) == CharKind::Whitespace
5694 }
5695
5696 pub fn is_word(&self, c: char) -> bool {
5697 self.kind(c) == CharKind::Word
5698 }
5699
5700 pub fn is_punctuation(&self, c: char) -> bool {
5701 self.kind(c) == CharKind::Punctuation
5702 }
5703
5704 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5705 if c.is_alphanumeric() || c == '_' {
5706 return CharKind::Word;
5707 }
5708
5709 if let Some(scope) = &self.scope {
5710 let characters = match self.scope_context {
5711 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5712 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5713 None => scope.word_characters(),
5714 };
5715 if let Some(characters) = characters
5716 && characters.contains(&c)
5717 {
5718 return CharKind::Word;
5719 }
5720 }
5721
5722 if c.is_whitespace() {
5723 return CharKind::Whitespace;
5724 }
5725
5726 if ignore_punctuation {
5727 CharKind::Word
5728 } else {
5729 CharKind::Punctuation
5730 }
5731 }
5732
5733 pub fn kind(&self, c: char) -> CharKind {
5734 self.kind_with(c, self.ignore_punctuation)
5735 }
5736}
5737
5738/// Find all of the ranges of whitespace that occur at the ends of lines
5739/// in the given rope.
5740///
5741/// This could also be done with a regex search, but this implementation
5742/// avoids copying text.
5743pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5744 let mut ranges = Vec::new();
5745
5746 let mut offset = 0;
5747 let mut prev_chunk_trailing_whitespace_range = 0..0;
5748 for chunk in rope.chunks() {
5749 let mut prev_line_trailing_whitespace_range = 0..0;
5750 for (i, line) in chunk.split('\n').enumerate() {
5751 let line_end_offset = offset + line.len();
5752 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5753 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5754
5755 if i == 0 && trimmed_line_len == 0 {
5756 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5757 }
5758 if !prev_line_trailing_whitespace_range.is_empty() {
5759 ranges.push(prev_line_trailing_whitespace_range);
5760 }
5761
5762 offset = line_end_offset + 1;
5763 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5764 }
5765
5766 offset -= 1;
5767 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5768 }
5769
5770 if !prev_chunk_trailing_whitespace_range.is_empty() {
5771 ranges.push(prev_chunk_trailing_whitespace_range);
5772 }
5773
5774 ranges
5775}