1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff_with_offsets,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::channel::oneshot;
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::Arc,
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
/// Indicates whether a [`Buffer`] can be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
    /// The buffer is a mutable replica, but has been toggled to be read-only.
85 Read,
86 /// The buffer is a read-only replica.
87 ReadOnly,
88}
89
90impl Capability {
91 /// Returns `true` if the capability is `ReadWrite`.
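    ///
    /// # Example
    ///
    /// A minimal illustration (kept out of doctests):
    ///
    /// ```ignore
    /// assert!(Capability::ReadWrite.editable());
    /// assert!(!Capability::Read.editable());
    /// assert!(!Capability::ReadOnly.editable());
    /// ```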
92 pub fn editable(self) -> bool {
93 matches!(self, Capability::ReadWrite)
94 }
95}
96
97pub type BufferRow = u32;
98
99/// An in-memory representation of a source code file, including its text,
100/// syntax trees, git status, and diagnostics.
101pub struct Buffer {
102 text: TextBuffer,
103 branch_state: Option<BufferBranchState>,
104 /// Filesystem state, `None` when there is no path.
105 file: Option<Arc<dyn File>>,
106 /// The mtime of the file when this buffer was last loaded from
107 /// or saved to disk.
108 saved_mtime: Option<MTime>,
109 /// The version vector when this buffer was last loaded from
110 /// or saved to disk.
111 saved_version: clock::Global,
112 preview_version: clock::Global,
113 transaction_depth: usize,
114 was_dirty_before_starting_transaction: Option<bool>,
115 reload_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 autoindent_requests: Vec<Arc<AutoindentRequest>>,
118 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
119 pending_autoindent: Option<Task<()>>,
120 sync_parse_timeout: Option<Duration>,
121 syntax_map: Mutex<SyntaxMap>,
122 reparse: Option<Task<()>>,
123 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
124 non_text_state_update_count: usize,
125 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
126 remote_selections: TreeMap<ReplicaId, SelectionSet>,
127 diagnostics_timestamp: clock::Lamport,
128 completion_triggers: BTreeSet<String>,
129 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
130 completion_triggers_timestamp: clock::Lamport,
131 deferred_ops: OperationQueue<Operation>,
132 capability: Capability,
133 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` as of the most recent call.
136 has_unsaved_edits: Cell<(clock::Global, bool)>,
137 change_bits: Vec<rc::Weak<Cell<bool>>>,
138 _subscriptions: Vec<gpui::Subscription>,
139 tree_sitter_data: Arc<TreeSitterData>,
140 encoding: &'static Encoding,
141 has_bom: bool,
142}
143
144#[derive(Debug)]
145pub struct TreeSitterData {
146 chunks: RowChunks,
147 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
148}
149
150const MAX_ROWS_IN_A_CHUNK: u32 = 50;
151
152impl TreeSitterData {
153 fn clear(&mut self, snapshot: text::BufferSnapshot) {
154 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
155 self.brackets_by_chunks.get_mut().clear();
156 self.brackets_by_chunks
157 .get_mut()
158 .resize(self.chunks.len(), None);
159 }
160
161 fn new(snapshot: text::BufferSnapshot) -> Self {
162 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
163 Self {
164 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
165 chunks,
166 }
167 }
168
169 fn version(&self) -> &clock::Global {
170 self.chunks.version()
171 }
172}
173
174#[derive(Copy, Clone, Debug, PartialEq, Eq)]
175pub enum ParseStatus {
176 Idle,
177 Parsing,
178}
179
180struct BufferBranchState {
181 base_buffer: Entity<Buffer>,
182 merged_operations: Vec<Lamport>,
183}
184
185/// An immutable, cheaply cloneable representation of a fixed
186/// state of a buffer.
187pub struct BufferSnapshot {
188 pub text: text::BufferSnapshot,
189 pub syntax: SyntaxSnapshot,
190 file: Option<Arc<dyn File>>,
191 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
192 remote_selections: TreeMap<ReplicaId, SelectionSet>,
193 language: Option<Arc<Language>>,
194 non_text_state_update_count: usize,
195 tree_sitter_data: Arc<TreeSitterData>,
196 pub capability: Capability,
197}
198
199/// The kind and amount of indentation in a particular line. For now,
200/// assumes that indentation is all the same character.
201#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
202pub struct IndentSize {
203 /// The number of bytes that comprise the indentation.
204 pub len: u32,
205 /// The kind of whitespace used for indentation.
206 pub kind: IndentKind,
207}
208
209/// A whitespace character that's used for indentation.
210#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
211pub enum IndentKind {
212 /// An ASCII space character.
213 #[default]
214 Space,
215 /// An ASCII tab character.
216 Tab,
217}
218
219/// The shape of a selection cursor.
220#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
221pub enum CursorShape {
222 /// A vertical bar
223 #[default]
224 Bar,
225 /// A block that surrounds the following character
226 Block,
227 /// An underline that runs along the following character
228 Underline,
229 /// A box drawn around the following character
230 Hollow,
231}
232
233impl From<settings::CursorShape> for CursorShape {
234 fn from(shape: settings::CursorShape) -> Self {
235 match shape {
236 settings::CursorShape::Bar => CursorShape::Bar,
237 settings::CursorShape::Block => CursorShape::Block,
238 settings::CursorShape::Underline => CursorShape::Underline,
239 settings::CursorShape::Hollow => CursorShape::Hollow,
240 }
241 }
242}
243
244#[derive(Clone, Debug)]
245struct SelectionSet {
246 line_mode: bool,
247 cursor_shape: CursorShape,
248 selections: Arc<[Selection<Anchor>]>,
249 lamport_timestamp: clock::Lamport,
250}
251
252/// A diagnostic associated with a certain range of a buffer.
253#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
254pub struct Diagnostic {
255 /// The name of the service that produced this diagnostic.
256 pub source: Option<String>,
257 /// The ID provided by the dynamic registration that produced this diagnostic.
258 pub registration_id: Option<SharedString>,
259 /// A machine-readable code that identifies this diagnostic.
260 pub code: Option<NumberOrString>,
261 pub code_description: Option<lsp::Uri>,
262 /// Whether this diagnostic is a hint, warning, or error.
263 pub severity: DiagnosticSeverity,
264 /// The human-readable message associated with this diagnostic.
265 pub message: String,
266 /// The human-readable message (in markdown format)
267 pub markdown: Option<String>,
268 /// An id that identifies the group to which this diagnostic belongs.
269 ///
270 /// When a language server produces a diagnostic with
271 /// one or more associated diagnostics, those diagnostics are all
272 /// assigned a single group ID.
273 pub group_id: usize,
274 /// Whether this diagnostic is the primary diagnostic for its group.
275 ///
276 /// In a given group, the primary diagnostic is the top-level diagnostic
277 /// returned by the language server. The non-primary diagnostics are the
278 /// associated diagnostics.
279 pub is_primary: bool,
280 /// Whether this diagnostic is considered to originate from an analysis of
281 /// files on disk, as opposed to any unsaved buffer contents. This is a
282 /// property of a given diagnostic source, and is configured for a given
283 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
284 /// for the language server.
285 pub is_disk_based: bool,
286 /// Whether this diagnostic marks unnecessary code.
287 pub is_unnecessary: bool,
    /// Quick separation of diagnostic groups based on their source.
289 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the server when code actions are requested for this diagnostic.
291 pub data: Option<Value>,
292 /// Whether to underline the corresponding text range in the editor.
293 pub underline: bool,
294}
295
296#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
297pub enum DiagnosticSourceKind {
298 Pulled,
299 Pushed,
300 Other,
301}
302
303/// An operation used to synchronize this buffer with its other replicas.
304#[derive(Clone, Debug, PartialEq)]
305pub enum Operation {
306 /// A text operation.
307 Buffer(text::Operation),
308
309 /// An update to the buffer's diagnostics.
310 UpdateDiagnostics {
311 /// The id of the language server that produced the new diagnostics.
312 server_id: LanguageServerId,
313 /// The diagnostics.
314 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
315 /// The buffer's lamport timestamp.
316 lamport_timestamp: clock::Lamport,
317 },
318
319 /// An update to the most recent selections in this buffer.
320 UpdateSelections {
321 /// The selections.
322 selections: Arc<[Selection<Anchor>]>,
323 /// The buffer's lamport timestamp.
324 lamport_timestamp: clock::Lamport,
325 /// Whether the selections are in 'line mode'.
326 line_mode: bool,
327 /// The [`CursorShape`] associated with these selections.
328 cursor_shape: CursorShape,
329 },
330
331 /// An update to the characters that should trigger autocompletion
332 /// for this buffer.
333 UpdateCompletionTriggers {
334 /// The characters that trigger autocompletion.
335 triggers: Vec<String>,
336 /// The buffer's lamport timestamp.
337 lamport_timestamp: clock::Lamport,
338 /// The language server ID.
339 server_id: LanguageServerId,
340 },
341
342 /// An update to the line ending type of this buffer.
343 UpdateLineEnding {
344 /// The line ending type.
345 line_ending: LineEnding,
346 /// The buffer's lamport timestamp.
347 lamport_timestamp: clock::Lamport,
348 },
349}
350
351/// An event that occurs in a buffer.
352#[derive(Clone, Debug, PartialEq)]
353pub enum BufferEvent {
354 /// The buffer was changed in a way that must be
355 /// propagated to its other replicas.
356 Operation {
357 operation: Operation,
358 is_local: bool,
359 },
360 /// The buffer was edited.
361 Edited,
362 /// The buffer's `dirty` bit changed.
363 DirtyChanged,
364 /// The buffer was saved.
365 Saved,
366 /// The buffer's file was changed on disk.
367 FileHandleChanged,
368 /// The buffer was reloaded.
369 Reloaded,
    /// The buffer needs to be reloaded.
371 ReloadNeeded,
372 /// The buffer's language was changed.
    /// The boolean indicates whether the buffer gained a language it did not have before (it previously had none, or only plain text).
374 LanguageChanged(bool),
375 /// The buffer's syntax trees were updated.
376 Reparsed,
377 /// The buffer's diagnostics were updated.
378 DiagnosticsUpdated,
379 /// The buffer gained or lost editing capabilities.
380 CapabilityChanged,
381}
382
383/// The file associated with a buffer.
384pub trait File: Send + Sync + Any {
385 /// Returns the [`LocalFile`] associated with this file, if the
386 /// file is local.
387 fn as_local(&self) -> Option<&dyn LocalFile>;
388
389 /// Returns whether this file is local.
390 fn is_local(&self) -> bool {
391 self.as_local().is_some()
392 }
393
394 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
395 /// only available in some states, such as modification time.
396 fn disk_state(&self) -> DiskState;
397
398 /// Returns the path of this file relative to the worktree's root directory.
399 fn path(&self) -> &Arc<RelPath>;
400
401 /// Returns the path of this file relative to the worktree's parent directory (this means it
402 /// includes the name of the worktree's root folder).
403 fn full_path(&self, cx: &App) -> PathBuf;
404
405 /// Returns the path style of this file.
406 fn path_style(&self, cx: &App) -> PathStyle;
407
408 /// Returns the last component of this handle's absolute path. If this handle refers to the root
409 /// of its worktree, then this method will return the name of the worktree itself.
410 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
411
412 /// Returns the id of the worktree to which this file belongs.
413 ///
414 /// This is needed for looking up project-specific settings.
415 fn worktree_id(&self, cx: &App) -> WorktreeId;
416
417 /// Converts this file into a protobuf message.
418 fn to_proto(&self, cx: &App) -> rpc::proto::File;
419
    /// Returns whether Zed considers this to be a private file.
421 fn is_private(&self) -> bool;
422
423 fn can_open(&self) -> bool {
424 !self.is_local()
425 }
426}
427
428/// The file's storage status - whether it's stored (`Present`), and if so when it was last
429/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
430/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
431/// indicator for new files.
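///
/// # Example
///
/// A small sketch of how the states relate to [`DiskState::exists`] and
/// [`DiskState::mtime`] (illustrative only, kept out of doctests):
///
/// ```ignore
/// let state = DiskState::Deleted;
/// assert!(!state.exists());
/// assert!(state.is_deleted());
/// assert!(state.mtime().is_none());
/// ```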
432#[derive(Copy, Clone, Debug, PartialEq)]
433pub enum DiskState {
434 /// File created in Zed that has not been saved.
435 New,
436 /// File present on the filesystem.
437 Present { mtime: MTime },
438 /// Deleted file that was previously present.
439 Deleted,
    /// An old version of a file that was previously present,
    /// usually from a version control system (e.g. a git blob).
442 Historic { was_deleted: bool },
443}
444
445impl DiskState {
446 /// Returns the file's last known modification time on disk.
447 pub fn mtime(self) -> Option<MTime> {
448 match self {
449 DiskState::New => None,
450 DiskState::Present { mtime } => Some(mtime),
451 DiskState::Deleted => None,
452 DiskState::Historic { .. } => None,
453 }
454 }
455
456 pub fn exists(&self) -> bool {
457 match self {
458 DiskState::New => false,
459 DiskState::Present { .. } => true,
460 DiskState::Deleted => false,
461 DiskState::Historic { .. } => false,
462 }
463 }
464
465 /// Returns true if this state represents a deleted file.
466 pub fn is_deleted(&self) -> bool {
467 match self {
468 DiskState::Deleted => true,
469 DiskState::Historic { was_deleted } => *was_deleted,
470 _ => false,
471 }
472 }
473}
474
475/// The file associated with a buffer, in the case where the file is on the local disk.
476pub trait LocalFile: File {
    /// Returns the absolute path of this file.
478 fn abs_path(&self, cx: &App) -> PathBuf;
479
480 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
481 fn load(&self, cx: &App) -> Task<Result<String>>;
482
483 /// Loads the file's contents from disk.
484 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
485}
486
487/// The auto-indent behavior associated with an editing operation.
488/// For some editing operations, each affected line of text has its
489/// indentation recomputed. For other operations, the entire block
490/// of edited text is adjusted uniformly.
491#[derive(Clone, Debug)]
492pub enum AutoindentMode {
493 /// Indent each line of inserted text.
494 EachLine,
495 /// Apply the same indentation adjustment to all of the lines
496 /// in a given insertion.
497 Block {
498 /// The original indentation column of the first line of each
499 /// insertion, if it has been copied.
500 ///
501 /// Knowing this makes it possible to preserve the relative indentation
502 /// of every line in the insertion from when it was copied.
503 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion is shifted by that same difference, `b - a`.
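        ///
        /// For example, a hypothetical paste of a block originally indented to
        /// column 8 might be built like this (a sketch, kept out of doctests):
        ///
        /// ```ignore
        /// let mode = AutoindentMode::Block {
        ///     // One entry per insertion; `Some(8)` records the copied block's
        ///     // original indent column.
        ///     original_indent_columns: vec![Some(8)],
        /// };
        /// // If the first line is then auto-indented to column 4, every line of
        /// // the insertion shifts left by 4 columns.
        /// ```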
507 original_indent_columns: Vec<Option<u32>>,
508 },
509}
510
511#[derive(Clone)]
512struct AutoindentRequest {
513 before_edit: BufferSnapshot,
514 entries: Vec<AutoindentRequestEntry>,
515 is_block_mode: bool,
516 ignore_empty_lines: bool,
517}
518
519#[derive(Debug, Clone)]
520struct AutoindentRequestEntry {
521 /// A range of the buffer whose indentation should be adjusted.
522 range: Range<Anchor>,
523 /// The row of the edit start in the buffer before the edit was applied.
    /// This is stored here because the anchors in `range` are created after
    /// the edit, so they cannot be used with the `before_edit` snapshot.
526 old_row: Option<u32>,
527 indent_size: IndentSize,
528 original_indent_column: Option<u32>,
529}
530
531#[derive(Debug)]
532struct IndentSuggestion {
533 basis_row: u32,
534 delta: Ordering,
535 within_error: bool,
536}
537
538struct BufferChunkHighlights<'a> {
539 captures: SyntaxMapCaptures<'a>,
540 next_capture: Option<SyntaxMapCapture<'a>>,
541 stack: Vec<(usize, HighlightId)>,
542 highlight_maps: Vec<HighlightMap>,
543}
544
545/// An iterator that yields chunks of a buffer's text, along with their
546/// syntax highlights and diagnostic status.
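///
/// A consumption sketch, assuming the usual `(range, language_aware)` form of
/// [`BufferSnapshot::chunks`] (kept out of doctests):
///
/// ```ignore
/// let mut text = String::new();
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // Each chunk carries a text slice plus optional highlight and
///     // diagnostic information.
///     text.push_str(chunk.text);
/// }
/// ```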
547pub struct BufferChunks<'a> {
548 buffer_snapshot: Option<&'a BufferSnapshot>,
549 range: Range<usize>,
550 chunks: text::Chunks<'a>,
551 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
552 error_depth: usize,
553 warning_depth: usize,
554 information_depth: usize,
555 hint_depth: usize,
556 unnecessary_depth: usize,
557 underline: bool,
558 highlights: Option<BufferChunkHighlights<'a>>,
559}
560
561/// A chunk of a buffer's text, along with its syntax highlight and
562/// diagnostic status.
563#[derive(Clone, Debug, Default)]
564pub struct Chunk<'a> {
565 /// The text of the chunk.
566 pub text: &'a str,
567 /// The syntax highlighting style of the chunk.
568 pub syntax_highlight_id: Option<HighlightId>,
569 /// The highlight style that has been applied to this chunk in
570 /// the editor.
571 pub highlight_style: Option<HighlightStyle>,
572 /// The severity of diagnostic associated with this chunk, if any.
573 pub diagnostic_severity: Option<DiagnosticSeverity>,
574 /// A bitset of which characters are tabs in this string.
575 pub tabs: u128,
576 /// Bitmap of character indices in this chunk
577 pub chars: u128,
578 /// Whether this chunk of text is marked as unnecessary.
579 pub is_unnecessary: bool,
580 /// Whether this chunk of text was originally a tab character.
581 pub is_tab: bool,
582 /// Whether this chunk of text was originally an inlay.
583 pub is_inlay: bool,
584 /// Whether to underline the corresponding text range in the editor.
585 pub underline: bool,
586}
587
588/// A set of edits to a given version of a buffer, computed asynchronously.
589#[derive(Debug)]
590pub struct Diff {
591 pub base_version: clock::Global,
592 pub line_ending: LineEnding,
593 pub edits: Vec<(Range<usize>, Arc<str>)>,
594}
595
596#[derive(Debug, Clone, Copy)]
597pub(crate) struct DiagnosticEndpoint {
598 offset: usize,
599 is_start: bool,
600 underline: bool,
601 severity: DiagnosticSeverity,
602 is_unnecessary: bool,
603}
604
605/// A class of characters, used for characterizing a run of text.
606#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
607pub enum CharKind {
608 /// Whitespace.
609 Whitespace,
610 /// Punctuation.
611 Punctuation,
612 /// Word.
613 Word,
614}
615
616/// Context for character classification within a specific scope.
617#[derive(Copy, Clone, Eq, PartialEq, Debug)]
618pub enum CharScopeContext {
619 /// Character classification for completion queries.
620 ///
621 /// This context treats certain characters as word constituents that would
622 /// normally be considered punctuation, such as '-' in Tailwind classes
623 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
624 Completion,
625 /// Character classification for linked edits.
626 ///
627 /// This context handles characters that should be treated as part of
628 /// identifiers during linked editing operations, such as '.' in JSX
629 /// component names like `<Animated.View>`.
630 LinkedEdit,
631}
632
/// A runnable is a set of data about a region that could be resolved into a task.
634pub struct Runnable {
635 pub tags: SmallVec<[RunnableTag; 1]>,
636 pub language: Arc<Language>,
637 pub buffer: BufferId,
638}
639
640#[derive(Default, Clone, Debug)]
641pub struct HighlightedText {
642 pub text: SharedString,
643 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
644}
645
646#[derive(Default, Debug)]
647struct HighlightedTextBuilder {
648 pub text: String,
649 highlights: Vec<(Range<usize>, HighlightStyle)>,
650}
651
652impl HighlightedText {
653 pub fn from_buffer_range<T: ToOffset>(
654 range: Range<T>,
655 snapshot: &text::BufferSnapshot,
656 syntax_snapshot: &SyntaxSnapshot,
657 override_style: Option<HighlightStyle>,
658 syntax_theme: &SyntaxTheme,
659 ) -> Self {
660 let mut highlighted_text = HighlightedTextBuilder::default();
661 highlighted_text.add_text_from_buffer_range(
662 range,
663 snapshot,
664 syntax_snapshot,
665 override_style,
666 syntax_theme,
667 );
668 highlighted_text.build()
669 }
670
671 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
672 gpui::StyledText::new(self.text.clone())
673 .with_default_highlights(default_style, self.highlights.iter().cloned())
674 }
675
    /// Returns the first line, with leading whitespace trimmed unless a
    /// highlight starts within it, and a boolean indicating whether more lines follow.
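    ///
    /// A small illustration (kept out of doctests):
    ///
    /// ```ignore
    /// let text = HighlightedText {
    ///     text: "    let x = 1;\nlet y = 2;".into(),
    ///     highlights: Vec::new(),
    /// };
    /// let (preview, has_more) = text.first_line_preview();
    /// assert_eq!(&*preview.text, "let x = 1;");
    /// assert!(has_more);
    /// ```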
678 pub fn first_line_preview(self) -> (Self, bool) {
679 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
680 let first_line = &self.text[..newline_ix];
681
682 // Trim leading whitespace, unless an edit starts prior to it.
683 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
684 if let Some((first_highlight_range, _)) = self.highlights.first() {
685 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
686 }
687
688 let preview_text = &first_line[preview_start_ix..];
689 let preview_highlights = self
690 .highlights
691 .into_iter()
692 .skip_while(|(range, _)| range.end <= preview_start_ix)
693 .take_while(|(range, _)| range.start < newline_ix)
694 .filter_map(|(mut range, highlight)| {
695 range.start = range.start.saturating_sub(preview_start_ix);
696 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
697 if range.is_empty() {
698 None
699 } else {
700 Some((range, highlight))
701 }
702 });
703
704 let preview = Self {
705 text: SharedString::new(preview_text),
706 highlights: preview_highlights.collect(),
707 };
708
709 (preview, self.text.len() > newline_ix)
710 }
711}
712
713impl HighlightedTextBuilder {
714 pub fn build(self) -> HighlightedText {
715 HighlightedText {
716 text: self.text.into(),
717 highlights: self.highlights,
718 }
719 }
720
721 pub fn add_text_from_buffer_range<T: ToOffset>(
722 &mut self,
723 range: Range<T>,
724 snapshot: &text::BufferSnapshot,
725 syntax_snapshot: &SyntaxSnapshot,
726 override_style: Option<HighlightStyle>,
727 syntax_theme: &SyntaxTheme,
728 ) {
729 let range = range.to_offset(snapshot);
730 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
731 let start = self.text.len();
732 self.text.push_str(chunk.text);
733 let end = self.text.len();
734
735 if let Some(highlight_style) = chunk
736 .syntax_highlight_id
737 .and_then(|id| id.style(syntax_theme))
738 {
739 let highlight_style = override_style.map_or(highlight_style, |override_style| {
740 highlight_style.highlight(override_style)
741 });
742 self.highlights.push((start..end, highlight_style));
743 } else if let Some(override_style) = override_style {
744 self.highlights.push((start..end, override_style));
745 }
746 }
747 }
748
749 fn highlighted_chunks<'a>(
750 range: Range<usize>,
751 snapshot: &'a text::BufferSnapshot,
752 syntax_snapshot: &'a SyntaxSnapshot,
753 ) -> BufferChunks<'a> {
754 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
755 grammar
756 .highlights_config
757 .as_ref()
758 .map(|config| &config.query)
759 });
760
761 let highlight_maps = captures
762 .grammars()
763 .iter()
764 .map(|grammar| grammar.highlight_map())
765 .collect();
766
767 BufferChunks::new(
768 snapshot.as_rope(),
769 range,
770 Some((captures, highlight_maps)),
771 false,
772 None,
773 )
774 }
775}
776
777#[derive(Clone)]
778pub struct EditPreview {
779 old_snapshot: text::BufferSnapshot,
780 applied_edits_snapshot: text::BufferSnapshot,
781 syntax_snapshot: SyntaxSnapshot,
782}
783
784impl EditPreview {
785 pub fn as_unified_diff(
786 &self,
787 file: Option<&Arc<dyn File>>,
788 edits: &[(Range<Anchor>, impl AsRef<str>)],
789 ) -> Option<String> {
790 let (first, _) = edits.first()?;
791 let (last, _) = edits.last()?;
792
793 let start = first.start.to_point(&self.old_snapshot);
794 let old_end = last.end.to_point(&self.old_snapshot);
795 let new_end = last
796 .end
797 .bias_right(&self.old_snapshot)
798 .to_point(&self.applied_edits_snapshot);
799
800 let start = Point::new(start.row.saturating_sub(3), 0);
801 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
802 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
803
804 let diff_body = unified_diff_with_offsets(
805 &self
806 .old_snapshot
807 .text_for_range(start..old_end)
808 .collect::<String>(),
809 &self
810 .applied_edits_snapshot
811 .text_for_range(start..new_end)
812 .collect::<String>(),
813 start.row,
814 start.row,
815 );
816
817 let path = file.map(|f| f.path().as_unix_str());
818 let header = match path {
819 Some(p) => format!("--- a/{}\n+++ b/{}\n", p, p),
820 None => String::new(),
821 };
822
823 Some(format!("{}{}", header, diff_body))
824 }
825
826 pub fn highlight_edits(
827 &self,
828 current_snapshot: &BufferSnapshot,
829 edits: &[(Range<Anchor>, impl AsRef<str>)],
830 include_deletions: bool,
831 cx: &App,
832 ) -> HighlightedText {
833 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
834 return HighlightedText::default();
835 };
836
837 let mut highlighted_text = HighlightedTextBuilder::default();
838
839 let visible_range_in_preview_snapshot =
840 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
841 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
842
843 let insertion_highlight_style = HighlightStyle {
844 background_color: Some(cx.theme().status().created_background),
845 ..Default::default()
846 };
847 let deletion_highlight_style = HighlightStyle {
848 background_color: Some(cx.theme().status().deleted_background),
849 ..Default::default()
850 };
851 let syntax_theme = cx.theme().syntax();
852
853 for (range, edit_text) in edits {
854 let edit_new_end_in_preview_snapshot = range
855 .end
856 .bias_right(&self.old_snapshot)
857 .to_offset(&self.applied_edits_snapshot);
858 let edit_start_in_preview_snapshot =
859 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
860
861 let unchanged_range_in_preview_snapshot =
862 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
863 if !unchanged_range_in_preview_snapshot.is_empty() {
864 highlighted_text.add_text_from_buffer_range(
865 unchanged_range_in_preview_snapshot,
866 &self.applied_edits_snapshot,
867 &self.syntax_snapshot,
868 None,
869 syntax_theme,
870 );
871 }
872
873 let range_in_current_snapshot = range.to_offset(current_snapshot);
874 if include_deletions && !range_in_current_snapshot.is_empty() {
875 highlighted_text.add_text_from_buffer_range(
876 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
879 Some(deletion_highlight_style),
880 syntax_theme,
881 );
882 }
883
884 if !edit_text.as_ref().is_empty() {
885 highlighted_text.add_text_from_buffer_range(
886 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
887 &self.applied_edits_snapshot,
888 &self.syntax_snapshot,
889 Some(insertion_highlight_style),
890 syntax_theme,
891 );
892 }
893
894 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
895 }
896
897 highlighted_text.add_text_from_buffer_range(
898 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
899 &self.applied_edits_snapshot,
900 &self.syntax_snapshot,
901 None,
902 syntax_theme,
903 );
904
905 highlighted_text.build()
906 }
907
908 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
909 cx.new(|cx| {
910 let mut buffer = Buffer::local_normalized(
911 self.applied_edits_snapshot.as_rope().clone(),
912 self.applied_edits_snapshot.line_ending(),
913 cx,
914 );
915 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
916 buffer
917 })
918 }
919
920 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
921 let (first, _) = edits.first()?;
922 let (last, _) = edits.last()?;
923
924 let start = first
925 .start
926 .bias_left(&self.old_snapshot)
927 .to_point(&self.applied_edits_snapshot);
928 let end = last
929 .end
930 .bias_right(&self.old_snapshot)
931 .to_point(&self.applied_edits_snapshot);
932
933 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
934 let range = Point::new(start.row, 0)
935 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
936
937 Some(range)
938 }
939}
940
941#[derive(Clone, Debug, PartialEq, Eq)]
942pub struct BracketMatch<T> {
943 pub open_range: Range<T>,
944 pub close_range: Range<T>,
945 pub newline_only: bool,
946 pub syntax_layer_depth: usize,
947 pub color_index: Option<usize>,
948}
949
950impl<T> BracketMatch<T> {
951 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
952 (self.open_range, self.close_range)
953 }
954}
955
956impl Buffer {
957 /// Create a new buffer with the given base text.
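    ///
    /// A typical construction sketch (assumes a GPUI `App` context, e.g. in a
    /// test; kept out of doctests):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("Hello, world!", cx));
    /// ```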
958 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
959 Self::build(
960 TextBuffer::new(
961 ReplicaId::LOCAL,
962 cx.entity_id().as_non_zero_u64().into(),
963 base_text.into(),
964 ),
965 None,
966 Capability::ReadWrite,
967 )
968 }
969
970 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
971 pub fn local_normalized(
972 base_text_normalized: Rope,
973 line_ending: LineEnding,
974 cx: &Context<Self>,
975 ) -> Self {
976 Self::build(
977 TextBuffer::new_normalized(
978 ReplicaId::LOCAL,
979 cx.entity_id().as_non_zero_u64().into(),
980 line_ending,
981 base_text_normalized,
982 ),
983 None,
984 Capability::ReadWrite,
985 )
986 }
987
988 /// Create a new buffer that is a replica of a remote buffer.
989 pub fn remote(
990 remote_id: BufferId,
991 replica_id: ReplicaId,
992 capability: Capability,
993 base_text: impl Into<String>,
994 ) -> Self {
995 Self::build(
996 TextBuffer::new(replica_id, remote_id, base_text.into()),
997 None,
998 capability,
999 )
1000 }
1001
1002 /// Create a new buffer that is a replica of a remote buffer, populating its
1003 /// state from the given protobuf message.
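    ///
    /// A deserialization sketch (`state` and `replica_id` are assumed to come
    /// from the collaboration server; kept out of doctests):
    ///
    /// ```ignore
    /// // `state` was produced by `Buffer::to_proto` on the host.
    /// let buffer = Buffer::from_proto(replica_id, Capability::ReadWrite, state, None)?;
    /// ```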
1004 pub fn from_proto(
1005 replica_id: ReplicaId,
1006 capability: Capability,
1007 message: proto::BufferState,
1008 file: Option<Arc<dyn File>>,
1009 ) -> Result<Self> {
1010 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
1011 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
1012 let mut this = Self::build(buffer, file, capability);
1013 this.text.set_line_ending(proto::deserialize_line_ending(
1014 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
1015 ));
1016 this.saved_version = proto::deserialize_version(&message.saved_version);
1017 this.saved_mtime = message.saved_mtime.map(|time| time.into());
1018 Ok(this)
1019 }
1020
1021 /// Serialize the buffer's state to a protobuf message.
1022 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
1023 proto::BufferState {
1024 id: self.remote_id().into(),
1025 file: self.file.as_ref().map(|f| f.to_proto(cx)),
1026 base_text: self.base_text().to_string(),
1027 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
1028 saved_version: proto::serialize_version(&self.saved_version),
1029 saved_mtime: self.saved_mtime.map(|time| time.into()),
1030 }
1031 }
1032
1033 /// Serialize as protobufs all of the changes to the buffer since the given version.
1034 pub fn serialize_ops(
1035 &self,
1036 since: Option<clock::Global>,
1037 cx: &App,
1038 ) -> Task<Vec<proto::Operation>> {
1039 let mut operations = Vec::new();
1040 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1041
1042 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1043 proto::serialize_operation(&Operation::UpdateSelections {
1044 selections: set.selections.clone(),
1045 lamport_timestamp: set.lamport_timestamp,
1046 line_mode: set.line_mode,
1047 cursor_shape: set.cursor_shape,
1048 })
1049 }));
1050
1051 for (server_id, diagnostics) in &self.diagnostics {
1052 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1053 lamport_timestamp: self.diagnostics_timestamp,
1054 server_id: *server_id,
1055 diagnostics: diagnostics.iter().cloned().collect(),
1056 }));
1057 }
1058
1059 for (server_id, completions) in &self.completion_triggers_per_language_server {
1060 operations.push(proto::serialize_operation(
1061 &Operation::UpdateCompletionTriggers {
1062 triggers: completions.iter().cloned().collect(),
1063 lamport_timestamp: self.completion_triggers_timestamp,
1064 server_id: *server_id,
1065 },
1066 ));
1067 }
1068
1069 let text_operations = self.text.operations().clone();
1070 cx.background_spawn(async move {
1071 let since = since.unwrap_or_default();
1072 operations.extend(
1073 text_operations
1074 .iter()
1075 .filter(|(_, op)| !since.observed(op.timestamp()))
1076 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1077 );
1078 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1079 operations
1080 })
1081 }
1082
1083 /// Assign a language to the buffer, returning the buffer.
1084 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1085 self.set_language_async(Some(language), cx);
1086 self
1087 }
1088
    /// Assign a language to the buffer, blocking for up to 1ms to reparse it, and return the buffer.
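    ///
    /// A construction sketch (assumes a `language: Arc<Language>` is already
    /// loaded; kept out of doctests):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx).with_language(language, cx));
    /// ```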
1090 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1091 self.set_language(Some(language), cx);
1092 self
1093 }
1094
1095 /// Returns the [`Capability`] of this buffer.
1096 pub fn capability(&self) -> Capability {
1097 self.capability
1098 }
1099
1100 /// Whether this buffer can only be read.
1101 pub fn read_only(&self) -> bool {
1102 !self.capability.editable()
1103 }
1104
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1106 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1107 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1108 let snapshot = buffer.snapshot();
1109 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1110 let tree_sitter_data = TreeSitterData::new(snapshot);
1111 Self {
1112 saved_mtime,
1113 tree_sitter_data: Arc::new(tree_sitter_data),
1114 saved_version: buffer.version(),
1115 preview_version: buffer.version(),
1116 reload_task: None,
1117 transaction_depth: 0,
1118 was_dirty_before_starting_transaction: None,
1119 has_unsaved_edits: Cell::new((buffer.version(), false)),
1120 text: buffer,
1121 branch_state: None,
1122 file,
1123 capability,
1124 syntax_map,
1125 reparse: None,
1126 non_text_state_update_count: 0,
1127 sync_parse_timeout: if cfg!(any(test, feature = "test-support")) {
1128 Some(Duration::from_millis(10))
1129 } else {
1130 Some(Duration::from_millis(1))
1131 },
1132 parse_status: watch::channel(ParseStatus::Idle),
1133 autoindent_requests: Default::default(),
1134 wait_for_autoindent_txs: Default::default(),
1135 pending_autoindent: Default::default(),
1136 language: None,
1137 remote_selections: Default::default(),
1138 diagnostics: Default::default(),
1139 diagnostics_timestamp: Lamport::MIN,
1140 completion_triggers: Default::default(),
1141 completion_triggers_per_language_server: Default::default(),
1142 completion_triggers_timestamp: Lamport::MIN,
1143 deferred_ops: OperationQueue::new(),
1144 has_conflict: false,
1145 change_bits: Default::default(),
1146 _subscriptions: Vec::new(),
1147 encoding: encoding_rs::UTF_8,
1148 has_bom: false,
1149 }
1150 }
1151
1152 pub fn build_snapshot(
1153 text: Rope,
1154 language: Option<Arc<Language>>,
1155 language_registry: Option<Arc<LanguageRegistry>>,
1156 cx: &mut App,
1157 ) -> impl Future<Output = BufferSnapshot> + use<> {
1158 let entity_id = cx.reserve_entity::<Self>().entity_id();
1159 let buffer_id = entity_id.as_non_zero_u64().into();
1160 async move {
1161 let text =
1162 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1163 .snapshot();
1164 let mut syntax = SyntaxMap::new(&text).snapshot();
1165 if let Some(language) = language.clone() {
1166 let language_registry = language_registry.clone();
1167 syntax.reparse(&text, language_registry, language);
1168 }
1169 let tree_sitter_data = TreeSitterData::new(text.clone());
1170 BufferSnapshot {
1171 text,
1172 syntax,
1173 file: None,
1174 diagnostics: Default::default(),
1175 remote_selections: Default::default(),
1176 tree_sitter_data: Arc::new(tree_sitter_data),
1177 language,
1178 non_text_state_update_count: 0,
1179 capability: Capability::ReadOnly,
1180 }
1181 }
1182 }
1183
1184 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1185 let entity_id = cx.reserve_entity::<Self>().entity_id();
1186 let buffer_id = entity_id.as_non_zero_u64().into();
1187 let text = TextBuffer::new_normalized(
1188 ReplicaId::LOCAL,
1189 buffer_id,
1190 Default::default(),
1191 Rope::new(),
1192 )
1193 .snapshot();
1194 let syntax = SyntaxMap::new(&text).snapshot();
1195 let tree_sitter_data = TreeSitterData::new(text.clone());
1196 BufferSnapshot {
1197 text,
1198 syntax,
1199 tree_sitter_data: Arc::new(tree_sitter_data),
1200 file: None,
1201 diagnostics: Default::default(),
1202 remote_selections: Default::default(),
1203 language: None,
1204 non_text_state_update_count: 0,
1205 capability: Capability::ReadOnly,
1206 }
1207 }
1208
1209 #[cfg(any(test, feature = "test-support"))]
1210 pub fn build_snapshot_sync(
1211 text: Rope,
1212 language: Option<Arc<Language>>,
1213 language_registry: Option<Arc<LanguageRegistry>>,
1214 cx: &mut App,
1215 ) -> BufferSnapshot {
1216 let entity_id = cx.reserve_entity::<Self>().entity_id();
1217 let buffer_id = entity_id.as_non_zero_u64().into();
1218 let text =
1219 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1220 .snapshot();
1221 let mut syntax = SyntaxMap::new(&text).snapshot();
1222 if let Some(language) = language.clone() {
1223 syntax.reparse(&text, language_registry, language);
1224 }
1225 let tree_sitter_data = TreeSitterData::new(text.clone());
1226 BufferSnapshot {
1227 text,
1228 syntax,
1229 tree_sitter_data: Arc::new(tree_sitter_data),
1230 file: None,
1231 diagnostics: Default::default(),
1232 remote_selections: Default::default(),
1233 language,
1234 non_text_state_update_count: 0,
1235 capability: Capability::ReadOnly,
1236 }
1237 }
1238
1239 /// Retrieve a snapshot of the buffer's current state. This is computationally
1240 /// cheap, and allows reading from the buffer on a background thread.
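    ///
    /// A usage sketch (assumes a GPUI context and an existing buffer entity;
    /// kept out of doctests):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot can be read without further access to the buffer entity.
    ///     let line_count = snapshot.max_point().row + 1;
    ///     println!("{line_count} lines");
    /// })
    /// .detach();
    /// ```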
1241 pub fn snapshot(&self) -> BufferSnapshot {
1242 let text = self.text.snapshot();
1243 let mut syntax_map = self.syntax_map.lock();
1244 syntax_map.interpolate(&text);
1245 let syntax = syntax_map.snapshot();
1246
1247 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1248 Arc::new(TreeSitterData::new(text.clone()))
1249 } else {
1250 self.tree_sitter_data.clone()
1251 };
1252
1253 BufferSnapshot {
1254 text,
1255 syntax,
1256 tree_sitter_data,
1257 file: self.file.clone(),
1258 remote_selections: self.remote_selections.clone(),
1259 diagnostics: self.diagnostics.clone(),
1260 language: self.language.clone(),
1261 non_text_state_update_count: self.non_text_state_update_count,
1262 capability: self.capability,
1263 }
1264 }
1265
1266 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1267 let this = cx.entity();
1268 cx.new(|cx| {
1269 let mut branch = Self {
1270 branch_state: Some(BufferBranchState {
1271 base_buffer: this.clone(),
1272 merged_operations: Default::default(),
1273 }),
1274 language: self.language.clone(),
1275 has_conflict: self.has_conflict,
1276 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1277 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1278 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1279 };
1280 if let Some(language_registry) = self.language_registry() {
1281 branch.set_language_registry(language_registry);
1282 }
1283
1284 // Reparse the branch buffer so that we get syntax highlighting immediately.
1285 branch.reparse(cx, true);
1286
1287 branch
1288 })
1289 }
1290
1291 pub fn preview_edits(
1292 &self,
1293 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1294 cx: &App,
1295 ) -> Task<EditPreview> {
1296 let registry = self.language_registry();
1297 let language = self.language().cloned();
1298 let old_snapshot = self.text.snapshot();
1299 let mut branch_buffer = self.text.branch();
1300 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1301 cx.background_spawn(async move {
1302 if !edits.is_empty() {
1303 if let Some(language) = language.clone() {
1304 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1305 }
1306
1307 branch_buffer.edit(edits.iter().cloned());
1308 let snapshot = branch_buffer.snapshot();
1309 syntax_snapshot.interpolate(&snapshot);
1310
1311 if let Some(language) = language {
1312 syntax_snapshot.reparse(&snapshot, registry, language);
1313 }
1314 }
1315 EditPreview {
1316 old_snapshot,
1317 applied_edits_snapshot: branch_buffer.snapshot(),
1318 syntax_snapshot,
1319 }
1320 })
1321 }
1322
1323 /// Applies all of the changes in this buffer that intersect any of the
1324 /// given `ranges` to its base buffer.
1325 ///
1326 /// If `ranges` is empty, then all changes will be applied. This buffer must
1327 /// be a branch buffer to call this method.
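    ///
    /// A sketch of the branch-and-merge flow (assumes a GPUI context; kept out
    /// of doctests):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// prologue\n")], None, cx);
    ///     // An empty `ranges` vec applies every branch edit to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```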
1328 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1329 let Some(base_buffer) = self.base_buffer() else {
1330 debug_panic!("not a branch buffer");
1331 return;
1332 };
1333
1334 let mut ranges = if ranges.is_empty() {
1335 &[0..usize::MAX]
1336 } else {
1337 ranges.as_slice()
1338 }
1339 .iter()
1340 .peekable();
1341
1342 let mut edits = Vec::new();
1343 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1344 let mut is_included = false;
1345 while let Some(range) = ranges.peek() {
1346 if range.end < edit.new.start {
1347 ranges.next().unwrap();
1348 } else {
1349 if range.start <= edit.new.end {
1350 is_included = true;
1351 }
1352 break;
1353 }
1354 }
1355
1356 if is_included {
1357 edits.push((
1358 edit.old.clone(),
1359 self.text_for_range(edit.new.clone()).collect::<String>(),
1360 ));
1361 }
1362 }
1363
1364 let operation = base_buffer.update(cx, |base_buffer, cx| {
1365 // cx.emit(BufferEvent::DiffBaseChanged);
1366 base_buffer.edit(edits, None, cx)
1367 });
1368
1369 if let Some(operation) = operation
1370 && let Some(BufferBranchState {
1371 merged_operations, ..
1372 }) = &mut self.branch_state
1373 {
1374 merged_operations.push(operation);
1375 }
1376 }
1377
1378 fn on_base_buffer_event(
1379 &mut self,
1380 _: Entity<Buffer>,
1381 event: &BufferEvent,
1382 cx: &mut Context<Self>,
1383 ) {
1384 let BufferEvent::Operation { operation, .. } = event else {
1385 return;
1386 };
1387 let Some(BufferBranchState {
1388 merged_operations, ..
1389 }) = &mut self.branch_state
1390 else {
1391 return;
1392 };
1393
1394 let mut operation_to_undo = None;
1395 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1396 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1397 {
1398 merged_operations.remove(ix);
1399 operation_to_undo = Some(operation.timestamp);
1400 }
1401
1402 self.apply_ops([operation.clone()], cx);
1403
1404 if let Some(timestamp) = operation_to_undo {
1405 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1406 self.undo_operations(counts, cx);
1407 }
1408 }
1409
1410 #[cfg(test)]
1411 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1412 &self.text
1413 }
1414
1415 /// Retrieve a snapshot of the buffer's raw text, without any
1416 /// language-related state like the syntax tree or diagnostics.
1417 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1418 self.text.snapshot()
1419 }
1420
1421 /// The file associated with the buffer, if any.
1422 pub fn file(&self) -> Option<&Arc<dyn File>> {
1423 self.file.as_ref()
1424 }
1425
1426 /// The version of the buffer that was last saved or reloaded from disk.
1427 pub fn saved_version(&self) -> &clock::Global {
1428 &self.saved_version
1429 }
1430
1431 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1432 pub fn saved_mtime(&self) -> Option<MTime> {
1433 self.saved_mtime
1434 }
1435
1436 /// Returns the character encoding of the buffer's file.
1437 pub fn encoding(&self) -> &'static Encoding {
1438 self.encoding
1439 }
1440
1441 /// Sets the character encoding of the buffer.
1442 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1443 self.encoding = encoding;
1444 }
1445
1446 /// Returns whether the buffer has a Byte Order Mark.
1447 pub fn has_bom(&self) -> bool {
1448 self.has_bom
1449 }
1450
1451 /// Sets whether the buffer has a Byte Order Mark.
1452 pub fn set_has_bom(&mut self, has_bom: bool) {
1453 self.has_bom = has_bom;
1454 }
1455
1456 /// Assign a language to the buffer.
1457 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1458 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1459 }
1460
    /// Assign a language to the buffer, blocking for up to 1ms to reparse it.
1462 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1463 self.set_language_(language, true, cx);
1464 }
1465
1466 fn set_language_(
1467 &mut self,
1468 language: Option<Arc<Language>>,
1469 may_block: bool,
1470 cx: &mut Context<Self>,
1471 ) {
1472 self.non_text_state_update_count += 1;
1473 self.syntax_map.lock().clear(&self.text);
1474 let old_language = std::mem::replace(&mut self.language, language);
1475 self.was_changed();
1476 self.reparse(cx, may_block);
1477 let has_fresh_language =
1478 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1479 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1480 }
1481
1482 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1483 /// other languages if parts of the buffer are written in different languages.
1484 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1485 self.syntax_map
1486 .lock()
1487 .set_language_registry(language_registry);
1488 }
1489
1490 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1491 self.syntax_map.lock().language_registry()
1492 }
1493
1494 /// Assign the line ending type to the buffer.
1495 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1496 self.text.set_line_ending(line_ending);
1497
1498 let lamport_timestamp = self.text.lamport_clock.tick();
1499 self.send_operation(
1500 Operation::UpdateLineEnding {
1501 line_ending,
1502 lamport_timestamp,
1503 },
1504 true,
1505 cx,
1506 );
1507 }
1508
1509 /// Assign the buffer a new [`Capability`].
1510 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1511 if self.capability != capability {
1512 self.capability = capability;
1513 cx.emit(BufferEvent::CapabilityChanged)
1514 }
1515 }
1516
1517 /// This method is called to signal that the buffer has been saved.
1518 pub fn did_save(
1519 &mut self,
1520 version: clock::Global,
1521 mtime: Option<MTime>,
1522 cx: &mut Context<Self>,
1523 ) {
1524 self.saved_version = version.clone();
1525 self.has_unsaved_edits.set((version, false));
1526 self.has_conflict = false;
1527 self.saved_mtime = mtime;
1528 self.was_changed();
1529 cx.emit(BufferEvent::Saved);
1530 cx.notify();
1531 }
1532
1533 /// Reloads the contents of the buffer from disk.
1534 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1535 let (tx, rx) = futures::channel::oneshot::channel();
1536 let prev_version = self.text.version();
1537 self.reload_task = Some(cx.spawn(async move |this, cx| {
1538 let Some((new_mtime, load_bytes_task, encoding)) = this.update(cx, |this, cx| {
1539 let file = this.file.as_ref()?.as_local()?;
1540 Some((
1541 file.disk_state().mtime(),
1542 file.load_bytes(cx),
1543 this.encoding,
1544 ))
1545 })?
1546 else {
1547 return Ok(());
1548 };
1549
1550 let bytes = load_bytes_task.await?;
1551 let (cow, _encoding_used, _has_errors) = encoding.decode(&bytes);
1552 let new_text = cow.into_owned();
1553
1554 let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await;
1555 this.update(cx, |this, cx| {
1556 if this.version() == diff.base_version {
1557 this.finalize_last_transaction();
1558 this.apply_diff(diff, cx);
1559 tx.send(this.finalize_last_transaction().cloned()).ok();
1560 this.has_conflict = false;
1561 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1562 } else {
1563 if !diff.edits.is_empty()
1564 || this
1565 .edits_since::<usize>(&diff.base_version)
1566 .next()
1567 .is_some()
1568 {
1569 this.has_conflict = true;
1570 }
1571
1572 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1573 }
1574
1575 this.reload_task.take();
1576 })
1577 }));
1578 rx
1579 }
1580
1581 /// This method is called to signal that the buffer has been reloaded.
1582 pub fn did_reload(
1583 &mut self,
1584 version: clock::Global,
1585 line_ending: LineEnding,
1586 mtime: Option<MTime>,
1587 cx: &mut Context<Self>,
1588 ) {
1589 self.saved_version = version;
1590 self.has_unsaved_edits
1591 .set((self.saved_version.clone(), false));
1592 self.text.set_line_ending(line_ending);
1593 self.saved_mtime = mtime;
1594 cx.emit(BufferEvent::Reloaded);
1595 cx.notify();
1596 }
1597
1598 /// Updates the [`File`] backing this buffer. This should be called when
1599 /// the file has changed or has been deleted.
1600 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1601 let was_dirty = self.is_dirty();
1602 let mut file_changed = false;
1603
1604 if let Some(old_file) = self.file.as_ref() {
1605 if new_file.path() != old_file.path() {
1606 file_changed = true;
1607 }
1608
1609 let old_state = old_file.disk_state();
1610 let new_state = new_file.disk_state();
1611 if old_state != new_state {
1612 file_changed = true;
1613 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1614 cx.emit(BufferEvent::ReloadNeeded)
1615 }
1616 }
1617 } else {
1618 file_changed = true;
1619 };
1620
1621 self.file = Some(new_file);
1622 if file_changed {
1623 self.was_changed();
1624 self.non_text_state_update_count += 1;
1625 if was_dirty != self.is_dirty() {
1626 cx.emit(BufferEvent::DirtyChanged);
1627 }
1628 cx.emit(BufferEvent::FileHandleChanged);
1629 cx.notify();
1630 }
1631 }
1632
1633 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1634 Some(self.branch_state.as_ref()?.base_buffer.clone())
1635 }
1636
1637 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1638 pub fn language(&self) -> Option<&Arc<Language>> {
1639 self.language.as_ref()
1640 }
1641
1642 /// Returns the [`Language`] at the given location.
1643 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1644 let offset = position.to_offset(self);
1645 let mut is_first = true;
1646 let start_anchor = self.anchor_before(offset);
1647 let end_anchor = self.anchor_after(offset);
1648 self.syntax_map
1649 .lock()
1650 .layers_for_range(offset..offset, &self.text, false)
1651 .filter(|layer| {
1652 if is_first {
1653 is_first = false;
1654 return true;
1655 }
1656
1657 layer
1658 .included_sub_ranges
1659 .map(|sub_ranges| {
1660 sub_ranges.iter().any(|sub_range| {
1661 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1662 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1663 !is_before_start && !is_after_end
1664 })
1665 })
1666 .unwrap_or(true)
1667 })
1668 .last()
1669 .map(|info| info.language.clone())
1670 .or_else(|| self.language.clone())
1671 }
1672
1673 /// Returns each [`Language`] for the active syntax layers at the given location.
1674 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1675 let offset = position.to_offset(self);
1676 let mut languages: Vec<Arc<Language>> = self
1677 .syntax_map
1678 .lock()
1679 .layers_for_range(offset..offset, &self.text, false)
1680 .map(|info| info.language.clone())
1681 .collect();
1682
1683 if languages.is_empty()
1684 && let Some(buffer_language) = self.language()
1685 {
1686 languages.push(buffer_language.clone());
1687 }
1688
1689 languages
1690 }
1691
1692 /// An integer version number that accounts for all updates besides
1693 /// the buffer's text itself (which is versioned via a version vector).
1694 pub fn non_text_state_update_count(&self) -> usize {
1695 self.non_text_state_update_count
1696 }
1697
1698 /// Whether the buffer is being parsed in the background.
1699 #[cfg(any(test, feature = "test-support"))]
1700 pub fn is_parsing(&self) -> bool {
1701 self.reparse.is_some()
1702 }
1703
1704 /// Indicates whether the buffer contains any regions that may be
1705 /// written in a language that hasn't been loaded yet.
1706 pub fn contains_unknown_injections(&self) -> bool {
1707 self.syntax_map.lock().contains_unknown_injections()
1708 }
1709
1710 #[cfg(any(test, feature = "test-support"))]
1711 pub fn set_sync_parse_timeout(&mut self, timeout: Option<Duration>) {
1712 self.sync_parse_timeout = timeout;
1713 }
1714
1715 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1716 match Arc::get_mut(&mut self.tree_sitter_data) {
1717 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1718 None => {
1719 let tree_sitter_data = TreeSitterData::new(snapshot);
1720 self.tree_sitter_data = Arc::new(tree_sitter_data)
1721 }
1722 }
1723 }
1724
1725 /// Called after an edit to synchronize the buffer's main parse tree with
1726 /// the buffer's new underlying state.
1727 ///
1728 /// Locks the syntax map and interpolates the edits since the last reparse
1729 /// into the foreground syntax tree.
1730 ///
1731 /// Then takes a stable snapshot of the syntax map before unlocking it.
1732 /// The snapshot with the interpolated edits is sent to a background thread,
1733 /// where we ask Tree-sitter to perform an incremental parse.
1734 ///
1735 /// Meanwhile, in the foreground, if `may_block` is true and a sync parse
1736 /// timeout is configured, we block the main thread for up to that timeout
1737 /// waiting for the parse to complete, and proceed synchronously if it does.
1738 ///
1739 /// If the timeout elapses first, we return with the interpolated tree still
1740 /// in the foreground and spawn a second task that waits for the background
1741 /// parse to complete. When it does, it calls back into the main thread and
1742 /// assigns the newly parsed state in the foreground.
1743 ///
1744 /// If the buffer or grammar changed since the start of the background parse,
1745 /// initiate an additional reparse recursively. To avoid concurrent parses
1746 /// for the same buffer, we only initiate a new parse if we are not already
1747 /// parsing in the background.
1748 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1749 if self.text.version() != *self.tree_sitter_data.version() {
1750 self.invalidate_tree_sitter_data(self.text.snapshot());
1751 }
1752 if self.reparse.is_some() {
1753 return;
1754 }
1755 let language = if let Some(language) = self.language.clone() {
1756 language
1757 } else {
1758 return;
1759 };
1760
1761 let text = self.text_snapshot();
1762 let parsed_version = self.version();
1763
1764 let mut syntax_map = self.syntax_map.lock();
1765 syntax_map.interpolate(&text);
1766 let language_registry = syntax_map.language_registry();
1767 let mut syntax_snapshot = syntax_map.snapshot();
1768 drop(syntax_map);
1769
1770 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1771 if may_block && let Some(sync_parse_timeout) = self.sync_parse_timeout {
1772 if let Ok(()) = syntax_snapshot.reparse_with_timeout(
1773 &text,
1774 language_registry.clone(),
1775 language.clone(),
1776 sync_parse_timeout,
1777 ) {
1778 self.did_finish_parsing(syntax_snapshot, Duration::from_millis(300), cx);
1779 self.reparse = None;
1780 return;
1781 }
1782 }
1783
1784 let parse_task = cx.background_spawn({
1785 let language = language.clone();
1786 let language_registry = language_registry.clone();
1787 async move {
1788 syntax_snapshot.reparse(&text, language_registry, language);
1789 syntax_snapshot
1790 }
1791 });
1792
1793 self.reparse = Some(cx.spawn(async move |this, cx| {
1794 let new_syntax_map = parse_task.await;
1795 this.update(cx, move |this, cx| {
1796 let grammar_changed = || {
1797 this.language
1798 .as_ref()
1799 .is_none_or(|current_language| !Arc::ptr_eq(&language, current_language))
1800 };
1801 let language_registry_changed = || {
1802 new_syntax_map.contains_unknown_injections()
1803 && language_registry.is_some_and(|registry| {
1804 registry.version() != new_syntax_map.language_registry_version()
1805 })
1806 };
1807 let parse_again = this.version.changed_since(&parsed_version)
1808 || language_registry_changed()
1809 || grammar_changed();
1810 this.did_finish_parsing(new_syntax_map, Duration::ZERO, cx);
1811 this.reparse = None;
1812 if parse_again {
1813 this.reparse(cx, false);
1814 }
1815 })
1816 .ok();
1817 }));
1818 }
1819
1820 fn did_finish_parsing(
1821 &mut self,
1822 syntax_snapshot: SyntaxSnapshot,
1823 block_budget: Duration,
1824 cx: &mut Context<Self>,
1825 ) {
1826 self.non_text_state_update_count += 1;
1827 self.syntax_map.lock().did_parse(syntax_snapshot);
1828 self.was_changed();
1829 self.request_autoindent(cx, block_budget);
1830 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1831 self.invalidate_tree_sitter_data(self.text.snapshot());
1832 cx.emit(BufferEvent::Reparsed);
1833 cx.notify();
1834 }
1835
1836 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1837 self.parse_status.1.clone()
1838 }
1839
1840 /// Waits until the buffer is no longer parsing.
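///
/// A usage sketch (illustrative only; assumes a test-style async context with a
/// `buffer` entity and a gpui context `cx`):
///
/// ```ignore
/// // Edit the buffer, which kicks off a reparse in the background...
/// buffer.update(cx, |buffer, cx| {
///     buffer.set_text("fn main() {}\n", cx);
/// });
/// // ...then wait for parsing to settle before inspecting syntax-derived state.
/// buffer.read_with(cx, |buffer, _| buffer.parsing_idle()).await;
/// ```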
1841 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1842 let mut parse_status = self.parse_status();
1843 async move {
1844 while *parse_status.borrow() != ParseStatus::Idle {
1845 if parse_status.changed().await.is_err() {
1846 break;
1847 }
1848 }
1849 }
1850 }
1851
1852 /// Assign to the buffer a set of diagnostics created by a given language server.
1853 pub fn update_diagnostics(
1854 &mut self,
1855 server_id: LanguageServerId,
1856 diagnostics: DiagnosticSet,
1857 cx: &mut Context<Self>,
1858 ) {
1859 let lamport_timestamp = self.text.lamport_clock.tick();
1860 let op = Operation::UpdateDiagnostics {
1861 server_id,
1862 diagnostics: diagnostics.iter().cloned().collect(),
1863 lamport_timestamp,
1864 };
1865
1866 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1867 self.send_operation(op, true, cx);
1868 }
1869
1870 pub fn buffer_diagnostics(
1871 &self,
1872 for_server: Option<LanguageServerId>,
1873 ) -> Vec<&DiagnosticEntry<Anchor>> {
1874 match for_server {
1875 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1876 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1877 Err(_) => Vec::new(),
1878 },
1879 None => self
1880 .diagnostics
1881 .iter()
1882 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1883 .collect(),
1884 }
1885 }
1886
1887 fn request_autoindent(&mut self, cx: &mut Context<Self>, block_budget: Duration) {
1888 if let Some(indent_sizes) = self.compute_autoindents() {
1889 let indent_sizes = cx.background_spawn(indent_sizes);
1890 match cx
1891 .foreground_executor()
1892 .block_with_timeout(block_budget, indent_sizes)
1893 {
1894 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1895 Err(indent_sizes) => {
1896 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1897 let indent_sizes = indent_sizes.await;
1898 this.update(cx, |this, cx| {
1899 this.apply_autoindents(indent_sizes, cx);
1900 })
1901 .ok();
1902 }));
1903 }
1904 }
1905 } else {
1906 self.autoindent_requests.clear();
1907 for tx in self.wait_for_autoindent_txs.drain(..) {
1908 tx.send(()).ok();
1909 }
1910 }
1911 }
1912
1913 fn compute_autoindents(
1914 &self,
1915 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1916 let max_rows_between_yields = 100;
1917 let snapshot = self.snapshot();
1918 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1919 return None;
1920 }
1921
1922 let autoindent_requests = self.autoindent_requests.clone();
1923 Some(async move {
1924 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1925 for request in autoindent_requests {
1926 // Resolve each edited range to its row in the current buffer and in the
1927 // buffer before this batch of edits.
1928 let mut row_ranges = Vec::new();
1929 let mut old_to_new_rows = BTreeMap::new();
1930 let mut language_indent_sizes_by_new_row = Vec::new();
1931 for entry in &request.entries {
1932 let position = entry.range.start;
1933 let new_row = position.to_point(&snapshot).row;
1934 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1935 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1936
1937 if let Some(old_row) = entry.old_row {
1938 old_to_new_rows.insert(old_row, new_row);
1939 }
1940 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1941 }
1942
1943 // Build a map containing the suggested indentation for each of the edited lines
1944 // with respect to the state of the buffer before these edits. This map is keyed
1945 // by the rows for these lines in the current state of the buffer.
1946 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1947 let old_edited_ranges =
1948 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1949 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1950 let mut language_indent_size = IndentSize::default();
1951 for old_edited_range in old_edited_ranges {
1952 let suggestions = request
1953 .before_edit
1954 .suggest_autoindents(old_edited_range.clone())
1955 .into_iter()
1956 .flatten();
1957 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1958 if let Some(suggestion) = suggestion {
1959 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1960
1961 // Find the indent size based on the language for this row.
1962 while let Some((row, size)) = language_indent_sizes.peek() {
1963 if *row > new_row {
1964 break;
1965 }
1966 language_indent_size = *size;
1967 language_indent_sizes.next();
1968 }
1969
1970 let suggested_indent = old_to_new_rows
1971 .get(&suggestion.basis_row)
1972 .and_then(|from_row| {
1973 Some(old_suggestions.get(from_row).copied()?.0)
1974 })
1975 .unwrap_or_else(|| {
1976 request
1977 .before_edit
1978 .indent_size_for_line(suggestion.basis_row)
1979 })
1980 .with_delta(suggestion.delta, language_indent_size);
1981 old_suggestions
1982 .insert(new_row, (suggested_indent, suggestion.within_error));
1983 }
1984 }
1985 yield_now().await;
1986 }
1987
1988 // Compute new suggestions for each line, but only include them in the result
1989 // if they differ from the old suggestion for that line.
1990 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1991 let mut language_indent_size = IndentSize::default();
1992 for (row_range, original_indent_column) in row_ranges {
1993 let new_edited_row_range = if request.is_block_mode {
1994 row_range.start..row_range.start + 1
1995 } else {
1996 row_range.clone()
1997 };
1998
1999 let suggestions = snapshot
2000 .suggest_autoindents(new_edited_row_range.clone())
2001 .into_iter()
2002 .flatten();
2003 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
2004 if let Some(suggestion) = suggestion {
2005 // Find the indent size based on the language for this row.
2006 while let Some((row, size)) = language_indent_sizes.peek() {
2007 if *row > new_row {
2008 break;
2009 }
2010 language_indent_size = *size;
2011 language_indent_sizes.next();
2012 }
2013
2014 let suggested_indent = indent_sizes
2015 .get(&suggestion.basis_row)
2016 .copied()
2017 .map(|e| e.0)
2018 .unwrap_or_else(|| {
2019 snapshot.indent_size_for_line(suggestion.basis_row)
2020 })
2021 .with_delta(suggestion.delta, language_indent_size);
2022
2023 if old_suggestions.get(&new_row).is_none_or(
2024 |(old_indentation, was_within_error)| {
2025 suggested_indent != *old_indentation
2026 && (!suggestion.within_error || *was_within_error)
2027 },
2028 ) {
2029 indent_sizes.insert(
2030 new_row,
2031 (suggested_indent, request.ignore_empty_lines),
2032 );
2033 }
2034 }
2035 }
2036
2037 if let (true, Some(original_indent_column)) =
2038 (request.is_block_mode, original_indent_column)
2039 {
2040 let new_indent =
2041 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2042 *indent
2043 } else {
2044 snapshot.indent_size_for_line(row_range.start)
2045 };
2046 let delta = new_indent.len as i64 - original_indent_column as i64;
2047 if delta != 0 {
2048 for row in row_range.skip(1) {
2049 indent_sizes.entry(row).or_insert_with(|| {
2050 let mut size = snapshot.indent_size_for_line(row);
2051 if size.kind == new_indent.kind {
2052 match delta.cmp(&0) {
2053 Ordering::Greater => size.len += delta as u32,
2054 Ordering::Less => {
2055 size.len = size.len.saturating_sub(-delta as u32)
2056 }
2057 Ordering::Equal => {}
2058 }
2059 }
2060 (size, request.ignore_empty_lines)
2061 });
2062 }
2063 }
2064 }
2065
2066 yield_now().await;
2067 }
2068 }
2069
2070 indent_sizes
2071 .into_iter()
2072 .filter_map(|(row, (indent, ignore_empty_lines))| {
2073 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2074 None
2075 } else {
2076 Some((row, indent))
2077 }
2078 })
2079 .collect()
2080 })
2081 }
2082
2083 fn apply_autoindents(
2084 &mut self,
2085 indent_sizes: BTreeMap<u32, IndentSize>,
2086 cx: &mut Context<Self>,
2087 ) {
2088 self.autoindent_requests.clear();
2089 for tx in self.wait_for_autoindent_txs.drain(..) {
2090 tx.send(()).ok();
2091 }
2092
2093 let edits: Vec<_> = indent_sizes
2094 .into_iter()
2095 .filter_map(|(row, indent_size)| {
2096 let current_size = indent_size_for_line(self, row);
2097 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2098 })
2099 .collect();
2100
2101 let preserve_preview = self.preserve_preview();
2102 self.edit(edits, None, cx);
2103 if preserve_preview {
2104 self.refresh_preview();
2105 }
2106 }
2107
2108 /// Create a minimal edit that will cause the given row to be indented
2109 /// with the given size. After applying this edit, the length of the line
2110 /// will always be at least `new_size.len`.
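///
/// A worked example of the minimal edit this produces (illustrative, not a doc
/// test; the row and sizes are arbitrary):
///
/// ```ignore
/// // Row 2 currently starts with two spaces; we want four.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     2,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// // The minimal edit inserts two more spaces at the start of the row.
/// assert_eq!(
///     edit,
///     Some((Point::new(2, 0)..Point::new(2, 0), "  ".to_string())),
/// );
/// ```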
2111 pub fn edit_for_indent_size_adjustment(
2112 row: u32,
2113 current_size: IndentSize,
2114 new_size: IndentSize,
2115 ) -> Option<(Range<Point>, String)> {
2116 if new_size.kind == current_size.kind {
2117 match new_size.len.cmp(&current_size.len) {
2118 Ordering::Greater => {
2119 let point = Point::new(row, 0);
2120 Some((
2121 point..point,
2122 iter::repeat(new_size.char())
2123 .take((new_size.len - current_size.len) as usize)
2124 .collect::<String>(),
2125 ))
2126 }
2127
2128 Ordering::Less => Some((
2129 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2130 String::new(),
2131 )),
2132
2133 Ordering::Equal => None,
2134 }
2135 } else {
2136 Some((
2137 Point::new(row, 0)..Point::new(row, current_size.len),
2138 iter::repeat(new_size.char())
2139 .take(new_size.len as usize)
2140 .collect::<String>(),
2141 ))
2142 }
2143 }
2144
2145 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2146 /// and the given new text.
2147 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2148 let old_text = self.as_rope().clone();
2149 let base_version = self.version();
2150 cx.background_spawn(async move {
2151 let old_text = old_text.to_string();
2152 let line_ending = LineEnding::detect(&new_text);
2153 LineEnding::normalize(&mut new_text);
2154 let edits = text_diff(&old_text, &new_text);
2155 Diff {
2156 base_version,
2157 line_ending,
2158 edits,
2159 }
2160 })
2161 }
2162
2163 /// Spawns a background task that searches the buffer for any whitespace
2164 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2165 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2166 let old_text = self.as_rope().clone();
2167 let line_ending = self.line_ending();
2168 let base_version = self.version();
2169 cx.background_spawn(async move {
2170 let ranges = trailing_whitespace_ranges(&old_text);
2171 let empty = Arc::<str>::from("");
2172 Diff {
2173 base_version,
2174 line_ending,
2175 edits: ranges
2176 .into_iter()
2177 .map(|range| (range, empty.clone()))
2178 .collect(),
2179 }
2180 })
2181 }
2182
2183 /// Ensures that the buffer ends with a single newline character, and
2184 /// no other trailing whitespace. Does nothing if the buffer is empty.
2185 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2186 let len = self.len();
2187 if len == 0 {
2188 return;
2189 }
2190 let mut offset = len;
2191 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2192 let non_whitespace_len = chunk
2193 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2194 .len();
2195 offset -= chunk.len();
2196 offset += non_whitespace_len;
2197 if non_whitespace_len != 0 {
2198 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2199 return;
2200 }
2201 break;
2202 }
2203 }
2204 self.edit([(offset..len, "\n")], None, cx);
2205 }
2206
2207 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2208 /// calculated, the diff is adjusted to account for those changes, and any parts of
2209 /// the diff that conflict with those changes are discarded.
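///
/// A typical flow computes the diff in the background and applies it once it
/// resolves (a sketch, not the only usage pattern; assumes async code with access
/// to the buffer `Entity` and a gpui app context, error handling elided):
///
/// ```ignore
/// // Kick off the diff computation on a background thread...
/// let diff_task = buffer.read(cx).diff(new_text, cx);
/// // ...and apply the result once it is ready. Hunks that conflict with edits
/// // made in the meantime are dropped; the rest are applied as one transaction.
/// let diff = diff_task.await;
/// buffer.update(cx, |buffer, cx| {
///     buffer.apply_diff(diff, cx);
/// });
/// ```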
2210 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2211 let snapshot = self.snapshot();
2212 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2213 let mut delta = 0;
2214 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2215 while let Some(edit_since) = edits_since.peek() {
2216 // If the edit occurs after a diff hunk, then it does not
2217 // affect that hunk.
2218 if edit_since.old.start > range.end {
2219 break;
2220 }
2221 // If the edit precedes the diff hunk, then adjust the hunk
2222 // to reflect the edit.
2223 else if edit_since.old.end < range.start {
2224 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2225 edits_since.next();
2226 }
2227 // If the edit intersects a diff hunk, then discard that hunk.
2228 else {
2229 return None;
2230 }
2231 }
2232
2233 let start = (range.start as i64 + delta) as usize;
2234 let end = (range.end as i64 + delta) as usize;
2235 Some((start..end, new_text))
2236 });
2237
2238 self.start_transaction();
2239 self.text.set_line_ending(diff.line_ending);
2240 self.edit(adjusted_edits, None, cx);
2241 self.end_transaction(cx)
2242 }
2243
2244 pub fn has_unsaved_edits(&self) -> bool {
2245 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2246
2247 if last_version == self.version {
2248 self.has_unsaved_edits
2249 .set((last_version, has_unsaved_edits));
2250 return has_unsaved_edits;
2251 }
2252
2253 let has_edits = self.has_edits_since(&self.saved_version);
2254 self.has_unsaved_edits
2255 .set((self.version.clone(), has_edits));
2256 has_edits
2257 }
2258
2259 /// Checks if the buffer has unsaved changes.
2260 pub fn is_dirty(&self) -> bool {
2261 if self.capability == Capability::ReadOnly {
2262 return false;
2263 }
2264 if self.has_conflict {
2265 return true;
2266 }
2267 match self.file.as_ref().map(|f| f.disk_state()) {
2268 Some(DiskState::New) | Some(DiskState::Deleted) => {
2269 !self.is_empty() && self.has_unsaved_edits()
2270 }
2271 _ => self.has_unsaved_edits(),
2272 }
2273 }
2274
2275 /// Marks the buffer as having a conflict regardless of current buffer state.
2276 pub fn set_conflict(&mut self) {
2277 self.has_conflict = true;
2278 }
2279
2280 /// Checks if the buffer and its file have both changed since the buffer
2281 /// was last saved or reloaded.
2282 pub fn has_conflict(&self) -> bool {
2283 if self.has_conflict {
2284 return true;
2285 }
2286 let Some(file) = self.file.as_ref() else {
2287 return false;
2288 };
2289 match file.disk_state() {
2290 DiskState::New => false,
2291 DiskState::Present { mtime } => match self.saved_mtime {
2292 Some(saved_mtime) => {
2293 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2294 }
2295 None => true,
2296 },
2297 DiskState::Deleted => false,
2298 DiskState::Historic { .. } => false,
2299 }
2300 }
2301
2302 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2303 pub fn subscribe(&mut self) -> Subscription<usize> {
2304 self.text.subscribe()
2305 }
2306
2307 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2308 ///
2309 /// This allows downstream code to check if the buffer's text has changed without
2310 /// waiting for an effect cycle, which would be required if using events.
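///
/// A minimal sketch of the intended pattern (illustrative only):
///
/// ```ignore
/// use std::{cell::Cell, rc::Rc};
///
/// let changed = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&changed));
///
/// // ...later, on the same thread, check and reset the bit without waiting
/// // for an effect cycle:
/// if changed.take() {
///     // The buffer's text changed since the last check.
/// }
/// ```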
2311 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2312 if let Err(ix) = self
2313 .change_bits
2314 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2315 {
2316 self.change_bits.insert(ix, bit);
2317 }
2318 }
2319
2320 /// Set the change bit for all "listeners".
2321 fn was_changed(&mut self) {
2322 self.change_bits.retain(|change_bit| {
2323 change_bit
2324 .upgrade()
2325 .inspect(|bit| {
2326 _ = bit.replace(true);
2327 })
2328 .is_some()
2329 });
2330 }
2331
2332 /// Starts a transaction, if one is not already in-progress. When undoing or
2333 /// redoing edits, all of the edits performed within a transaction are undone
2334 /// or redone together.
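///
/// A sketch of grouping several edits into one undoable transaction (illustrative
/// only; the edit ranges are arbitrary):
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "// header\n")], None, cx);
/// buffer.edit([(Point::new(5, 0)..Point::new(5, 0), "    todo!();\n")], None, cx);
/// let transaction_id = buffer.end_transaction(cx);
/// // A single call to `undo` now reverts both edits together.
/// ```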
2335 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2336 self.start_transaction_at(Instant::now())
2337 }
2338
2339 /// Starts a transaction, providing the current time. Subsequent transactions
2340 /// that occur within a short period of time will be grouped together. This
2341 /// is controlled by the buffer's undo grouping duration.
2342 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2343 self.transaction_depth += 1;
2344 if self.was_dirty_before_starting_transaction.is_none() {
2345 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2346 }
2347 self.text.start_transaction_at(now)
2348 }
2349
2350 /// Terminates the current transaction, if this is the outermost transaction.
2351 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2352 self.end_transaction_at(Instant::now(), cx)
2353 }
2354
2355 /// Terminates the current transaction, providing the current time. Subsequent transactions
2356 /// that occur within a short period of time will be grouped together. This
2357 /// is controlled by the buffer's undo grouping duration.
2358 pub fn end_transaction_at(
2359 &mut self,
2360 now: Instant,
2361 cx: &mut Context<Self>,
2362 ) -> Option<TransactionId> {
2363 assert!(self.transaction_depth > 0);
2364 self.transaction_depth -= 1;
2365 let was_dirty = if self.transaction_depth == 0 {
2366 self.was_dirty_before_starting_transaction.take().unwrap()
2367 } else {
2368 false
2369 };
2370 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2371 self.did_edit(&start_version, was_dirty, cx);
2372 Some(transaction_id)
2373 } else {
2374 None
2375 }
2376 }
2377
2378 /// Manually add a transaction to the buffer's undo history.
2379 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2380 self.text.push_transaction(transaction, now);
2381 }
2382
2383 /// Differs from `push_transaction` in that it does not clear the redo
2384 /// stack. Intended to be used to create a parent transaction to merge
2385 /// potential child transactions into.
2386 ///
2387 /// The caller is responsible for removing it from the undo history using
2388 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2389 /// are merged into this transaction, the caller is responsible for ensuring
2390 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2391 /// cleared is to create transactions with the usual `start_transaction` and
2392 /// `end_transaction` methods and merging the resulting transactions into
2393 /// the transaction created by this method.
2394 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2395 self.text.push_empty_transaction(now)
2396 }
2397
2398 /// Prevent the last transaction from being grouped with any subsequent transactions,
2399 /// even if they occur within the buffer's undo grouping duration.
2400 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2401 self.text.finalize_last_transaction()
2402 }
2403
2404 /// Manually group all changes since a given transaction.
2405 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2406 self.text.group_until_transaction(transaction_id);
2407 }
2408
2409 /// Manually remove a transaction from the buffer's undo history
2410 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2411 self.text.forget_transaction(transaction_id)
2412 }
2413
2414 /// Retrieve a transaction from the buffer's undo history
2415 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2416 self.text.get_transaction(transaction_id)
2417 }
2418
2419 /// Manually merge two transactions in the buffer's undo history.
2420 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2421 self.text.merge_transactions(transaction, destination);
2422 }
2423
2424 /// Waits for the buffer to receive operations with the given timestamps.
2425 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2426 &mut self,
2427 edit_ids: It,
2428 ) -> impl Future<Output = Result<()>> + use<It> {
2429 self.text.wait_for_edits(edit_ids)
2430 }
2431
2432 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2433 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2434 &mut self,
2435 anchors: It,
2436 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2437 self.text.wait_for_anchors(anchors)
2438 }
2439
2440 /// Waits for the buffer to receive operations up to the given version.
2441 pub fn wait_for_version(
2442 &mut self,
2443 version: clock::Global,
2444 ) -> impl Future<Output = Result<()>> + use<> {
2445 self.text.wait_for_version(version)
2446 }
2447
2448 /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`], or
2449 /// [`Buffer::wait_for_version`] to resolve with an error.
2450 pub fn give_up_waiting(&mut self) {
2451 self.text.give_up_waiting();
2452 }
2453
2454 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2455 let mut rx = None;
2456 if !self.autoindent_requests.is_empty() {
2457 let channel = oneshot::channel();
2458 self.wait_for_autoindent_txs.push(channel.0);
2459 rx = Some(channel.1);
2460 }
2461 rx
2462 }
2463
2464 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2465 pub fn set_active_selections(
2466 &mut self,
2467 selections: Arc<[Selection<Anchor>]>,
2468 line_mode: bool,
2469 cursor_shape: CursorShape,
2470 cx: &mut Context<Self>,
2471 ) {
2472 let lamport_timestamp = self.text.lamport_clock.tick();
2473 self.remote_selections.insert(
2474 self.text.replica_id(),
2475 SelectionSet {
2476 selections: selections.clone(),
2477 lamport_timestamp,
2478 line_mode,
2479 cursor_shape,
2480 },
2481 );
2482 self.send_operation(
2483 Operation::UpdateSelections {
2484 selections,
2485 line_mode,
2486 lamport_timestamp,
2487 cursor_shape,
2488 },
2489 true,
2490 cx,
2491 );
2492 self.non_text_state_update_count += 1;
2493 cx.notify();
2494 }
2495
2496 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2497 /// this replica.
2498 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2499 if self
2500 .remote_selections
2501 .get(&self.text.replica_id())
2502 .is_none_or(|set| !set.selections.is_empty())
2503 {
2504 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2505 }
2506 }
2507
2508 pub fn set_agent_selections(
2509 &mut self,
2510 selections: Arc<[Selection<Anchor>]>,
2511 line_mode: bool,
2512 cursor_shape: CursorShape,
2513 cx: &mut Context<Self>,
2514 ) {
2515 let lamport_timestamp = self.text.lamport_clock.tick();
2516 self.remote_selections.insert(
2517 ReplicaId::AGENT,
2518 SelectionSet {
2519 selections,
2520 lamport_timestamp,
2521 line_mode,
2522 cursor_shape,
2523 },
2524 );
2525 self.non_text_state_update_count += 1;
2526 cx.notify();
2527 }
2528
2529 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2530 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2531 }
2532
2533 /// Replaces the buffer's entire text.
2534 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2535 where
2536 T: Into<Arc<str>>,
2537 {
2538 self.autoindent_requests.clear();
2539 self.edit([(0..self.len(), text)], None, cx)
2540 }
2541
2542 /// Appends the given text to the end of the buffer.
2543 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2544 where
2545 T: Into<Arc<str>>,
2546 {
2547 self.edit([(self.len()..self.len(), text)], None, cx)
2548 }
2549
2550 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2551 /// delete, and a string of text to insert at that location. Adjacent edits are coalesced.
2552 ///
2553 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2554 /// request for the edited ranges, which will be processed when the buffer finishes
2555 /// parsing.
2556 ///
2557 /// Parsing takes place at the end of a transaction, and may compute synchronously
2558 /// or asynchronously, depending on the changes.
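///
/// A sketch of a multi-range edit with auto-indentation (illustrative only;
/// the ranges and text are arbitrary):
///
/// ```ignore
/// buffer.edit(
///     [
///         // Replace the first five columns of row 0...
///         (Point::new(0, 0)..Point::new(0, 5), "hello"),
///         // ...and insert a new line at the start of row 2, auto-indenting it.
///         (Point::new(2, 0)..Point::new(2, 0), "let x = 1;\n"),
///     ],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```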
2559 pub fn edit<I, S, T>(
2560 &mut self,
2561 edits_iter: I,
2562 autoindent_mode: Option<AutoindentMode>,
2563 cx: &mut Context<Self>,
2564 ) -> Option<clock::Lamport>
2565 where
2566 I: IntoIterator<Item = (Range<S>, T)>,
2567 S: ToOffset,
2568 T: Into<Arc<str>>,
2569 {
2570 self.edit_internal(edits_iter, autoindent_mode, true, cx)
2571 }
2572
2573 /// Like [`edit`](Self::edit), but does not coalesce adjacent edits.
2574 pub fn edit_non_coalesce<I, S, T>(
2575 &mut self,
2576 edits_iter: I,
2577 autoindent_mode: Option<AutoindentMode>,
2578 cx: &mut Context<Self>,
2579 ) -> Option<clock::Lamport>
2580 where
2581 I: IntoIterator<Item = (Range<S>, T)>,
2582 S: ToOffset,
2583 T: Into<Arc<str>>,
2584 {
2585 self.edit_internal(edits_iter, autoindent_mode, false, cx)
2586 }
2587
2588 fn edit_internal<I, S, T>(
2589 &mut self,
2590 edits_iter: I,
2591 autoindent_mode: Option<AutoindentMode>,
2592 coalesce_adjacent: bool,
2593 cx: &mut Context<Self>,
2594 ) -> Option<clock::Lamport>
2595 where
2596 I: IntoIterator<Item = (Range<S>, T)>,
2597 S: ToOffset,
2598 T: Into<Arc<str>>,
2599 {
2600 // Skip invalid edits and coalesce contiguous ones.
2601 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2602
2603 for (range, new_text) in edits_iter {
2604 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2605
2606 if range.start > range.end {
2607 mem::swap(&mut range.start, &mut range.end);
2608 }
2609 let new_text = new_text.into();
2610 if !new_text.is_empty() || !range.is_empty() {
2611 let prev_edit = edits.last_mut();
2612 let should_coalesce = prev_edit.as_ref().is_some_and(|(prev_range, _)| {
2613 if coalesce_adjacent {
2614 prev_range.end >= range.start
2615 } else {
2616 prev_range.end > range.start
2617 }
2618 });
2619
2620 if let Some((prev_range, prev_text)) = prev_edit
2621 && should_coalesce
2622 {
2623 prev_range.end = cmp::max(prev_range.end, range.end);
2624 *prev_text = format!("{prev_text}{new_text}").into();
2625 } else {
2626 edits.push((range, new_text));
2627 }
2628 }
2629 }
2630 if edits.is_empty() {
2631 return None;
2632 }
2633
2634 self.start_transaction();
2635 self.pending_autoindent.take();
2636 let autoindent_request = autoindent_mode
2637 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2638
2639 let edit_operation = self.text.edit(edits.iter().cloned());
2640 let edit_id = edit_operation.timestamp();
2641
2642 if let Some((before_edit, mode)) = autoindent_request {
2643 let mut delta = 0isize;
2644 let mut previous_setting = None;
2645 let entries: Vec<_> = edits
2646 .into_iter()
2647 .enumerate()
2648 .zip(&edit_operation.as_edit().unwrap().new_text)
2649 .filter(|((_, (range, _)), _)| {
2650 let language = before_edit.language_at(range.start);
2651 let language_id = language.map(|l| l.id());
2652 if let Some((cached_language_id, auto_indent)) = previous_setting
2653 && cached_language_id == language_id
2654 {
2655 auto_indent
2656 } else {
2657 // The auto-indent setting is not present in editorconfigs, hence
2658 // we can avoid passing the file here.
2659 let auto_indent =
2660 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2661 previous_setting = Some((language_id, auto_indent));
2662 auto_indent
2663 }
2664 })
2665 .map(|((ix, (range, _)), new_text)| {
2666 let new_text_length = new_text.len();
2667 let old_start = range.start.to_point(&before_edit);
2668 let new_start = (delta + range.start as isize) as usize;
2669 let range_len = range.end - range.start;
2670 delta += new_text_length as isize - range_len as isize;
2671
2672 // Decide what range of the insertion to auto-indent, and whether
2673 // the first line of the insertion should be considered a newly-inserted line
2674 // or an edit to an existing line.
2675 let mut range_of_insertion_to_indent = 0..new_text_length;
2676 let mut first_line_is_new = true;
2677
2678 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2679 let old_line_end = before_edit.line_len(old_start.row);
2680
2681 if old_start.column > old_line_start {
2682 first_line_is_new = false;
2683 }
2684
2685 if !new_text.contains('\n')
2686 && (old_start.column + (range_len as u32) < old_line_end
2687 || old_line_end == old_line_start)
2688 {
2689 first_line_is_new = false;
2690 }
2691
2692 // When inserting text starting with a newline, avoid auto-indenting the
2693 // previous line.
2694 if new_text.starts_with('\n') {
2695 range_of_insertion_to_indent.start += 1;
2696 first_line_is_new = true;
2697 }
2698
2699 let mut original_indent_column = None;
2700 if let AutoindentMode::Block {
2701 original_indent_columns,
2702 } = &mode
2703 {
2704 original_indent_column = Some(if new_text.starts_with('\n') {
2705 indent_size_for_text(
2706 new_text[range_of_insertion_to_indent.clone()].chars(),
2707 )
2708 .len
2709 } else {
2710 original_indent_columns
2711 .get(ix)
2712 .copied()
2713 .flatten()
2714 .unwrap_or_else(|| {
2715 indent_size_for_text(
2716 new_text[range_of_insertion_to_indent.clone()].chars(),
2717 )
2718 .len
2719 })
2720 });
2721
2722 // Avoid auto-indenting the line after the edit.
2723 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2724 range_of_insertion_to_indent.end -= 1;
2725 }
2726 }
2727
2728 AutoindentRequestEntry {
2729 original_indent_column,
2730 old_row: if first_line_is_new {
2731 None
2732 } else {
2733 Some(old_start.row)
2734 },
2735 indent_size: before_edit.language_indent_size_at(range.start, cx),
2736 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2737 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2738 }
2739 })
2740 .collect();
2741
2742 if !entries.is_empty() {
2743 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2744 before_edit,
2745 entries,
2746 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2747 ignore_empty_lines: false,
2748 }));
2749 }
2750 }
2751
2752 self.end_transaction(cx);
2753 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2754 Some(edit_id)
2755 }
2756
2757 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2758 self.was_changed();
2759
2760 if self.edits_since::<usize>(old_version).next().is_none() {
2761 return;
2762 }
2763
2764 self.reparse(cx, true);
2765 cx.emit(BufferEvent::Edited);
2766 if was_dirty != self.is_dirty() {
2767 cx.emit(BufferEvent::DirtyChanged);
2768 }
2769 cx.notify();
2770 }
2771
2772 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2773 where
2774 I: IntoIterator<Item = Range<T>>,
2775 T: ToOffset + Copy,
2776 {
2777 let before_edit = self.snapshot();
2778 let entries = ranges
2779 .into_iter()
2780 .map(|range| AutoindentRequestEntry {
2781 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2782 old_row: None,
2783 indent_size: before_edit.language_indent_size_at(range.start, cx),
2784 original_indent_column: None,
2785 })
2786 .collect();
2787 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2788 before_edit,
2789 entries,
2790 is_block_mode: false,
2791 ignore_empty_lines: true,
2792 }));
2793 self.request_autoindent(cx, Duration::from_micros(300));
2794 }
2795
2796 /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2797 /// You can also request the insertion of empty lines above and below the line starting at the returned point.
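///
/// A usage sketch (illustrative only; `cursor_point` is a hypothetical
/// caller-supplied `Point`):
///
/// ```ignore
/// // Open an empty, auto-indented line at the cursor, padding it with blank
/// // lines above and below when the neighboring lines are not already blank.
/// let new_line_start = buffer.insert_empty_line(cursor_point, true, true, cx);
/// ```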
2798 pub fn insert_empty_line(
2799 &mut self,
2800 position: impl ToPoint,
2801 space_above: bool,
2802 space_below: bool,
2803 cx: &mut Context<Self>,
2804 ) -> Point {
2805 let mut position = position.to_point(self);
2806
2807 self.start_transaction();
2808
2809 self.edit(
2810 [(position..position, "\n")],
2811 Some(AutoindentMode::EachLine),
2812 cx,
2813 );
2814
2815 if position.column > 0 {
2816 position += Point::new(1, 0);
2817 }
2818
2819 if !self.is_line_blank(position.row) {
2820 self.edit(
2821 [(position..position, "\n")],
2822 Some(AutoindentMode::EachLine),
2823 cx,
2824 );
2825 }
2826
2827 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2828 self.edit(
2829 [(position..position, "\n")],
2830 Some(AutoindentMode::EachLine),
2831 cx,
2832 );
2833 position.row += 1;
2834 }
2835
2836 if space_below
2837 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2838 {
2839 self.edit(
2840 [(position..position, "\n")],
2841 Some(AutoindentMode::EachLine),
2842 cx,
2843 );
2844 }
2845
2846 self.end_transaction(cx);
2847
2848 position
2849 }
2850
2851 /// Applies the given remote operations to the buffer.
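///
/// A sketch of the collaboration flow (illustrative only; `incoming_ops` is a
/// hypothetical batch of `Operation`s produced by another replica, for example
/// forwarded from its `BufferEvent::Operation` events):
///
/// ```ignore
/// replica_buffer.update(cx, |buffer, cx| {
///     buffer.apply_ops(incoming_ops, cx);
/// });
/// ```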
2852 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2853 self.pending_autoindent.take();
2854 let was_dirty = self.is_dirty();
2855 let old_version = self.version.clone();
2856 let mut deferred_ops = Vec::new();
2857 let buffer_ops = ops
2858 .into_iter()
2859 .filter_map(|op| match op {
2860 Operation::Buffer(op) => Some(op),
2861 _ => {
2862 if self.can_apply_op(&op) {
2863 self.apply_op(op, cx);
2864 } else {
2865 deferred_ops.push(op);
2866 }
2867 None
2868 }
2869 })
2870 .collect::<Vec<_>>();
2871 for operation in buffer_ops.iter() {
2872 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2873 }
2874 self.text.apply_ops(buffer_ops);
2875 self.deferred_ops.insert(deferred_ops);
2876 self.flush_deferred_ops(cx);
2877 self.did_edit(&old_version, was_dirty, cx);
2878 // Notify independently of whether the buffer was edited as the operations could include a
2879 // selection update.
2880 cx.notify();
2881 }
2882
2883 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2884 let mut deferred_ops = Vec::new();
2885 for op in self.deferred_ops.drain().iter().cloned() {
2886 if self.can_apply_op(&op) {
2887 self.apply_op(op, cx);
2888 } else {
2889 deferred_ops.push(op);
2890 }
2891 }
2892 self.deferred_ops.insert(deferred_ops);
2893 }
2894
2895 pub fn has_deferred_ops(&self) -> bool {
2896 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2897 }
2898
2899 fn can_apply_op(&self, operation: &Operation) -> bool {
2900 match operation {
2901 Operation::Buffer(_) => {
2902 unreachable!("buffer operations should never be applied at this layer")
2903 }
2904 Operation::UpdateDiagnostics {
2905 diagnostics: diagnostic_set,
2906 ..
2907 } => diagnostic_set.iter().all(|diagnostic| {
2908 self.text.can_resolve(&diagnostic.range.start)
2909 && self.text.can_resolve(&diagnostic.range.end)
2910 }),
2911 Operation::UpdateSelections { selections, .. } => selections
2912 .iter()
2913 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2914 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2915 }
2916 }
2917
2918 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2919 match operation {
2920 Operation::Buffer(_) => {
2921 unreachable!("buffer operations should never be applied at this layer")
2922 }
2923 Operation::UpdateDiagnostics {
2924 server_id,
2925 diagnostics: diagnostic_set,
2926 lamport_timestamp,
2927 } => {
2928 let snapshot = self.snapshot();
2929 self.apply_diagnostic_update(
2930 server_id,
2931 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2932 lamport_timestamp,
2933 cx,
2934 );
2935 }
2936 Operation::UpdateSelections {
2937 selections,
2938 lamport_timestamp,
2939 line_mode,
2940 cursor_shape,
2941 } => {
2942 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2943 && set.lamport_timestamp > lamport_timestamp
2944 {
2945 return;
2946 }
2947
2948 self.remote_selections.insert(
2949 lamport_timestamp.replica_id,
2950 SelectionSet {
2951 selections,
2952 lamport_timestamp,
2953 line_mode,
2954 cursor_shape,
2955 },
2956 );
2957 self.text.lamport_clock.observe(lamport_timestamp);
2958 self.non_text_state_update_count += 1;
2959 }
2960 Operation::UpdateCompletionTriggers {
2961 triggers,
2962 lamport_timestamp,
2963 server_id,
2964 } => {
2965 if triggers.is_empty() {
2966 self.completion_triggers_per_language_server
2967 .remove(&server_id);
2968 self.completion_triggers = self
2969 .completion_triggers_per_language_server
2970 .values()
2971 .flat_map(|triggers| triggers.iter().cloned())
2972 .collect();
2973 } else {
2974 self.completion_triggers_per_language_server
2975 .insert(server_id, triggers.iter().cloned().collect());
2976 self.completion_triggers.extend(triggers);
2977 }
2978 self.text.lamport_clock.observe(lamport_timestamp);
2979 }
2980 Operation::UpdateLineEnding {
2981 line_ending,
2982 lamport_timestamp,
2983 } => {
2984 self.text.set_line_ending(line_ending);
2985 self.text.lamport_clock.observe(lamport_timestamp);
2986 }
2987 }
2988 }
2989
2990 fn apply_diagnostic_update(
2991 &mut self,
2992 server_id: LanguageServerId,
2993 diagnostics: DiagnosticSet,
2994 lamport_timestamp: clock::Lamport,
2995 cx: &mut Context<Self>,
2996 ) {
2997 if lamport_timestamp > self.diagnostics_timestamp {
2998 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2999 if diagnostics.is_empty() {
3000 if let Ok(ix) = ix {
3001 self.diagnostics.remove(ix);
3002 }
3003 } else {
3004 match ix {
3005 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
3006 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
3007 };
3008 }
3009 self.diagnostics_timestamp = lamport_timestamp;
3010 self.non_text_state_update_count += 1;
3011 self.text.lamport_clock.observe(lamport_timestamp);
3012 cx.notify();
3013 cx.emit(BufferEvent::DiagnosticsUpdated);
3014 }
3015 }
3016
3017 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
3018 self.was_changed();
3019 cx.emit(BufferEvent::Operation {
3020 operation,
3021 is_local,
3022 });
3023 }
3024
3025 /// Removes the selections for a given peer.
3026 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
3027 self.remote_selections.remove(&replica_id);
3028 cx.notify();
3029 }
3030
3031 /// Undoes the most recent transaction.
3032 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3033 let was_dirty = self.is_dirty();
3034 let old_version = self.version.clone();
3035
3036 if let Some((transaction_id, operation)) = self.text.undo() {
3037 self.send_operation(Operation::Buffer(operation), true, cx);
3038 self.did_edit(&old_version, was_dirty, cx);
3039 Some(transaction_id)
3040 } else {
3041 None
3042 }
3043 }
3044
3045 /// Manually undoes a specific transaction in the buffer's undo history.
3046 pub fn undo_transaction(
3047 &mut self,
3048 transaction_id: TransactionId,
3049 cx: &mut Context<Self>,
3050 ) -> bool {
3051 let was_dirty = self.is_dirty();
3052 let old_version = self.version.clone();
3053 if let Some(operation) = self.text.undo_transaction(transaction_id) {
3054 self.send_operation(Operation::Buffer(operation), true, cx);
3055 self.did_edit(&old_version, was_dirty, cx);
3056 true
3057 } else {
3058 false
3059 }
3060 }
3061
3062 /// Manually undoes all changes after a given transaction in the buffer's undo history.
3063 pub fn undo_to_transaction(
3064 &mut self,
3065 transaction_id: TransactionId,
3066 cx: &mut Context<Self>,
3067 ) -> bool {
3068 let was_dirty = self.is_dirty();
3069 let old_version = self.version.clone();
3070
3071 let operations = self.text.undo_to_transaction(transaction_id);
3072 let undone = !operations.is_empty();
3073 for operation in operations {
3074 self.send_operation(Operation::Buffer(operation), true, cx);
3075 }
3076 if undone {
3077 self.did_edit(&old_version, was_dirty, cx)
3078 }
3079 undone
3080 }
3081
3082 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3083 let was_dirty = self.is_dirty();
3084 let operation = self.text.undo_operations(counts);
3085 let old_version = self.version.clone();
3086 self.send_operation(Operation::Buffer(operation), true, cx);
3087 self.did_edit(&old_version, was_dirty, cx);
3088 }
3089
3090 /// Redoes the most recently undone transaction.
3091 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3092 let was_dirty = self.is_dirty();
3093 let old_version = self.version.clone();
3094
3095 if let Some((transaction_id, operation)) = self.text.redo() {
3096 self.send_operation(Operation::Buffer(operation), true, cx);
3097 self.did_edit(&old_version, was_dirty, cx);
3098 Some(transaction_id)
3099 } else {
3100 None
3101 }
3102 }
3103
3104 /// Manually redoes all changes until a given transaction in the buffer's redo history.
3105 pub fn redo_to_transaction(
3106 &mut self,
3107 transaction_id: TransactionId,
3108 cx: &mut Context<Self>,
3109 ) -> bool {
3110 let was_dirty = self.is_dirty();
3111 let old_version = self.version.clone();
3112
3113 let operations = self.text.redo_to_transaction(transaction_id);
3114 let redone = !operations.is_empty();
3115 for operation in operations {
3116 self.send_operation(Operation::Buffer(operation), true, cx);
3117 }
3118 if redone {
3119 self.did_edit(&old_version, was_dirty, cx)
3120 }
3121 redone
3122 }
3123
3124 /// Overrides the current completion triggers with the given triggers for the specified language server.
3125 pub fn set_completion_triggers(
3126 &mut self,
3127 server_id: LanguageServerId,
3128 triggers: BTreeSet<String>,
3129 cx: &mut Context<Self>,
3130 ) {
3131 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3132 if triggers.is_empty() {
3133 self.completion_triggers_per_language_server
3134 .remove(&server_id);
3135 self.completion_triggers = self
3136 .completion_triggers_per_language_server
3137 .values()
3138 .flat_map(|triggers| triggers.iter().cloned())
3139 .collect();
3140 } else {
3141 self.completion_triggers_per_language_server
3142 .insert(server_id, triggers.clone());
3143 self.completion_triggers.extend(triggers.iter().cloned());
3144 }
3145 self.send_operation(
3146 Operation::UpdateCompletionTriggers {
3147 triggers: triggers.into_iter().collect(),
3148 lamport_timestamp: self.completion_triggers_timestamp,
3149 server_id,
3150 },
3151 true,
3152 cx,
3153 );
3154 cx.notify();
3155 }
3156
3157 /// Returns a list of strings which trigger a completion menu for this language.
3158 /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
3159 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3160 &self.completion_triggers
3161 }
3162
3163 /// Call this directly after performing edits to prevent the preview tab
3164 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3165 /// to return false until there are additional edits.
3166 pub fn refresh_preview(&mut self) {
3167 self.preview_version = self.version.clone();
3168 }
3169
3170 /// Whether we should preserve the preview status of a tab containing this buffer.
3171 pub fn preserve_preview(&self) -> bool {
3172 !self.has_edits_since(&self.preview_version)
3173 }
3174}
3175
3176#[doc(hidden)]
3177#[cfg(any(test, feature = "test-support"))]
3178impl Buffer {
3179 pub fn edit_via_marked_text(
3180 &mut self,
3181 marked_string: &str,
3182 autoindent_mode: Option<AutoindentMode>,
3183 cx: &mut Context<Self>,
3184 ) {
3185 let edits = self.edits_for_marked_text(marked_string);
3186 self.edit(edits, autoindent_mode, cx);
3187 }
3188
3189 pub fn set_group_interval(&mut self, group_interval: Duration) {
3190 self.text.set_group_interval(group_interval);
3191 }
3192
3193 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3194 where
3195 T: rand::Rng,
3196 {
3197 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3198 let mut last_end = None;
3199 for _ in 0..old_range_count {
3200 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3201 break;
3202 }
3203
3204 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3205 let mut range = self.random_byte_range(new_start, rng);
3206 if rng.random_bool(0.2) {
3207 mem::swap(&mut range.start, &mut range.end);
3208 }
3209 last_end = Some(range.end);
3210
3211 let new_text_len = rng.random_range(0..10);
3212 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3213 new_text = new_text.to_uppercase();
3214
3215 edits.push((range, new_text));
3216 }
3217 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3218 self.edit(edits, None, cx);
3219 }
3220
3221 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3222 let was_dirty = self.is_dirty();
3223 let old_version = self.version.clone();
3224
3225 let ops = self.text.randomly_undo_redo(rng);
3226 if !ops.is_empty() {
3227 for op in ops {
3228 self.send_operation(Operation::Buffer(op), true, cx);
3229 self.did_edit(&old_version, was_dirty, cx);
3230 }
3231 }
3232 }
3233}
3234
3235impl EventEmitter<BufferEvent> for Buffer {}
3236
3237impl Deref for Buffer {
3238 type Target = TextBuffer;
3239
3240 fn deref(&self) -> &Self::Target {
3241 &self.text
3242 }
3243}
3244
3245impl BufferSnapshot {
3246 /// Returns [`IndentSize`] for a given line that respects user settings and
3247 /// language preferences.
3248 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3249 indent_size_for_line(self, row)
3250 }
3251
3252 /// Returns [`IndentSize`] for a given position that respects user settings
3253 /// and language preferences.
3254 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3255 let settings = language_settings(
3256 self.language_at(position).map(|l| l.name()),
3257 self.file(),
3258 cx,
3259 );
3260 if settings.hard_tabs {
3261 IndentSize::tab()
3262 } else {
3263 IndentSize::spaces(settings.tab_size.get())
3264 }
3265 }
3266
3267 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3268 /// is passed in as `single_indent_size`.
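///
/// A usage sketch (illustrative only; assumes an existing `BufferSnapshot` named
/// `snapshot` and an arbitrary row range):
///
/// ```ignore
/// let suggestions = snapshot.suggested_indents(2..6, IndentSize::spaces(4));
/// for (row, indent) in suggestions {
///     println!("row {row}: indent to {} columns", indent.len);
/// }
/// ```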
3269 pub fn suggested_indents(
3270 &self,
3271 rows: impl Iterator<Item = u32>,
3272 single_indent_size: IndentSize,
3273 ) -> BTreeMap<u32, IndentSize> {
3274 let mut result = BTreeMap::new();
3275
3276 for row_range in contiguous_ranges(rows, 10) {
3277 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3278 Some(suggestions) => suggestions,
3279 _ => break,
3280 };
3281
3282 for (row, suggestion) in row_range.zip(suggestions) {
3283 let indent_size = if let Some(suggestion) = suggestion {
3284 result
3285 .get(&suggestion.basis_row)
3286 .copied()
3287 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3288 .with_delta(suggestion.delta, single_indent_size)
3289 } else {
3290 self.indent_size_for_line(row)
3291 };
3292
3293 result.insert(row, indent_size);
3294 }
3295 }
3296
3297 result
3298 }
3299
3300 fn suggest_autoindents(
3301 &self,
3302 row_range: Range<u32>,
3303 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3304 let config = &self.language.as_ref()?.config;
3305 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3306
3307 #[derive(Debug, Clone)]
3308 struct StartPosition {
3309 start: Point,
3310 suffix: SharedString,
3311 language: Arc<Language>,
3312 }
3313
3314 // Find the suggested indentation ranges based on the syntax tree.
3315 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3316 let end = Point::new(row_range.end, 0);
3317 let range = (start..end).to_offset(&self.text);
3318 let mut matches = self.syntax.matches_with_options(
3319 range.clone(),
3320 &self.text,
3321 TreeSitterOptions {
3322 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3323 max_start_depth: None,
3324 },
3325 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3326 );
3327 let indent_configs = matches
3328 .grammars()
3329 .iter()
3330 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3331 .collect::<Vec<_>>();
3332
3333 let mut indent_ranges = Vec::<Range<Point>>::new();
3334 let mut start_positions = Vec::<StartPosition>::new();
3335 let mut outdent_positions = Vec::<Point>::new();
3336 while let Some(mat) = matches.peek() {
3337 let mut start: Option<Point> = None;
3338 let mut end: Option<Point> = None;
3339
3340 let config = indent_configs[mat.grammar_index];
3341 for capture in mat.captures {
3342 if capture.index == config.indent_capture_ix {
3343 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3344 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3345 } else if Some(capture.index) == config.start_capture_ix {
3346 start = Some(Point::from_ts_point(capture.node.end_position()));
3347 } else if Some(capture.index) == config.end_capture_ix {
3348 end = Some(Point::from_ts_point(capture.node.start_position()));
3349 } else if Some(capture.index) == config.outdent_capture_ix {
3350 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3351 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3352 start_positions.push(StartPosition {
3353 start: Point::from_ts_point(capture.node.start_position()),
3354 suffix: suffix.clone(),
3355 language: mat.language.clone(),
3356 });
3357 }
3358 }
3359
3360 matches.advance();
3361 if let Some((start, end)) = start.zip(end) {
3362 if start.row == end.row {
3363 continue;
3364 }
3365 let range = start..end;
3366 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3367 Err(ix) => indent_ranges.insert(ix, range),
3368 Ok(ix) => {
3369 let prev_range = &mut indent_ranges[ix];
3370 prev_range.end = prev_range.end.max(range.end);
3371 }
3372 }
3373 }
3374 }
3375
3376 let mut error_ranges = Vec::<Range<Point>>::new();
3377 let mut matches = self
3378 .syntax
3379 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3380 while let Some(mat) = matches.peek() {
3381 let node = mat.captures[0].node;
3382 let start = Point::from_ts_point(node.start_position());
3383 let end = Point::from_ts_point(node.end_position());
3384 let range = start..end;
3385 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3386 Ok(ix) | Err(ix) => ix,
3387 };
3388 let mut end_ix = ix;
3389 while let Some(existing_range) = error_ranges.get(end_ix) {
3390 if existing_range.end < end {
3391 end_ix += 1;
3392 } else {
3393 break;
3394 }
3395 }
3396 error_ranges.splice(ix..end_ix, [range]);
3397 matches.advance();
3398 }
3399
3400 outdent_positions.sort();
3401 for outdent_position in outdent_positions {
3402 // Find the innermost indent range containing this outdent position,
3403 // and set its end to the outdent position.
3404 if let Some(range_to_truncate) = indent_ranges
3405 .iter_mut()
3406 .rfind(|indent_range| indent_range.contains(&outdent_position))
3407 {
3408 range_to_truncate.end = outdent_position;
3409 }
3410 }
3411
3412 start_positions.sort_by_key(|b| b.start);
3413
3414 // Find the suggested indentation increases and decreases based on regexes.
3415 let mut regex_outdent_map = HashMap::default();
3416 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3417 let mut start_positions_iter = start_positions.iter().peekable();
3418
3419 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3420 self.for_each_line(
3421 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3422 ..Point::new(row_range.end, 0),
3423 |row, line| {
3424 let indent_len = self.indent_size_for_line(row).len;
3425 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3426 let row_language_config = row_language
3427 .as_ref()
3428 .map(|lang| lang.config())
3429 .unwrap_or(config);
3430
3431 if row_language_config
3432 .decrease_indent_pattern
3433 .as_ref()
3434 .is_some_and(|regex| regex.is_match(line))
3435 {
3436 indent_change_rows.push((row, Ordering::Less));
3437 }
3438 if row_language_config
3439 .increase_indent_pattern
3440 .as_ref()
3441 .is_some_and(|regex| regex.is_match(line))
3442 {
3443 indent_change_rows.push((row + 1, Ordering::Greater));
3444 }
3445 while let Some(pos) = start_positions_iter.peek() {
3446 if pos.start.row < row {
3447 let pos = start_positions_iter.next().unwrap().clone();
3448 last_seen_suffix
3449 .entry(pos.suffix.to_string())
3450 .or_default()
3451 .push(pos);
3452 } else {
3453 break;
3454 }
3455 }
3456 for rule in &row_language_config.decrease_indent_patterns {
3457 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3458 let row_start_column = self.indent_size_for_line(row).len;
3459 let basis_row = rule
3460 .valid_after
3461 .iter()
3462 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3463 .flatten()
3464 .filter(|pos| {
3465 row_language
3466 .as_ref()
3467 .or(self.language.as_ref())
3468 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3469 })
3470 .filter(|pos| pos.start.column <= row_start_column)
3471 .max_by_key(|pos| pos.start.row);
3472 if let Some(outdent_to) = basis_row {
3473 regex_outdent_map.insert(row, outdent_to.start.row);
3474 }
3475 break;
3476 }
3477 }
3478 },
3479 );
3480
3481 let mut indent_changes = indent_change_rows.into_iter().peekable();
3482 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3483 prev_non_blank_row.unwrap_or(0)
3484 } else {
3485 row_range.start.saturating_sub(1)
3486 };
3487
3488 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3489 Some(row_range.map(move |row| {
3490 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3491
3492 let mut indent_from_prev_row = false;
3493 let mut outdent_from_prev_row = false;
3494 let mut outdent_to_row = u32::MAX;
3495 let mut from_regex = false;
3496
3497 while let Some((indent_row, delta)) = indent_changes.peek() {
3498 match indent_row.cmp(&row) {
3499 Ordering::Equal => match delta {
3500 Ordering::Less => {
3501 from_regex = true;
3502 outdent_from_prev_row = true
3503 }
3504 Ordering::Greater => {
3505 indent_from_prev_row = true;
3506 from_regex = true
3507 }
3508 _ => {}
3509 },
3510
3511 Ordering::Greater => break,
3512 Ordering::Less => {}
3513 }
3514
3515 indent_changes.next();
3516 }
3517
3518 for range in &indent_ranges {
3519 if range.start.row >= row {
3520 break;
3521 }
3522 if range.start.row == prev_row && range.end > row_start {
3523 indent_from_prev_row = true;
3524 }
3525 if range.end > prev_row_start && range.end <= row_start {
3526 outdent_to_row = outdent_to_row.min(range.start.row);
3527 }
3528 }
3529
3530 if let Some(basis_row) = regex_outdent_map.get(&row) {
3531 indent_from_prev_row = false;
3532 outdent_to_row = *basis_row;
3533 from_regex = true;
3534 }
3535
3536 let within_error = error_ranges
3537 .iter()
3538 .any(|e| e.start.row < row && e.end > row_start);
3539
3540 let suggestion = if outdent_to_row == prev_row
3541 || (outdent_from_prev_row && indent_from_prev_row)
3542 {
3543 Some(IndentSuggestion {
3544 basis_row: prev_row,
3545 delta: Ordering::Equal,
3546 within_error: within_error && !from_regex,
3547 })
3548 } else if indent_from_prev_row {
3549 Some(IndentSuggestion {
3550 basis_row: prev_row,
3551 delta: Ordering::Greater,
3552 within_error: within_error && !from_regex,
3553 })
3554 } else if outdent_to_row < prev_row {
3555 Some(IndentSuggestion {
3556 basis_row: outdent_to_row,
3557 delta: Ordering::Equal,
3558 within_error: within_error && !from_regex,
3559 })
3560 } else if outdent_from_prev_row {
3561 Some(IndentSuggestion {
3562 basis_row: prev_row,
3563 delta: Ordering::Less,
3564 within_error: within_error && !from_regex,
3565 })
3566 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3567 {
3568 Some(IndentSuggestion {
3569 basis_row: prev_row,
3570 delta: Ordering::Equal,
3571 within_error: within_error && !from_regex,
3572 })
3573 } else {
3574 None
3575 };
3576
3577 prev_row = row;
3578 prev_row_start = row_start;
3579 suggestion
3580 }))
3581 }
3582
3583 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3584 while row > 0 {
3585 row -= 1;
3586 if !self.is_line_blank(row) {
3587 return Some(row);
3588 }
3589 }
3590 None
3591 }
3592
3593 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3594 let captures = self.syntax.captures(range, &self.text, |grammar| {
3595 grammar
3596 .highlights_config
3597 .as_ref()
3598 .map(|config| &config.query)
3599 });
3600 let highlight_maps = captures
3601 .grammars()
3602 .iter()
3603 .map(|grammar| grammar.highlight_map())
3604 .collect();
3605 (captures, highlight_maps)
3606 }
3607
3608 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3609 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3610 /// returned in chunks where each chunk has a single syntax highlighting style and
3611 /// diagnostic status.
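/// A minimal usage sketch (not compiled as a doctest); `snapshot` is a hypothetical
/// `BufferSnapshot` held by the caller:
///
/// ```ignore
/// // Collect the buffer's text chunk by chunk, with syntax highlighting enabled.
/// let mut text = String::new();
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // `chunk.syntax_highlight_id` identifies the highlight style, if any.
///     text.push_str(chunk.text);
/// }
/// ```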
3612 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3613 let range = range.start.to_offset(self)..range.end.to_offset(self);
3614
3615 let mut syntax = None;
3616 if language_aware {
3617 syntax = Some(self.get_highlights(range.clone()));
3618 }
3619 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3620 let diagnostics = language_aware;
3621 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3622 }
3623
3624 pub fn highlighted_text_for_range<T: ToOffset>(
3625 &self,
3626 range: Range<T>,
3627 override_style: Option<HighlightStyle>,
3628 syntax_theme: &SyntaxTheme,
3629 ) -> HighlightedText {
3630 HighlightedText::from_buffer_range(
3631 range,
3632 &self.text,
3633 &self.syntax,
3634 override_style,
3635 syntax_theme,
3636 )
3637 }
3638
3639 /// Invokes the given callback for each line of text in the given range of the buffer.
3640 /// A single reusable line buffer is passed to the callback, avoiding a fresh string allocation per line.
3641 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3642 let mut line = String::new();
3643 let mut row = range.start.row;
3644 for chunk in self
3645 .as_rope()
3646 .chunks_in_range(range.to_offset(self))
3647 .chain(["\n"])
3648 {
3649 for (newline_ix, text) in chunk.split('\n').enumerate() {
3650 if newline_ix > 0 {
3651 callback(row, &line);
3652 row += 1;
3653 line.clear();
3654 }
3655 line.push_str(text);
3656 }
3657 }
3658 }
3659
3660 /// Iterates over every [`SyntaxLayer`] in the buffer.
3661 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3662 self.syntax_layers_for_range(0..self.len(), true)
3663 }
3664
3665 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3666 let offset = position.to_offset(self);
3667 self.syntax_layers_for_range(offset..offset, false)
3668 .filter(|l| {
3669 if let Some(ranges) = l.included_sub_ranges {
3670 ranges.iter().any(|range| {
3671 let start = range.start.to_offset(self);
3672 start <= offset && {
3673 let end = range.end.to_offset(self);
3674 offset < end
3675 }
3676 })
3677 } else {
3678 l.node().start_byte() <= offset && l.node().end_byte() > offset
3679 }
3680 })
3681 .last()
3682 }
3683
3684 pub fn syntax_layers_for_range<D: ToOffset>(
3685 &self,
3686 range: Range<D>,
3687 include_hidden: bool,
3688 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3689 self.syntax
3690 .layers_for_range(range, &self.text, include_hidden)
3691 }
3692
3693 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3694 &self,
3695 range: Range<D>,
3696 ) -> Option<SyntaxLayer<'_>> {
3697 let range = range.to_offset(self);
3698 self.syntax
3699 .layers_for_range(range, &self.text, false)
3700 .max_by(|a, b| {
3701 if a.depth != b.depth {
3702 a.depth.cmp(&b.depth)
3703 } else if a.offset.0 != b.offset.0 {
3704 a.offset.0.cmp(&b.offset.0)
3705 } else {
3706 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3707 }
3708 })
3709 }
3710
3711 /// Returns the main [`Language`].
3712 pub fn language(&self) -> Option<&Arc<Language>> {
3713 self.language.as_ref()
3714 }
3715
3716 /// Returns the [`Language`] at the given location.
3717 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3718 self.syntax_layer_at(position)
3719 .map(|info| info.language)
3720 .or(self.language.as_ref())
3721 }
3722
3723 /// Returns the settings for the language at the given location.
3724 pub fn settings_at<'a, D: ToOffset>(
3725 &'a self,
3726 position: D,
3727 cx: &'a App,
3728 ) -> Cow<'a, LanguageSettings> {
3729 language_settings(
3730 self.language_at(position).map(|l| l.name()),
3731 self.file.as_ref(),
3732 cx,
3733 )
3734 }
3735
3736 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3737 CharClassifier::new(self.language_scope_at(point))
3738 }
3739
3740 /// Returns the [`LanguageScope`] at the given location.
3741 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3742 let offset = position.to_offset(self);
3743 let mut scope = None;
3744 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3745
3746 // Use the layer that has the smallest node intersecting the given point.
3747 for layer in self
3748 .syntax
3749 .layers_for_range(offset..offset, &self.text, false)
3750 {
3751 let mut cursor = layer.node().walk();
3752
3753 let mut range = None;
3754 loop {
3755 let child_range = cursor.node().byte_range();
3756 if !child_range.contains(&offset) {
3757 break;
3758 }
3759
3760 range = Some(child_range);
3761 if cursor.goto_first_child_for_byte(offset).is_none() {
3762 break;
3763 }
3764 }
3765
3766 if let Some(range) = range
3767 && smallest_range_and_depth.as_ref().is_none_or(
3768 |(smallest_range, smallest_range_depth)| {
3769 if layer.depth > *smallest_range_depth {
3770 true
3771 } else if layer.depth == *smallest_range_depth {
3772 range.len() < smallest_range.len()
3773 } else {
3774 false
3775 }
3776 },
3777 )
3778 {
3779 smallest_range_and_depth = Some((range, layer.depth));
3780 scope = Some(LanguageScope {
3781 language: layer.language.clone(),
3782 override_id: layer.override_id(offset, &self.text),
3783 });
3784 }
3785 }
3786
3787 scope.or_else(|| {
3788 self.language.clone().map(|language| LanguageScope {
3789 language,
3790 override_id: None,
3791 })
3792 })
3793 }
3794
3795 /// Returns a tuple of the range and character kind of the word
3796 /// surrounding the given position.
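///
/// A sketch (not compiled as a doctest); `snapshot` and `cursor_offset` are
/// hypothetical values supplied by the caller:
///
/// ```ignore
/// let (word_range, kind) = snapshot.surrounding_word(cursor_offset, None);
/// if kind == Some(CharKind::Word) {
///     let word: String = snapshot.text_for_range(word_range).collect();
/// }
/// ```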
3797 pub fn surrounding_word<T: ToOffset>(
3798 &self,
3799 start: T,
3800 scope_context: Option<CharScopeContext>,
3801 ) -> (Range<usize>, Option<CharKind>) {
3802 let mut start = start.to_offset(self);
3803 let mut end = start;
3804 let mut next_chars = self.chars_at(start).take(128).peekable();
3805 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3806
3807 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3808 let word_kind = cmp::max(
3809 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3810 next_chars.peek().copied().map(|c| classifier.kind(c)),
3811 );
3812
3813 for ch in prev_chars {
3814 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3815 start -= ch.len_utf8();
3816 } else {
3817 break;
3818 }
3819 }
3820
3821 for ch in next_chars {
3822 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3823 end += ch.len_utf8();
3824 } else {
3825 break;
3826 }
3827 }
3828
3829 (start..end, word_kind)
3830 }
3831
3832 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3833 /// range. When `require_larger` is true, the node found must be larger than the query range.
3834 ///
3835 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3836 /// be moved to the root of the tree.
3837 fn goto_node_enclosing_range(
3838 cursor: &mut tree_sitter::TreeCursor,
3839 query_range: &Range<usize>,
3840 require_larger: bool,
3841 ) -> bool {
3842 let mut ascending = false;
3843 loop {
3844 let mut range = cursor.node().byte_range();
3845 if query_range.is_empty() {
3846 // When the query range is empty and the current node starts after it, move to the
3847 // previous sibling to find the containing node.
3848 if range.start > query_range.start {
3849 cursor.goto_previous_sibling();
3850 range = cursor.node().byte_range();
3851 }
3852 } else {
3853 // When the query range is non-empty and the current node ends exactly at the start,
3854 // move to the next sibling to find a node that extends beyond the start.
3855 if range.end == query_range.start {
3856 cursor.goto_next_sibling();
3857 range = cursor.node().byte_range();
3858 }
3859 }
3860
3861 let encloses = range.contains_inclusive(query_range)
3862 && (!require_larger || range.len() > query_range.len());
3863 if !encloses {
3864 ascending = true;
3865 if !cursor.goto_parent() {
3866 return false;
3867 }
3868 continue;
3869 } else if ascending {
3870 return true;
3871 }
3872
3873 // Descend into the current node.
3874 if cursor
3875 .goto_first_child_for_byte(query_range.start)
3876 .is_none()
3877 {
3878 return true;
3879 }
3880 }
3881 }
3882
3883 pub fn syntax_ancestor<'a, T: ToOffset>(
3884 &'a self,
3885 range: Range<T>,
3886 ) -> Option<tree_sitter::Node<'a>> {
3887 let range = range.start.to_offset(self)..range.end.to_offset(self);
3888 let mut result: Option<tree_sitter::Node<'a>> = None;
3889 for layer in self
3890 .syntax
3891 .layers_for_range(range.clone(), &self.text, true)
3892 {
3893 let mut cursor = layer.node().walk();
3894
3895 // Find the node that both contains the range and is larger than it.
3896 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3897 continue;
3898 }
3899
3900 let left_node = cursor.node();
3901 let mut layer_result = left_node;
3902
3903 // For an empty range, try to find another node immediately to the right of the range.
3904 if left_node.end_byte() == range.start {
3905 let mut right_node = None;
3906 while !cursor.goto_next_sibling() {
3907 if !cursor.goto_parent() {
3908 break;
3909 }
3910 }
3911
3912 while cursor.node().start_byte() == range.start {
3913 right_node = Some(cursor.node());
3914 if !cursor.goto_first_child() {
3915 break;
3916 }
3917 }
3918
3919 // If there is a candidate node on both sides of the (empty) range, then
3920 // decide between the two by favoring a named node over an anonymous token.
3921 // If both nodes are the same in that regard, favor the right one.
3922 if let Some(right_node) = right_node
3923 && (right_node.is_named() || !left_node.is_named())
3924 {
3925 layer_result = right_node;
3926 }
3927 }
3928
3929 if let Some(previous_result) = &result
3930 && previous_result.byte_range().len() < layer_result.byte_range().len()
3931 {
3932 continue;
3933 }
3934 result = Some(layer_result);
3935 }
3936
3937 result
3938 }
3939
3940 /// Find the previous sibling syntax node at the given range.
3941 ///
3942 /// This function locates the syntax node that precedes the node containing
3943 /// the given range. It searches hierarchically by:
3944 /// 1. Finding the node that contains the given range
3945 /// 2. Looking for the previous sibling at the same tree level
3946 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3947 ///
3948 /// Returns `None` if there is no previous sibling at any ancestor level.
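///
/// A sketch (hypothetical `snapshot` and `selection_range`):
///
/// ```ignore
/// if let Some(prev) = snapshot.syntax_prev_sibling(selection_range.clone()) {
///     // `prev` is a tree-sitter node; e.g. select or inspect its byte range.
///     let prev_range = prev.byte_range();
/// }
/// ```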
3949 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3950 &'a self,
3951 range: Range<T>,
3952 ) -> Option<tree_sitter::Node<'a>> {
3953 let range = range.start.to_offset(self)..range.end.to_offset(self);
3954 let mut result: Option<tree_sitter::Node<'a>> = None;
3955
3956 for layer in self
3957 .syntax
3958 .layers_for_range(range.clone(), &self.text, true)
3959 {
3960 let mut cursor = layer.node().walk();
3961
3962 // Find the node that contains the range
3963 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3964 continue;
3965 }
3966
3967 // Look for the previous sibling, moving up ancestor levels if needed
3968 loop {
3969 if cursor.goto_previous_sibling() {
3970 let layer_result = cursor.node();
3971
3972 if let Some(previous_result) = &result {
3973 if previous_result.byte_range().end < layer_result.byte_range().end {
3974 continue;
3975 }
3976 }
3977 result = Some(layer_result);
3978 break;
3979 }
3980
3981 // No sibling found at this level, try moving up to parent
3982 if !cursor.goto_parent() {
3983 break;
3984 }
3985 }
3986 }
3987
3988 result
3989 }
3990
3991 /// Find the next sibling syntax node at the given range.
3992 ///
3993 /// This function locates the syntax node that follows the node containing
3994 /// the given range. It searches hierarchically by:
3995 /// 1. Finding the node that contains the given range
3996 /// 2. Looking for the next sibling at the same tree level
3997 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3998 ///
3999 /// Returns `None` if there is no next sibling at any ancestor level.
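///
/// Mirrors [`Self::syntax_prev_sibling`]; a sketch with hypothetical caller values:
///
/// ```ignore
/// let next_range = snapshot
///     .syntax_next_sibling(selection_range.clone())
///     .map(|node| node.byte_range());
/// ```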
4000 pub fn syntax_next_sibling<'a, T: ToOffset>(
4001 &'a self,
4002 range: Range<T>,
4003 ) -> Option<tree_sitter::Node<'a>> {
4004 let range = range.start.to_offset(self)..range.end.to_offset(self);
4005 let mut result: Option<tree_sitter::Node<'a>> = None;
4006
4007 for layer in self
4008 .syntax
4009 .layers_for_range(range.clone(), &self.text, true)
4010 {
4011 let mut cursor = layer.node().walk();
4012
4013 // Find the node that contains the range
4014 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
4015 continue;
4016 }
4017
4018 // Look for the next sibling, moving up ancestor levels if needed
4019 loop {
4020 if cursor.goto_next_sibling() {
4021 let layer_result = cursor.node();
4022
4023 if let Some(previous_result) = &result {
4024 if previous_result.byte_range().start > layer_result.byte_range().start {
4025 continue;
4026 }
4027 }
4028 result = Some(layer_result);
4029 break;
4030 }
4031
4032 // No sibling found at this level, try moving up to parent
4033 if !cursor.goto_parent() {
4034 break;
4035 }
4036 }
4037 }
4038
4039 result
4040 }
4041
4042 /// Returns the root syntax node within the given row
4043 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
4044 let start_offset = position.to_offset(self);
4045
4046 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
4047
4048 let layer = self
4049 .syntax
4050 .layers_for_range(start_offset..start_offset, &self.text, true)
4051 .next()?;
4052
4053 let mut cursor = layer.node().walk();
4054
4055 // Descend to the first leaf that touches the start of the range.
4056 while cursor.goto_first_child_for_byte(start_offset).is_some() {
4057 if cursor.node().end_byte() == start_offset {
4058 cursor.goto_next_sibling();
4059 }
4060 }
4061
4062 // Ascend to the root node within the same row.
4063 while cursor.goto_parent() {
4064 if cursor.node().start_position().row != row {
4065 break;
4066 }
4067 }
4068
4069 Some(cursor.node())
4070 }
4071
4072 /// Returns the outline for the buffer.
4073 ///
4074 /// This method allows passing an optional [`SyntaxTheme`] to
4075 /// syntax-highlight the returned symbols.
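///
/// A sketch of walking the result (hypothetical `snapshot`; assumes the returned
/// [`Outline`]'s items are accessible as `outline.items`):
///
/// ```ignore
/// let outline = snapshot.outline(None);
/// for item in &outline.items {
///     println!("{}{}", "  ".repeat(item.depth), item.text);
/// }
/// ```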
4076 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4077 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4078 }
4079
4080 /// Returns all the symbols that contain the given position.
4081 ///
4082 /// This method allows passing an optional [`SyntaxTheme`] to
4083 /// syntax-highlight the returned symbols.
4084 pub fn symbols_containing<T: ToOffset>(
4085 &self,
4086 position: T,
4087 theme: Option<&SyntaxTheme>,
4088 ) -> Vec<OutlineItem<Anchor>> {
4089 let position = position.to_offset(self);
4090 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4091 let end = self.clip_offset(position + 1, Bias::Right);
4092 let mut items = self.outline_items_containing(start..end, false, theme);
4093 let mut prev_depth = None;
4094 items.retain(|item| {
4095 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4096 prev_depth = Some(item.depth);
4097 result
4098 });
4099 items
4100 }
4101
4102 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4103 let range = range.to_offset(self);
4104 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4105 grammar.outline_config.as_ref().map(|c| &c.query)
4106 });
4107 let configs = matches
4108 .grammars()
4109 .iter()
4110 .map(|g| g.outline_config.as_ref().unwrap())
4111 .collect::<Vec<_>>();
4112
4113 while let Some(mat) = matches.peek() {
4114 let config = &configs[mat.grammar_index];
4115 let containing_item_node = maybe!({
4116 let item_node = mat.captures.iter().find_map(|cap| {
4117 if cap.index == config.item_capture_ix {
4118 Some(cap.node)
4119 } else {
4120 None
4121 }
4122 })?;
4123
4124 let item_byte_range = item_node.byte_range();
4125 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4126 None
4127 } else {
4128 Some(item_node)
4129 }
4130 });
4131
4132 if let Some(item_node) = containing_item_node {
4133 return Some(
4134 Point::from_ts_point(item_node.start_position())
4135 ..Point::from_ts_point(item_node.end_position()),
4136 );
4137 }
4138
4139 matches.advance();
4140 }
4141 None
4142 }
4143
4144 pub fn outline_items_containing<T: ToOffset>(
4145 &self,
4146 range: Range<T>,
4147 include_extra_context: bool,
4148 theme: Option<&SyntaxTheme>,
4149 ) -> Vec<OutlineItem<Anchor>> {
4150 self.outline_items_containing_internal(
4151 range,
4152 include_extra_context,
4153 theme,
4154 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4155 )
4156 }
4157
4158 pub fn outline_items_as_points_containing<T: ToOffset>(
4159 &self,
4160 range: Range<T>,
4161 include_extra_context: bool,
4162 theme: Option<&SyntaxTheme>,
4163 ) -> Vec<OutlineItem<Point>> {
4164 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4165 range
4166 })
4167 }
4168
4169 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4170 &self,
4171 range: Range<T>,
4172 include_extra_context: bool,
4173 theme: Option<&SyntaxTheme>,
4174 ) -> Vec<OutlineItem<usize>> {
4175 self.outline_items_containing_internal(
4176 range,
4177 include_extra_context,
4178 theme,
4179 |buffer, range| range.to_offset(buffer),
4180 )
4181 }
4182
4183 fn outline_items_containing_internal<T: ToOffset, U>(
4184 &self,
4185 range: Range<T>,
4186 include_extra_context: bool,
4187 theme: Option<&SyntaxTheme>,
4188 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4189 ) -> Vec<OutlineItem<U>> {
4190 let range = range.to_offset(self);
4191 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4192 grammar.outline_config.as_ref().map(|c| &c.query)
4193 });
4194
4195 let mut items = Vec::new();
4196 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4197 while let Some(mat) = matches.peek() {
4198 let config = matches.grammars()[mat.grammar_index]
4199 .outline_config
4200 .as_ref()
4201 .unwrap();
4202 if let Some(item) =
4203 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4204 {
4205 items.push(item);
4206 } else if let Some(capture) = mat
4207 .captures
4208 .iter()
4209 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4210 {
4211 let capture_range = capture.node.start_position()..capture.node.end_position();
4212 let mut capture_row_range =
4213 capture_range.start.row as u32..capture_range.end.row as u32;
4214 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4215 {
4216 capture_row_range.end -= 1;
4217 }
4218 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4219 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4220 last_row_range.end = capture_row_range.end;
4221 } else {
4222 annotation_row_ranges.push(capture_row_range);
4223 }
4224 } else {
4225 annotation_row_ranges.push(capture_row_range);
4226 }
4227 }
4228 matches.advance();
4229 }
4230
4231 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4232
4233 // Assign depths based on containment relationships and convert ranges via the provided callback.
4234 let mut item_ends_stack = Vec::<Point>::new();
4235 let mut anchor_items = Vec::new();
4236 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4237 for item in items {
4238 while let Some(last_end) = item_ends_stack.last().copied() {
4239 if last_end < item.range.end {
4240 item_ends_stack.pop();
4241 } else {
4242 break;
4243 }
4244 }
4245
4246 let mut annotation_row_range = None;
4247 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4248 let row_preceding_item = item.range.start.row.saturating_sub(1);
4249 if next_annotation_row_range.end < row_preceding_item {
4250 annotation_row_ranges.next();
4251 } else {
4252 if next_annotation_row_range.end == row_preceding_item {
4253 annotation_row_range = Some(next_annotation_row_range.clone());
4254 annotation_row_ranges.next();
4255 }
4256 break;
4257 }
4258 }
4259
4260 anchor_items.push(OutlineItem {
4261 depth: item_ends_stack.len(),
4262 range: range_callback(self, item.range.clone()),
4263 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4264 text: item.text,
4265 highlight_ranges: item.highlight_ranges,
4266 name_ranges: item.name_ranges,
4267 body_range: item.body_range.map(|r| range_callback(self, r)),
4268 annotation_range: annotation_row_range.map(|annotation_range| {
4269 let point_range = Point::new(annotation_range.start, 0)
4270 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4271 range_callback(self, point_range)
4272 }),
4273 });
4274 item_ends_stack.push(item.range.end);
4275 }
4276
4277 anchor_items
4278 }
4279
4280 fn next_outline_item(
4281 &self,
4282 config: &OutlineConfig,
4283 mat: &SyntaxMapMatch,
4284 range: &Range<usize>,
4285 include_extra_context: bool,
4286 theme: Option<&SyntaxTheme>,
4287 ) -> Option<OutlineItem<Point>> {
4288 let item_node = mat.captures.iter().find_map(|cap| {
4289 if cap.index == config.item_capture_ix {
4290 Some(cap.node)
4291 } else {
4292 None
4293 }
4294 })?;
4295
4296 let item_byte_range = item_node.byte_range();
4297 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4298 return None;
4299 }
4300 let item_point_range = Point::from_ts_point(item_node.start_position())
4301 ..Point::from_ts_point(item_node.end_position());
4302
4303 let mut open_point = None;
4304 let mut close_point = None;
4305
4306 let mut buffer_ranges = Vec::new();
4307 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4308 let mut range = node.start_byte()..node.end_byte();
4309 let start = node.start_position();
4310 if node.end_position().row > start.row {
4311 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4312 }
4313
4314 if !range.is_empty() {
4315 buffer_ranges.push((range, node_is_name));
4316 }
4317 };
4318
4319 for capture in mat.captures {
4320 if capture.index == config.name_capture_ix {
4321 add_to_buffer_ranges(capture.node, true);
4322 } else if Some(capture.index) == config.context_capture_ix
4323 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4324 {
4325 add_to_buffer_ranges(capture.node, false);
4326 } else {
4327 if Some(capture.index) == config.open_capture_ix {
4328 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4329 } else if Some(capture.index) == config.close_capture_ix {
4330 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4331 }
4332 }
4333 }
4334
4335 if buffer_ranges.is_empty() {
4336 return None;
4337 }
4338 let source_range_for_text =
4339 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4340
4341 let mut text = String::new();
4342 let mut highlight_ranges = Vec::new();
4343 let mut name_ranges = Vec::new();
4344 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4345 let mut last_buffer_range_end = 0;
4346 for (buffer_range, is_name) in buffer_ranges {
4347 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4348 if space_added {
4349 text.push(' ');
4350 }
4351 let before_append_len = text.len();
4352 let mut offset = buffer_range.start;
4353 chunks.seek(buffer_range.clone());
4354 for mut chunk in chunks.by_ref() {
4355 if chunk.text.len() > buffer_range.end - offset {
4356 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4357 offset = buffer_range.end;
4358 } else {
4359 offset += chunk.text.len();
4360 }
4361 let style = chunk
4362 .syntax_highlight_id
4363 .zip(theme)
4364 .and_then(|(highlight, theme)| highlight.style(theme));
4365 if let Some(style) = style {
4366 let start = text.len();
4367 let end = start + chunk.text.len();
4368 highlight_ranges.push((start..end, style));
4369 }
4370 text.push_str(chunk.text);
4371 if offset >= buffer_range.end {
4372 break;
4373 }
4374 }
4375 if is_name {
4376 let after_append_len = text.len();
4377 let start = if space_added && !name_ranges.is_empty() {
4378 before_append_len - 1
4379 } else {
4380 before_append_len
4381 };
4382 name_ranges.push(start..after_append_len);
4383 }
4384 last_buffer_range_end = buffer_range.end;
4385 }
4386
4387 Some(OutlineItem {
4388 depth: 0, // We'll calculate the depth later
4389 range: item_point_range,
4390 source_range_for_text: source_range_for_text.to_point(self),
4391 text,
4392 highlight_ranges,
4393 name_ranges,
4394 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4395 annotation_range: None,
4396 })
4397 }
4398
4399 pub fn function_body_fold_ranges<T: ToOffset>(
4400 &self,
4401 within: Range<T>,
4402 ) -> impl Iterator<Item = Range<usize>> + '_ {
4403 self.text_object_ranges(within, TreeSitterOptions::default())
4404 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4405 }
4406
4407 /// For each grammar in the language, runs the provided
4408 /// [`tree_sitter::Query`] against the given range.
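///
/// The query selector must be a plain `fn`; for example, selecting each grammar's
/// outline query (a sketch with a hypothetical `snapshot`):
///
/// ```ignore
/// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
///     grammar.outline_config.as_ref().map(|config| &config.query)
/// });
/// while let Some(mat) = matches.peek() {
///     // Inspect `mat.captures` here.
///     matches.advance();
/// }
/// ```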
4409 pub fn matches(
4410 &self,
4411 range: Range<usize>,
4412 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4413 ) -> SyntaxMapMatches<'_> {
4414 self.syntax.matches(range, self, query)
4415 }
4416
4417 /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
4418 /// Hence, it may return more bracket pairs than the range itself contains.
4419 ///
4420 /// Chunks present in `known_chunks` are skipped.
4421 /// The resulting bracket match collections are not ordered.
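///
/// A sketch of incremental use, where the caller maintains a `known_chunks` cache
/// (all names here are hypothetical):
///
/// ```ignore
/// let new_matches = snapshot.fetch_bracket_ranges(visible_range.clone(), Some(&known_chunks));
/// for (row_range, brackets) in new_matches {
///     // `brackets` holds the unordered `BracketMatch` values for this chunk.
///     known_chunks.insert(row_range);
/// }
/// ```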
4422 pub fn fetch_bracket_ranges(
4423 &self,
4424 range: Range<usize>,
4425 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4426 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4427 let mut all_bracket_matches = HashMap::default();
4428
4429 for chunk in self
4430 .tree_sitter_data
4431 .chunks
4432 .applicable_chunks(&[range.to_point(self)])
4433 {
4434 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4435 continue;
4436 }
4437 let chunk_range = chunk.anchor_range();
4438 let chunk_range = chunk_range.to_offset(&self);
4439
4440 if let Some(cached_brackets) =
4441 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4442 {
4443 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4444 continue;
4445 }
4446
4447 let mut all_brackets = Vec::new();
4448 let mut opens = Vec::new();
4449 let mut color_pairs = Vec::new();
4450
4451 let mut matches = self.syntax.matches_with_options(
4452 chunk_range.clone(),
4453 &self.text,
4454 TreeSitterOptions {
4455 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4456 max_start_depth: None,
4457 },
4458 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4459 );
4460 let configs = matches
4461 .grammars()
4462 .iter()
4463 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4464 .collect::<Vec<_>>();
4465
4466 while let Some(mat) = matches.peek() {
4467 let mut open = None;
4468 let mut close = None;
4469 let syntax_layer_depth = mat.depth;
4470 let config = configs[mat.grammar_index];
4471 let pattern = &config.patterns[mat.pattern_index];
4472 for capture in mat.captures {
4473 if capture.index == config.open_capture_ix {
4474 open = Some(capture.node.byte_range());
4475 } else if capture.index == config.close_capture_ix {
4476 close = Some(capture.node.byte_range());
4477 }
4478 }
4479
4480 matches.advance();
4481
4482 let Some((open_range, close_range)) = open.zip(close) else {
4483 continue;
4484 };
4485
4486 let bracket_range = open_range.start..=close_range.end;
4487 if !bracket_range.overlaps(&chunk_range) {
4488 continue;
4489 }
4490
4491 let index = all_brackets.len();
4492 all_brackets.push(BracketMatch {
4493 open_range: open_range.clone(),
4494 close_range: close_range.clone(),
4495 newline_only: pattern.newline_only,
4496 syntax_layer_depth,
4497 color_index: None,
4498 });
4499
4500 // Certain languages have "brackets" that are not actually brackets, e.g. tags, and such a
4501 // pair will match the entire tag with all of the text inside it.
4502 // For now, avoid coloring any pair where both brackets are longer than a single character.
4503 // We still need to colorize `<Element/>` bracket pairs, so this check cannot be made stricter.
4504 let should_color =
4505 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4506 if should_color {
4507 opens.push(open_range.clone());
4508 color_pairs.push((open_range, close_range, index));
4509 }
4510 }
4511
4512 opens.sort_by_key(|r| (r.start, r.end));
4513 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4514 color_pairs.sort_by_key(|(_, close, _)| close.end);
4515
4516 let mut open_stack = Vec::new();
4517 let mut open_index = 0;
4518 for (open, close, index) in color_pairs {
4519 while open_index < opens.len() && opens[open_index].start < close.start {
4520 open_stack.push(opens[open_index].clone());
4521 open_index += 1;
4522 }
4523
4524 if open_stack.last() == Some(&open) {
4525 let depth_index = open_stack.len() - 1;
4526 all_brackets[index].color_index = Some(depth_index);
4527 open_stack.pop();
4528 }
4529 }
4530
4531 all_brackets.sort_by_key(|bracket_match| {
4532 (bracket_match.open_range.start, bracket_match.open_range.end)
4533 });
4534
4535 if let empty_slot @ None =
4536 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4537 {
4538 *empty_slot = Some(all_brackets.clone());
4539 }
4540 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4541 }
4542
4543 all_bracket_matches
4544 }
4545
4546 pub fn all_bracket_ranges(
4547 &self,
4548 range: Range<usize>,
4549 ) -> impl Iterator<Item = BracketMatch<usize>> {
4550 self.fetch_bracket_ranges(range.clone(), None)
4551 .into_values()
4552 .flatten()
4553 .filter(move |bracket_match| {
4554 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4555 bracket_range.overlaps(&range)
4556 })
4557 }
4558
4559 /// Returns bracket range pairs overlapping or adjacent to `range`
4560 pub fn bracket_ranges<T: ToOffset>(
4561 &self,
4562 range: Range<T>,
4563 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4564 // Find bracket pairs that *inclusively* contain the given range.
4565 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4566 self.all_bracket_ranges(range)
4567 .filter(|pair| !pair.newline_only)
4568 }
4569
4570 pub fn debug_variables_query<T: ToOffset>(
4571 &self,
4572 range: Range<T>,
4573 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4574 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4575
4576 let mut matches = self.syntax.matches_with_options(
4577 range.clone(),
4578 &self.text,
4579 TreeSitterOptions::default(),
4580 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4581 );
4582
4583 let configs = matches
4584 .grammars()
4585 .iter()
4586 .map(|grammar| grammar.debug_variables_config.as_ref())
4587 .collect::<Vec<_>>();
4588
4589 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4590
4591 iter::from_fn(move || {
4592 loop {
4593 while let Some(capture) = captures.pop() {
4594 if capture.0.overlaps(&range) {
4595 return Some(capture);
4596 }
4597 }
4598
4599 let mat = matches.peek()?;
4600
4601 let Some(config) = configs[mat.grammar_index].as_ref() else {
4602 matches.advance();
4603 continue;
4604 };
4605
4606 for capture in mat.captures {
4607 let Some(ix) = config
4608 .objects_by_capture_ix
4609 .binary_search_by_key(&capture.index, |e| e.0)
4610 .ok()
4611 else {
4612 continue;
4613 };
4614 let text_object = config.objects_by_capture_ix[ix].1;
4615 let byte_range = capture.node.byte_range();
4616
4617 let mut found = false;
4618 for (range, existing) in captures.iter_mut() {
4619 if existing == &text_object {
4620 range.start = range.start.min(byte_range.start);
4621 range.end = range.end.max(byte_range.end);
4622 found = true;
4623 break;
4624 }
4625 }
4626
4627 if !found {
4628 captures.push((byte_range, text_object));
4629 }
4630 }
4631
4632 matches.advance();
4633 }
4634 })
4635 }
4636
4637 pub fn text_object_ranges<T: ToOffset>(
4638 &self,
4639 range: Range<T>,
4640 options: TreeSitterOptions,
4641 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4642 let range =
4643 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4644
4645 let mut matches =
4646 self.syntax
4647 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4648 grammar.text_object_config.as_ref().map(|c| &c.query)
4649 });
4650
4651 let configs = matches
4652 .grammars()
4653 .iter()
4654 .map(|grammar| grammar.text_object_config.as_ref())
4655 .collect::<Vec<_>>();
4656
4657 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4658
4659 iter::from_fn(move || {
4660 loop {
4661 while let Some(capture) = captures.pop() {
4662 if capture.0.overlaps(&range) {
4663 return Some(capture);
4664 }
4665 }
4666
4667 let mat = matches.peek()?;
4668
4669 let Some(config) = configs[mat.grammar_index].as_ref() else {
4670 matches.advance();
4671 continue;
4672 };
4673
4674 for capture in mat.captures {
4675 let Some(ix) = config
4676 .text_objects_by_capture_ix
4677 .binary_search_by_key(&capture.index, |e| e.0)
4678 .ok()
4679 else {
4680 continue;
4681 };
4682 let text_object = config.text_objects_by_capture_ix[ix].1;
4683 let byte_range = capture.node.byte_range();
4684
4685 let mut found = false;
4686 for (range, existing) in captures.iter_mut() {
4687 if existing == &text_object {
4688 range.start = range.start.min(byte_range.start);
4689 range.end = range.end.max(byte_range.end);
4690 found = true;
4691 break;
4692 }
4693 }
4694
4695 if !found {
4696 captures.push((byte_range, text_object));
4697 }
4698 }
4699
4700 matches.advance();
4701 }
4702 })
4703 }
4704
4705 /// Returns enclosing bracket ranges containing the given range
4706 pub fn enclosing_bracket_ranges<T: ToOffset>(
4707 &self,
4708 range: Range<T>,
4709 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4710 let range = range.start.to_offset(self)..range.end.to_offset(self);
4711
4712 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4713 let max_depth = result
4714 .iter()
4715 .map(|mat| mat.syntax_layer_depth)
4716 .max()
4717 .unwrap_or(0);
4718 result.into_iter().filter(move |pair| {
4719 pair.open_range.start <= range.start
4720 && pair.close_range.end >= range.end
4721 && pair.syntax_layer_depth == max_depth
4722 })
4723 }
4724
4725 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no bracket pair contains it.
4726 ///
4727 /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
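///
/// A sketch that only considers pairs with content between the brackets
/// (hypothetical `snapshot` and `selection`):
///
/// ```ignore
/// let filter = |open: Range<usize>, close: Range<usize>| close.start > open.end;
/// let innermost = snapshot.innermost_enclosing_bracket_ranges(selection.clone(), Some(&filter));
/// ```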
4728 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4729 &self,
4730 range: Range<T>,
4731 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4732 ) -> Option<(Range<usize>, Range<usize>)> {
4733 let range = range.start.to_offset(self)..range.end.to_offset(self);
4734
4735 // Get the ranges of the innermost pair of brackets.
4736 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4737
4738 for pair in self.enclosing_bracket_ranges(range) {
4739 if let Some(range_filter) = range_filter
4740 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4741 {
4742 continue;
4743 }
4744
4745 let len = pair.close_range.end - pair.open_range.start;
4746
4747 if let Some((existing_open, existing_close)) = &result {
4748 let existing_len = existing_close.end - existing_open.start;
4749 if len > existing_len {
4750 continue;
4751 }
4752 }
4753
4754 result = Some((pair.open_range, pair.close_range));
4755 }
4756
4757 result
4758 }
4759
4760 /// Returns offset ranges for any matches of the redaction query.
4761 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4762 /// will be run on the relevant section of the buffer.
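///
/// A sketch (hypothetical `snapshot`); callers typically mask these ranges when rendering:
///
/// ```ignore
/// for redacted in snapshot.redacted_ranges(0..snapshot.len()) {
///     // `redacted` is a byte range whose contents should not be displayed verbatim.
/// }
/// ```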
4763 pub fn redacted_ranges<T: ToOffset>(
4764 &self,
4765 range: Range<T>,
4766 ) -> impl Iterator<Item = Range<usize>> + '_ {
4767 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4768 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4769 grammar
4770 .redactions_config
4771 .as_ref()
4772 .map(|config| &config.query)
4773 });
4774
4775 let configs = syntax_matches
4776 .grammars()
4777 .iter()
4778 .map(|grammar| grammar.redactions_config.as_ref())
4779 .collect::<Vec<_>>();
4780
4781 iter::from_fn(move || {
4782 let redacted_range = syntax_matches
4783 .peek()
4784 .and_then(|mat| {
4785 configs[mat.grammar_index].and_then(|config| {
4786 mat.captures
4787 .iter()
4788 .find(|capture| capture.index == config.redaction_capture_ix)
4789 })
4790 })
4791 .map(|mat| mat.node.byte_range());
4792 syntax_matches.advance();
4793 redacted_range
4794 })
4795 }
4796
4797 pub fn injections_intersecting_range<T: ToOffset>(
4798 &self,
4799 range: Range<T>,
4800 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4801 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4802
4803 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4804 grammar
4805 .injection_config
4806 .as_ref()
4807 .map(|config| &config.query)
4808 });
4809
4810 let configs = syntax_matches
4811 .grammars()
4812 .iter()
4813 .map(|grammar| grammar.injection_config.as_ref())
4814 .collect::<Vec<_>>();
4815
4816 iter::from_fn(move || {
4817 let ranges = syntax_matches.peek().and_then(|mat| {
4818 let config = &configs[mat.grammar_index]?;
4819 let content_capture_range = mat.captures.iter().find_map(|capture| {
4820 if capture.index == config.content_capture_ix {
4821 Some(capture.node.byte_range())
4822 } else {
4823 None
4824 }
4825 })?;
4826 let language = self.language_at(content_capture_range.start)?;
4827 Some((content_capture_range, language))
4828 });
4829 syntax_matches.advance();
4830 ranges
4831 })
4832 }
4833
4834 pub fn runnable_ranges(
4835 &self,
4836 offset_range: Range<usize>,
4837 ) -> impl Iterator<Item = RunnableRange> + '_ {
4838 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4839 grammar.runnable_config.as_ref().map(|config| &config.query)
4840 });
4841
4842 let test_configs = syntax_matches
4843 .grammars()
4844 .iter()
4845 .map(|grammar| grammar.runnable_config.as_ref())
4846 .collect::<Vec<_>>();
4847
4848 iter::from_fn(move || {
4849 loop {
4850 let mat = syntax_matches.peek()?;
4851
4852 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4853 let mut run_range = None;
4854 let full_range = mat.captures.iter().fold(
4855 Range {
4856 start: usize::MAX,
4857 end: 0,
4858 },
4859 |mut acc, next| {
4860 let byte_range = next.node.byte_range();
4861 if acc.start > byte_range.start {
4862 acc.start = byte_range.start;
4863 }
4864 if acc.end < byte_range.end {
4865 acc.end = byte_range.end;
4866 }
4867 acc
4868 },
4869 );
4870 if full_range.start > full_range.end {
4871 // We did not find a full spanning range of this match.
4872 return None;
4873 }
4874 let extra_captures: SmallVec<[_; 1]> =
4875 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4876 test_configs
4877 .extra_captures
4878 .get(capture.index as usize)
4879 .cloned()
4880 .and_then(|tag_name| match tag_name {
4881 RunnableCapture::Named(name) => {
4882 Some((capture.node.byte_range(), name))
4883 }
4884 RunnableCapture::Run => {
4885 let _ = run_range.insert(capture.node.byte_range());
4886 None
4887 }
4888 })
4889 }));
4890 let run_range = run_range?;
4891 let tags = test_configs
4892 .query
4893 .property_settings(mat.pattern_index)
4894 .iter()
4895 .filter_map(|property| {
4896 if *property.key == *"tag" {
4897 property
4898 .value
4899 .as_ref()
4900 .map(|value| RunnableTag(value.to_string().into()))
4901 } else {
4902 None
4903 }
4904 })
4905 .collect();
4906 let extra_captures = extra_captures
4907 .into_iter()
4908 .map(|(range, name)| {
4909 (
4910 name.to_string(),
4911 self.text_for_range(range).collect::<String>(),
4912 )
4913 })
4914 .collect();
4915 // All tags should have the same range.
4916 Some(RunnableRange {
4917 run_range,
4918 full_range,
4919 runnable: Runnable {
4920 tags,
4921 language: mat.language,
4922 buffer: self.remote_id(),
4923 },
4924 extra_captures,
4925 buffer_id: self.remote_id(),
4926 })
4927 });
4928
4929 syntax_matches.advance();
4930 if test_range.is_some() {
4931 // It's fine to short-circuit when `.peek()?` returns `None`. However, if a match did not contain
4932 // a run marker, we don't want to return `None` from this iterator; instead we loop around to the next match.
4933 return test_range;
4934 }
4935 }
4936 })
4937 }
4938
4939 /// Returns selections for remote peers intersecting the given range.
4940 #[allow(clippy::type_complexity)]
4941 pub fn selections_in_range(
4942 &self,
4943 range: Range<Anchor>,
4944 include_local: bool,
4945 ) -> impl Iterator<
4946 Item = (
4947 ReplicaId,
4948 bool,
4949 CursorShape,
4950 impl Iterator<Item = &Selection<Anchor>> + '_,
4951 ),
4952 > + '_ {
4953 self.remote_selections
4954 .iter()
4955 .filter(move |(replica_id, set)| {
4956 (include_local || **replica_id != self.text.replica_id())
4957 && !set.selections.is_empty()
4958 })
4959 .map(move |(replica_id, set)| {
4960 let start_ix = match set.selections.binary_search_by(|probe| {
4961 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4962 }) {
4963 Ok(ix) | Err(ix) => ix,
4964 };
4965 let end_ix = match set.selections.binary_search_by(|probe| {
4966 probe.start.cmp(&range.end, self).then(Ordering::Less)
4967 }) {
4968 Ok(ix) | Err(ix) => ix,
4969 };
4970
4971 (
4972 *replica_id,
4973 set.line_mode,
4974 set.cursor_shape,
4975 set.selections[start_ix..end_ix].iter(),
4976 )
4977 })
4978 }
4979
4980 /// Returns whether the buffer contains any diagnostics.
4981 pub fn has_diagnostics(&self) -> bool {
4982 !self.diagnostics.is_empty()
4983 }
4984
4985 /// Returns all the diagnostics intersecting the given range.
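///
/// A sketch listing error diagnostics as point ranges (hypothetical `snapshot`):
///
/// ```ignore
/// for entry in snapshot.diagnostics_in_range::<_, Point>(0..snapshot.len(), false) {
///     if entry.diagnostic.severity == DiagnosticSeverity::ERROR {
///         eprintln!("error at {:?}", entry.range);
///     }
/// }
/// ```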
4986 pub fn diagnostics_in_range<'a, T, O>(
4987 &'a self,
4988 search_range: Range<T>,
4989 reversed: bool,
4990 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4991 where
4992 T: 'a + Clone + ToOffset,
4993 O: 'a + FromAnchor,
4994 {
4995 let mut iterators: Vec<_> = self
4996 .diagnostics
4997 .iter()
4998 .map(|(_, collection)| {
4999 collection
5000 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
5001 .peekable()
5002 })
5003 .collect();
5004
5005 std::iter::from_fn(move || {
5006 let (next_ix, _) = iterators
5007 .iter_mut()
5008 .enumerate()
5009 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
5010 .min_by(|(_, a), (_, b)| {
5011 let cmp = a
5012 .range
5013 .start
5014 .cmp(&b.range.start, self)
5015 // when range is equal, sort by diagnostic severity
5016 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
5017 // and stabilize order with group_id
5018 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
5019 if reversed { cmp.reverse() } else { cmp }
5020 })?;
5021 iterators[next_ix]
5022 .next()
5023 .map(
5024 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
5025 diagnostic,
5026 range: FromAnchor::from_anchor(&range.start, self)
5027 ..FromAnchor::from_anchor(&range.end, self),
5028 },
5029 )
5030 })
5031 }
5032
5033 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
5034 /// should be used instead.
5035 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
5036 &self.diagnostics
5037 }
5038
5039 /// Returns all the diagnostic groups associated with the given
5040 /// language server ID. If no language server ID is provided,
5041 /// all diagnostics groups are returned.
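///
/// A sketch that inspects the primary entry of each group (hypothetical `snapshot`):
///
/// ```ignore
/// for (server_id, group) in snapshot.diagnostic_groups(None) {
///     let primary = &group.entries[group.primary_ix];
///     eprintln!("{:?}: {:?}", server_id, primary.range);
/// }
/// ```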
5042 pub fn diagnostic_groups(
5043 &self,
5044 language_server_id: Option<LanguageServerId>,
5045 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
5046 let mut groups = Vec::new();
5047
5048 if let Some(language_server_id) = language_server_id {
5049 if let Ok(ix) = self
5050 .diagnostics
5051 .binary_search_by_key(&language_server_id, |e| e.0)
5052 {
5053 self.diagnostics[ix]
5054 .1
5055 .groups(language_server_id, &mut groups, self);
5056 }
5057 } else {
5058 for (language_server_id, diagnostics) in self.diagnostics.iter() {
5059 diagnostics.groups(*language_server_id, &mut groups, self);
5060 }
5061 }
5062
5063 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
5064 let a_start = &group_a.entries[group_a.primary_ix].range.start;
5065 let b_start = &group_b.entries[group_b.primary_ix].range.start;
5066 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5067 });
5068
5069 groups
5070 }
5071
5072 /// Returns an iterator over the diagnostics for the given group.
5073 pub fn diagnostic_group<O>(
5074 &self,
5075 group_id: usize,
5076 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5077 where
5078 O: FromAnchor + 'static,
5079 {
5080 self.diagnostics
5081 .iter()
5082 .flat_map(move |(_, set)| set.group(group_id, self))
5083 }
5084
5085 /// An integer version number that accounts for all updates besides
5086 /// the buffer's text itself (which is versioned via a version vector).
5087 pub fn non_text_state_update_count(&self) -> usize {
5088 self.non_text_state_update_count
5089 }
5090
5091 /// An integer version that changes when the buffer's syntax changes.
5092 pub fn syntax_update_count(&self) -> usize {
5093 self.syntax.update_count()
5094 }
5095
5096 /// Returns a snapshot of the underlying file.
5097 pub fn file(&self) -> Option<&Arc<dyn File>> {
5098 self.file.as_ref()
5099 }
5100
5101 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5102 if let Some(file) = self.file() {
5103 if file.path().file_name().is_none() || include_root {
5104 Some(file.full_path(cx).to_string_lossy().into_owned())
5105 } else {
5106 Some(file.path().display(file.path_style(cx)).to_string())
5107 }
5108 } else {
5109 None
5110 }
5111 }
5112
5113 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5114 let query_str = query.fuzzy_contents;
5115 if query_str.is_some_and(|query| query.is_empty()) {
5116 return BTreeMap::default();
5117 }
5118
5119 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5120 language,
5121 override_id: None,
5122 }));
5123
5124 let mut query_ix = 0;
5125 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5126 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5127
5128 let mut words = BTreeMap::default();
5129 let mut current_word_start_ix = None;
5130 let mut chunk_ix = query.range.start;
5131 for chunk in self.chunks(query.range, false) {
5132 for (i, c) in chunk.text.char_indices() {
5133 let ix = chunk_ix + i;
5134 if classifier.is_word(c) {
5135 if current_word_start_ix.is_none() {
5136 current_word_start_ix = Some(ix);
5137 }
5138
5139 if let Some(query_chars) = &query_chars
5140 && query_ix < query_len
5141 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5142 {
5143 query_ix += 1;
5144 }
5145 continue;
5146 } else if let Some(word_start) = current_word_start_ix.take()
5147 && query_ix == query_len
5148 {
5149 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5150 let mut word_text = self.text_for_range(word_start..ix).peekable();
5151 let first_char = word_text
5152 .peek()
5153 .and_then(|first_chunk| first_chunk.chars().next());
5154 // As a heuristic to reduce useless completions, skip empty entries and "words" that start with a digit.
5155 if !query.skip_digits
5156 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5157 {
5158 words.insert(word_text.collect(), word_range);
5159 }
5160 }
5161 query_ix = 0;
5162 }
5163 chunk_ix += chunk.text.len();
5164 }
5165
5166 words
5167 }
5168}
5169
5170pub struct WordsQuery<'a> {
5171 /// Only return words that contain all of the characters of this fuzzy string, in order.
5172 pub fuzzy_contents: Option<&'a str>,
5173 /// Skips words that start with a digit.
5174 pub skip_digits: bool,
5175 /// The buffer offset range in which to look for words.
5176 pub range: Range<usize>,
5177}
5178
5179fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5180 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5181}
5182
5183fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5184 let mut result = IndentSize::spaces(0);
5185 for c in text {
5186 let kind = match c {
5187 ' ' => IndentKind::Space,
5188 '\t' => IndentKind::Tab,
5189 _ => break,
5190 };
5191 if result.len == 0 {
5192 result.kind = kind;
5193 }
5194 result.len += 1;
5195 }
5196 result
5197}
5198
5199impl Clone for BufferSnapshot {
5200 fn clone(&self) -> Self {
5201 Self {
5202 text: self.text.clone(),
5203 syntax: self.syntax.clone(),
5204 file: self.file.clone(),
5205 remote_selections: self.remote_selections.clone(),
5206 diagnostics: self.diagnostics.clone(),
5207 language: self.language.clone(),
5208 tree_sitter_data: self.tree_sitter_data.clone(),
5209 non_text_state_update_count: self.non_text_state_update_count,
5210 capability: self.capability,
5211 }
5212 }
5213}
5214
5215impl Deref for BufferSnapshot {
5216 type Target = text::BufferSnapshot;
5217
5218 fn deref(&self) -> &Self::Target {
5219 &self.text
5220 }
5221}
5222
5223unsafe impl Send for BufferChunks<'_> {}
5224
5225impl<'a> BufferChunks<'a> {
5226 pub(crate) fn new(
5227 text: &'a Rope,
5228 range: Range<usize>,
5229 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5230 diagnostics: bool,
5231 buffer_snapshot: Option<&'a BufferSnapshot>,
5232 ) -> Self {
5233 let mut highlights = None;
5234 if let Some((captures, highlight_maps)) = syntax {
5235 highlights = Some(BufferChunkHighlights {
5236 captures,
5237 next_capture: None,
5238 stack: Default::default(),
5239 highlight_maps,
5240 })
5241 }
5242
5243 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5244 let chunks = text.chunks_in_range(range.clone());
5245
5246 let mut this = BufferChunks {
5247 range,
5248 buffer_snapshot,
5249 chunks,
5250 diagnostic_endpoints,
5251 error_depth: 0,
5252 warning_depth: 0,
5253 information_depth: 0,
5254 hint_depth: 0,
5255 unnecessary_depth: 0,
5256 underline: true,
5257 highlights,
5258 };
5259 this.initialize_diagnostic_endpoints();
5260 this
5261 }
5262
5263 /// Seeks to the given byte range in the buffer.
    pub fn seek(&mut self, range: Range<usize>) {
        let old_range = std::mem::replace(&mut self.range, range.clone());
        self.chunks.set_range(self.range.clone());
        if let Some(highlights) = self.highlights.as_mut() {
            if old_range.start <= self.range.start && old_range.end >= self.range.end {
                // Reuse existing highlights stack, as the new range is a subrange of the old one.
                highlights
                    .stack
                    .retain(|(end_offset, _)| *end_offset > range.start);
                if let Some(capture) = &highlights.next_capture
                    && range.start >= capture.node.start_byte()
                {
                    let next_capture_end = capture.node.end_byte();
                    if range.start < next_capture_end {
                        highlights.stack.push((
                            next_capture_end,
                            highlights.highlight_maps[capture.grammar_index].get(capture.index),
                        ));
                    }
                    highlights.next_capture.take();
                }
            } else if let Some(snapshot) = self.buffer_snapshot {
                let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
                *highlights = BufferChunkHighlights {
                    captures,
                    next_capture: None,
                    stack: Default::default(),
                    highlight_maps,
                };
            } else {
                // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
                // Seeking such BufferChunks is not supported.
                debug_assert!(
                    false,
                    "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
                );
            }

            highlights.captures.set_byte_range(self.range.clone());
            self.initialize_diagnostic_endpoints();
        }
    }

    fn initialize_diagnostic_endpoints(&mut self) {
        if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
            && let Some(buffer) = self.buffer_snapshot
        {
            let mut diagnostic_endpoints = Vec::new();
            for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start,
                    is_start: true,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end,
                    is_start: false,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
            }
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
            *diagnostics = diagnostic_endpoints.into_iter().peekable();
            self.hint_depth = 0;
            self.error_depth = 0;
            self.warning_depth = 0;
            self.information_depth = 0;
        }
    }

    /// The current byte offset in the buffer.
    pub fn offset(&self) -> usize {
        self.range.start
    }

    pub fn range(&self) -> Range<usize> {
        self.range.clone()
    }

    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }

        if endpoint.is_unnecessary {
            if endpoint.is_start {
                self.unnecessary_depth += 1;
            } else {
                self.unnecessary_depth -= 1;
            }
        }
    }

    fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }

    fn current_code_is_unnecessary(&self) -> bool {
        self.unnecessary_depth > 0
    }
}
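
// A minimal illustration of the intended `seek` pattern: narrowing an existing iterator to a
// subrange of the range it currently covers, which lets it reuse its highlight stack instead
// of re-querying the syntax map. This helper exists only as an illustrative sketch.
#[cfg(any(test, feature = "test-support"))]
#[allow(dead_code)]
fn seek_chunks_to_subrange(chunks: &mut BufferChunks<'_>, subrange: Range<usize>) {
    debug_assert!(
        chunks.range().start <= subrange.start && subrange.end <= chunks.range().end,
        "the subrange should lie within the iterator's current range to reuse highlights"
    );
    chunks.seek(subrange);
}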

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}
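
// An illustrative caller-side sketch, assuming the snapshot exposes a
// `chunks(range, language_aware)` method that constructs this iterator: it concatenates the
// text of all chunks that carry an error-severity diagnostic.
#[cfg(any(test, feature = "test-support"))]
#[allow(dead_code)]
fn error_chunk_text(snapshot: &BufferSnapshot, range: Range<usize>) -> String {
    snapshot
        .chunks(range, true)
        .filter(|chunk| chunk.diagnostic_severity == Some(DiagnosticSeverity::ERROR))
        .map(|chunk| chunk.text)
        .collect()
}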

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}
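
// Illustrative only: because `Diagnostic` implements `Default`, call sites can override just
// the fields they care about and take the remaining defaults (error severity, underlined,
// primary/unnecessary flags unset, and so on) via struct-update syntax.
#[cfg(any(test, feature = "test-support"))]
#[allow(dead_code)]
fn example_unnecessary_warning() -> Diagnostic {
    Diagnostic {
        severity: DiagnosticSeverity::WARNING,
        is_unnecessary: true,
        ..Diagnostic::default()
    }
}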

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been shrunk or enlarged by the given size along the given direction.
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}
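
// A small illustrative test of the `IndentSize` helpers above: growing an indent with
// `with_delta` and measuring a tab against a configured tab size.
#[cfg(test)]
mod indent_size_examples {
    use super::*;

    #[test]
    fn grow_and_measure_indents() {
        // Growing a 4-space indent by another 4 spaces yields 8 columns.
        let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
        assert_eq!(indent.len, 8);

        // A single tab expands to the configured tab width.
        let tab = IndentSize::tab();
        assert_eq!(tab.len_with_expanded_tabs(NonZeroU32::new(4).unwrap()), 4);
    }
}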

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}
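
// An illustrative test of `contiguous_ranges`: consecutive values are coalesced into half-open
// ranges, and a run is split once it reaches `max_len` elements.
#[cfg(test)]
mod contiguous_ranges_examples {
    use super::*;

    #[test]
    fn coalesces_and_splits_runs() {
        let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 100).collect();
        assert_eq!(ranges, vec![1..4, 5..7, 9..10]);

        // With `max_len` of 2, the run of three consecutive values is split after two.
        let ranges: Vec<_> = contiguous_ranges([1, 2, 3].into_iter(), 2).collect();
        assert_eq!(ranges, vec![1..3, 3..4]);
    }
}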

#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}
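
// An illustrative test of `CharClassifier` with no language scope attached: alphanumerics and
// underscores are words, whitespace is whitespace, and everything else is punctuation unless
// `ignore_punctuation` folds it into the word class.
#[cfg(test)]
mod char_classifier_examples {
    use super::*;

    #[test]
    fn classifies_chars_without_a_scope() {
        let classifier = CharClassifier::new(None);
        assert!(classifier.is_word('x'));
        assert!(classifier.is_word('_'));
        assert!(classifier.is_whitespace('\t'));
        assert!(classifier.is_punctuation('-'));

        // With punctuation ignored, `-` is treated as part of a word.
        let classifier = CharClassifier::new(None).ignore_punctuation(true);
        assert!(classifier.is_word('-'));
    }
}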

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}
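
// An illustrative test of `trailing_whitespace_ranges`, assuming `Rope` can be constructed
// from a `&str`: each reported range covers only the spaces and tabs at the end of a line.
#[cfg(test)]
mod trailing_whitespace_examples {
    use super::*;

    #[test]
    fn finds_trailing_whitespace_on_each_line() {
        let text = "fn main() {  \n    let x = 1;\t\n}";
        let rope = Rope::from(text);

        let ranges = trailing_whitespace_ranges(&rope);
        assert_eq!(ranges.len(), 2);
        for range in &ranges {
            assert!(text[range.clone()].chars().all(|c| c == ' ' || c == '\t'));
        }
    }
}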