1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff_with_offsets,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::channel::oneshot;
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TaskLabel, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::{Arc, LazyLock},
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
79/// A label for the background task spawned by the buffer to compute
80/// a diff against the contents of its file.
81pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
82
/// Indicates whether a [`Buffer`] has permission to edit.
84#[derive(PartialEq, Clone, Copy, Debug)]
85pub enum Capability {
86 /// The buffer is a mutable replica.
87 ReadWrite,
    /// The buffer is a mutable replica that has been toggled to be read-only.
89 Read,
90 /// The buffer is a read-only replica.
91 ReadOnly,
92}
93
94impl Capability {
95 /// Returns `true` if the capability is `ReadWrite`.
96 pub fn editable(self) -> bool {
97 matches!(self, Capability::ReadWrite)
98 }
99}
100
101pub type BufferRow = u32;
102
103/// An in-memory representation of a source code file, including its text,
104/// syntax trees, git status, and diagnostics.
105pub struct Buffer {
106 text: TextBuffer,
107 branch_state: Option<BufferBranchState>,
108 /// Filesystem state, `None` when there is no path.
109 file: Option<Arc<dyn File>>,
110 /// The mtime of the file when this buffer was last loaded from
111 /// or saved to disk.
112 saved_mtime: Option<MTime>,
113 /// The version vector when this buffer was last loaded from
114 /// or saved to disk.
115 saved_version: clock::Global,
116 preview_version: clock::Global,
117 transaction_depth: usize,
118 was_dirty_before_starting_transaction: Option<bool>,
119 reload_task: Option<Task<Result<()>>>,
120 language: Option<Arc<Language>>,
121 autoindent_requests: Vec<Arc<AutoindentRequest>>,
122 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
123 pending_autoindent: Option<Task<()>>,
124 sync_parse_timeout: Option<Duration>,
125 syntax_map: Mutex<SyntaxMap>,
126 reparse: Option<Task<()>>,
127 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
128 non_text_state_update_count: usize,
129 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
130 remote_selections: TreeMap<ReplicaId, SelectionSet>,
131 diagnostics_timestamp: clock::Lamport,
132 completion_triggers: BTreeSet<String>,
133 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
134 completion_triggers_timestamp: clock::Lamport,
135 deferred_ops: OperationQueue<Operation>,
136 capability: Capability,
137 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell holds `(self.version, has_changes)` as of the most recent call.
140 has_unsaved_edits: Cell<(clock::Global, bool)>,
141 change_bits: Vec<rc::Weak<Cell<bool>>>,
142 _subscriptions: Vec<gpui::Subscription>,
143 tree_sitter_data: Arc<TreeSitterData>,
144 encoding: &'static Encoding,
145 has_bom: bool,
146}
147
148#[derive(Debug)]
149pub struct TreeSitterData {
150 chunks: RowChunks,
151 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
152}
153
154const MAX_ROWS_IN_A_CHUNK: u32 = 50;
155
156impl TreeSitterData {
157 fn clear(&mut self, snapshot: text::BufferSnapshot) {
158 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
159 self.brackets_by_chunks.get_mut().clear();
160 self.brackets_by_chunks
161 .get_mut()
162 .resize(self.chunks.len(), None);
163 }
164
165 fn new(snapshot: text::BufferSnapshot) -> Self {
166 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
167 Self {
168 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
169 chunks,
170 }
171 }
172
173 fn version(&self) -> &clock::Global {
174 self.chunks.version()
175 }
176}
177
178#[derive(Copy, Clone, Debug, PartialEq, Eq)]
179pub enum ParseStatus {
180 Idle,
181 Parsing,
182}
183
184struct BufferBranchState {
185 base_buffer: Entity<Buffer>,
186 merged_operations: Vec<Lamport>,
187}
188
189/// An immutable, cheaply cloneable representation of a fixed
190/// state of a buffer.
191pub struct BufferSnapshot {
192 pub text: text::BufferSnapshot,
193 pub syntax: SyntaxSnapshot,
194 file: Option<Arc<dyn File>>,
195 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
196 remote_selections: TreeMap<ReplicaId, SelectionSet>,
197 language: Option<Arc<Language>>,
198 non_text_state_update_count: usize,
199 tree_sitter_data: Arc<TreeSitterData>,
200 pub capability: Capability,
201}
202
/// The kind and amount of indentation in a particular line. For now, this
/// assumes that a line's indentation uses a single kind of character.
205#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
206pub struct IndentSize {
207 /// The number of bytes that comprise the indentation.
208 pub len: u32,
209 /// The kind of whitespace used for indentation.
210 pub kind: IndentKind,
211}
212
213/// A whitespace character that's used for indentation.
214#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
215pub enum IndentKind {
216 /// An ASCII space character.
217 #[default]
218 Space,
219 /// An ASCII tab character.
220 Tab,
221}
222
223/// The shape of a selection cursor.
224#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
225pub enum CursorShape {
226 /// A vertical bar
227 #[default]
228 Bar,
229 /// A block that surrounds the following character
230 Block,
231 /// An underline that runs along the following character
232 Underline,
233 /// A box drawn around the following character
234 Hollow,
235}
236
237impl From<settings::CursorShape> for CursorShape {
238 fn from(shape: settings::CursorShape) -> Self {
239 match shape {
240 settings::CursorShape::Bar => CursorShape::Bar,
241 settings::CursorShape::Block => CursorShape::Block,
242 settings::CursorShape::Underline => CursorShape::Underline,
243 settings::CursorShape::Hollow => CursorShape::Hollow,
244 }
245 }
246}
247
248#[derive(Clone, Debug)]
249struct SelectionSet {
250 line_mode: bool,
251 cursor_shape: CursorShape,
252 selections: Arc<[Selection<Anchor>]>,
253 lamport_timestamp: clock::Lamport,
254}
255
256/// A diagnostic associated with a certain range of a buffer.
257#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
258pub struct Diagnostic {
259 /// The name of the service that produced this diagnostic.
260 pub source: Option<String>,
261 /// The ID provided by the dynamic registration that produced this diagnostic.
262 pub registration_id: Option<SharedString>,
263 /// A machine-readable code that identifies this diagnostic.
264 pub code: Option<NumberOrString>,
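    /// An optional URI pointing to documentation that describes this diagnostic's code.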
265 pub code_description: Option<lsp::Uri>,
266 /// Whether this diagnostic is a hint, warning, or error.
267 pub severity: DiagnosticSeverity,
268 /// The human-readable message associated with this diagnostic.
269 pub message: String,
    /// The human-readable message in Markdown format, if available.
271 pub markdown: Option<String>,
272 /// An id that identifies the group to which this diagnostic belongs.
273 ///
274 /// When a language server produces a diagnostic with
275 /// one or more associated diagnostics, those diagnostics are all
276 /// assigned a single group ID.
277 pub group_id: usize,
278 /// Whether this diagnostic is the primary diagnostic for its group.
279 ///
280 /// In a given group, the primary diagnostic is the top-level diagnostic
281 /// returned by the language server. The non-primary diagnostics are the
282 /// associated diagnostics.
283 pub is_primary: bool,
284 /// Whether this diagnostic is considered to originate from an analysis of
285 /// files on disk, as opposed to any unsaved buffer contents. This is a
286 /// property of a given diagnostic source, and is configured for a given
287 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
288 /// for the language server.
289 pub is_disk_based: bool,
290 /// Whether this diagnostic marks unnecessary code.
291 pub is_unnecessary: bool,
    /// Quick separation of diagnostic groups based on their source.
293 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back
    /// to the server when code actions are requested for this diagnostic.
295 pub data: Option<Value>,
296 /// Whether to underline the corresponding text range in the editor.
297 pub underline: bool,
298}
299
300#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
301pub enum DiagnosticSourceKind {
302 Pulled,
303 Pushed,
304 Other,
305}
306
307/// An operation used to synchronize this buffer with its other replicas.
308#[derive(Clone, Debug, PartialEq)]
309pub enum Operation {
310 /// A text operation.
311 Buffer(text::Operation),
312
313 /// An update to the buffer's diagnostics.
314 UpdateDiagnostics {
315 /// The id of the language server that produced the new diagnostics.
316 server_id: LanguageServerId,
317 /// The diagnostics.
318 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
319 /// The buffer's lamport timestamp.
320 lamport_timestamp: clock::Lamport,
321 },
322
323 /// An update to the most recent selections in this buffer.
324 UpdateSelections {
325 /// The selections.
326 selections: Arc<[Selection<Anchor>]>,
327 /// The buffer's lamport timestamp.
328 lamport_timestamp: clock::Lamport,
329 /// Whether the selections are in 'line mode'.
330 line_mode: bool,
331 /// The [`CursorShape`] associated with these selections.
332 cursor_shape: CursorShape,
333 },
334
335 /// An update to the characters that should trigger autocompletion
336 /// for this buffer.
337 UpdateCompletionTriggers {
338 /// The characters that trigger autocompletion.
339 triggers: Vec<String>,
340 /// The buffer's lamport timestamp.
341 lamport_timestamp: clock::Lamport,
342 /// The language server ID.
343 server_id: LanguageServerId,
344 },
345
346 /// An update to the line ending type of this buffer.
347 UpdateLineEnding {
348 /// The line ending type.
349 line_ending: LineEnding,
350 /// The buffer's lamport timestamp.
351 lamport_timestamp: clock::Lamport,
352 },
353}
354
355/// An event that occurs in a buffer.
356#[derive(Clone, Debug, PartialEq)]
357pub enum BufferEvent {
358 /// The buffer was changed in a way that must be
359 /// propagated to its other replicas.
360 Operation {
361 operation: Operation,
362 is_local: bool,
363 },
364 /// The buffer was edited.
365 Edited,
366 /// The buffer's `dirty` bit changed.
367 DirtyChanged,
368 /// The buffer was saved.
369 Saved,
370 /// The buffer's file was changed on disk.
371 FileHandleChanged,
372 /// The buffer was reloaded.
373 Reloaded,
    /// The buffer is in need of a reload.
375 ReloadNeeded,
376 /// The buffer's language was changed.
377 /// The boolean indicates whether this buffer did not have a language before, but does now.
378 LanguageChanged(bool),
379 /// The buffer's syntax trees were updated.
380 Reparsed,
381 /// The buffer's diagnostics were updated.
382 DiagnosticsUpdated,
383 /// The buffer gained or lost editing capabilities.
384 CapabilityChanged,
385}
386
387/// The file associated with a buffer.
388pub trait File: Send + Sync + Any {
389 /// Returns the [`LocalFile`] associated with this file, if the
390 /// file is local.
391 fn as_local(&self) -> Option<&dyn LocalFile>;
392
393 /// Returns whether this file is local.
394 fn is_local(&self) -> bool {
395 self.as_local().is_some()
396 }
397
398 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
399 /// only available in some states, such as modification time.
400 fn disk_state(&self) -> DiskState;
401
402 /// Returns the path of this file relative to the worktree's root directory.
403 fn path(&self) -> &Arc<RelPath>;
404
405 /// Returns the path of this file relative to the worktree's parent directory (this means it
406 /// includes the name of the worktree's root folder).
407 fn full_path(&self, cx: &App) -> PathBuf;
408
409 /// Returns the path style of this file.
410 fn path_style(&self, cx: &App) -> PathStyle;
411
412 /// Returns the last component of this handle's absolute path. If this handle refers to the root
413 /// of its worktree, then this method will return the name of the worktree itself.
414 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
415
416 /// Returns the id of the worktree to which this file belongs.
417 ///
418 /// This is needed for looking up project-specific settings.
419 fn worktree_id(&self, cx: &App) -> WorktreeId;
420
421 /// Converts this file into a protobuf message.
422 fn to_proto(&self, cx: &App) -> rpc::proto::File;
423
424 /// Return whether Zed considers this to be a private file.
425 fn is_private(&self) -> bool;
426
427 fn can_open(&self) -> bool {
428 !self.is_local()
429 }
430}
431
432/// The file's storage status - whether it's stored (`Present`), and if so when it was last
433/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
434/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
435/// indicator for new files.
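/// A minimal illustration (not a compiled doctest) of how the variants map onto
/// the accessors defined below; `mtime` here is an assumed [`MTime`] value
/// obtained elsewhere:
///
/// ```ignore
/// let state = DiskState::Present { mtime };
/// assert!(state.exists());
/// assert_eq!(state.mtime(), Some(mtime));
/// assert!(!DiskState::New.exists());
/// assert!(DiskState::Deleted.is_deleted());
/// ```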
436#[derive(Copy, Clone, Debug, PartialEq)]
437pub enum DiskState {
438 /// File created in Zed that has not been saved.
439 New,
440 /// File present on the filesystem.
441 Present { mtime: MTime },
442 /// Deleted file that was previously present.
443 Deleted,
    /// An old version of a file that was previously present,
    /// usually from a version control system (e.g. a Git blob).
446 Historic { was_deleted: bool },
447}
448
449impl DiskState {
450 /// Returns the file's last known modification time on disk.
451 pub fn mtime(self) -> Option<MTime> {
452 match self {
453 DiskState::New => None,
454 DiskState::Present { mtime } => Some(mtime),
455 DiskState::Deleted => None,
456 DiskState::Historic { .. } => None,
457 }
458 }
459
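    /// Returns whether the file is currently present on disk.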
460 pub fn exists(&self) -> bool {
461 match self {
462 DiskState::New => false,
463 DiskState::Present { .. } => true,
464 DiskState::Deleted => false,
465 DiskState::Historic { .. } => false,
466 }
467 }
468
469 /// Returns true if this state represents a deleted file.
470 pub fn is_deleted(&self) -> bool {
471 match self {
472 DiskState::Deleted => true,
473 DiskState::Historic { was_deleted } => *was_deleted,
474 _ => false,
475 }
476 }
477}
478
479/// The file associated with a buffer, in the case where the file is on the local disk.
480pub trait LocalFile: File {
    /// Returns the absolute path of this file.
482 fn abs_path(&self, cx: &App) -> PathBuf;
483
484 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
485 fn load(&self, cx: &App) -> Task<Result<String>>;
486
487 /// Loads the file's contents from disk.
488 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
489}
490
491/// The auto-indent behavior associated with an editing operation.
492/// For some editing operations, each affected line of text has its
493/// indentation recomputed. For other operations, the entire block
494/// of edited text is adjusted uniformly.
495#[derive(Clone, Debug)]
496pub enum AutoindentMode {
497 /// Indent each line of inserted text.
498 EachLine,
499 /// Apply the same indentation adjustment to all of the lines
500 /// in a given insertion.
501 Block {
502 /// The original indentation column of the first line of each
503 /// insertion, if it has been copied.
504 ///
505 /// Knowing this makes it possible to preserve the relative indentation
506 /// of every line in the insertion from when it was copied.
507 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion has its indentation adjusted by `b - a`.
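        ///
        /// For example, if a line was copied from indent column 4 (`a = 4`) and
        /// the first inserted line is auto-indented to column 8 (`b = 8`), a
        /// following line that was originally at column 6 ends up at column
        /// 6 + (8 - 4) = 10, preserving the copied block's shape.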
511 original_indent_columns: Vec<Option<u32>>,
512 },
513}
514
515#[derive(Clone)]
516struct AutoindentRequest {
517 before_edit: BufferSnapshot,
518 entries: Vec<AutoindentRequestEntry>,
519 is_block_mode: bool,
520 ignore_empty_lines: bool,
521}
522
523#[derive(Debug, Clone)]
524struct AutoindentRequestEntry {
525 /// A range of the buffer whose indentation should be adjusted.
526 range: Range<Anchor>,
    /// The row at which the edit started, in the buffer as it was before the
    /// edit was applied. This is stored here because the anchors in `range` are
    /// created after the edit, so they cannot be resolved against the
    /// `before_edit` snapshot.
530 old_row: Option<u32>,
531 indent_size: IndentSize,
532 original_indent_column: Option<u32>,
533}
534
535#[derive(Debug)]
536struct IndentSuggestion {
537 basis_row: u32,
538 delta: Ordering,
539 within_error: bool,
540}
541
542struct BufferChunkHighlights<'a> {
543 captures: SyntaxMapCaptures<'a>,
544 next_capture: Option<SyntaxMapCapture<'a>>,
545 stack: Vec<(usize, HighlightId)>,
546 highlight_maps: Vec<HighlightMap>,
547}
548
549/// An iterator that yields chunks of a buffer's text, along with their
550/// syntax highlights and diagnostic status.
551pub struct BufferChunks<'a> {
552 buffer_snapshot: Option<&'a BufferSnapshot>,
553 range: Range<usize>,
554 chunks: text::Chunks<'a>,
555 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
556 error_depth: usize,
557 warning_depth: usize,
558 information_depth: usize,
559 hint_depth: usize,
560 unnecessary_depth: usize,
561 underline: bool,
562 highlights: Option<BufferChunkHighlights<'a>>,
563}
564
565/// A chunk of a buffer's text, along with its syntax highlight and
566/// diagnostic status.
567#[derive(Clone, Debug, Default)]
568pub struct Chunk<'a> {
569 /// The text of the chunk.
570 pub text: &'a str,
571 /// The syntax highlighting style of the chunk.
572 pub syntax_highlight_id: Option<HighlightId>,
573 /// The highlight style that has been applied to this chunk in
574 /// the editor.
575 pub highlight_style: Option<HighlightStyle>,
576 /// The severity of diagnostic associated with this chunk, if any.
577 pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitmap of which characters in this chunk are tabs.
    pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
    pub chars: u128,
582 /// Whether this chunk of text is marked as unnecessary.
583 pub is_unnecessary: bool,
584 /// Whether this chunk of text was originally a tab character.
585 pub is_tab: bool,
586 /// Whether this chunk of text was originally an inlay.
587 pub is_inlay: bool,
588 /// Whether to underline the corresponding text range in the editor.
589 pub underline: bool,
590}
591
592/// A set of edits to a given version of a buffer, computed asynchronously.
593#[derive(Debug)]
594pub struct Diff {
595 pub base_version: clock::Global,
596 pub line_ending: LineEnding,
597 pub edits: Vec<(Range<usize>, Arc<str>)>,
598}
599
600#[derive(Debug, Clone, Copy)]
601pub(crate) struct DiagnosticEndpoint {
602 offset: usize,
603 is_start: bool,
604 underline: bool,
605 severity: DiagnosticSeverity,
606 is_unnecessary: bool,
607}
608
609/// A class of characters, used for characterizing a run of text.
610#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
611pub enum CharKind {
612 /// Whitespace.
613 Whitespace,
614 /// Punctuation.
615 Punctuation,
616 /// Word.
617 Word,
618}
619
620/// Context for character classification within a specific scope.
621#[derive(Copy, Clone, Eq, PartialEq, Debug)]
622pub enum CharScopeContext {
623 /// Character classification for completion queries.
624 ///
625 /// This context treats certain characters as word constituents that would
626 /// normally be considered punctuation, such as '-' in Tailwind classes
627 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
628 Completion,
629 /// Character classification for linked edits.
630 ///
631 /// This context handles characters that should be treated as part of
632 /// identifiers during linked editing operations, such as '.' in JSX
633 /// component names like `<Animated.View>`.
634 LinkedEdit,
635}
636
/// A runnable is the data about a buffer region that can be resolved into a task.
638pub struct Runnable {
639 pub tags: SmallVec<[RunnableTag; 1]>,
640 pub language: Arc<Language>,
641 pub buffer: BufferId,
642}
643
644#[derive(Default, Clone, Debug)]
645pub struct HighlightedText {
646 pub text: SharedString,
647 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
648}
649
650#[derive(Default, Debug)]
651struct HighlightedTextBuilder {
652 pub text: String,
653 highlights: Vec<(Range<usize>, HighlightStyle)>,
654}
655
656impl HighlightedText {
657 pub fn from_buffer_range<T: ToOffset>(
658 range: Range<T>,
659 snapshot: &text::BufferSnapshot,
660 syntax_snapshot: &SyntaxSnapshot,
661 override_style: Option<HighlightStyle>,
662 syntax_theme: &SyntaxTheme,
663 ) -> Self {
664 let mut highlighted_text = HighlightedTextBuilder::default();
665 highlighted_text.add_text_from_buffer_range(
666 range,
667 snapshot,
668 syntax_snapshot,
669 override_style,
670 syntax_theme,
671 );
672 highlighted_text.build()
673 }
674
675 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
676 gpui::StyledText::new(self.text.clone())
677 .with_default_highlights(default_style, self.highlights.iter().cloned())
678 }
679
    /// Returns the first line, with leading whitespace trimmed unless a
    /// highlight starts within it, and a boolean indicating whether more
    /// lines follow.
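    ///
    /// A rough sketch (not a compiled doctest) of the expected behavior:
    ///
    /// ```ignore
    /// let text = HighlightedText {
    ///     text: "    let x = 1;\nlet y = 2;".into(),
    ///     highlights: Vec::new(),
    /// };
    /// let (preview, has_more) = text.first_line_preview();
    /// assert_eq!(&*preview.text, "let x = 1;"); // leading whitespace trimmed
    /// assert!(has_more); // a second line follows
    /// ```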
682 pub fn first_line_preview(self) -> (Self, bool) {
683 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
684 let first_line = &self.text[..newline_ix];
685
686 // Trim leading whitespace, unless an edit starts prior to it.
687 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
688 if let Some((first_highlight_range, _)) = self.highlights.first() {
689 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
690 }
691
692 let preview_text = &first_line[preview_start_ix..];
693 let preview_highlights = self
694 .highlights
695 .into_iter()
696 .skip_while(|(range, _)| range.end <= preview_start_ix)
697 .take_while(|(range, _)| range.start < newline_ix)
698 .filter_map(|(mut range, highlight)| {
699 range.start = range.start.saturating_sub(preview_start_ix);
700 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
701 if range.is_empty() {
702 None
703 } else {
704 Some((range, highlight))
705 }
706 });
707
708 let preview = Self {
709 text: SharedString::new(preview_text),
710 highlights: preview_highlights.collect(),
711 };
712
713 (preview, self.text.len() > newline_ix)
714 }
715}
716
717impl HighlightedTextBuilder {
718 pub fn build(self) -> HighlightedText {
719 HighlightedText {
720 text: self.text.into(),
721 highlights: self.highlights,
722 }
723 }
724
725 pub fn add_text_from_buffer_range<T: ToOffset>(
726 &mut self,
727 range: Range<T>,
728 snapshot: &text::BufferSnapshot,
729 syntax_snapshot: &SyntaxSnapshot,
730 override_style: Option<HighlightStyle>,
731 syntax_theme: &SyntaxTheme,
732 ) {
733 let range = range.to_offset(snapshot);
734 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
735 let start = self.text.len();
736 self.text.push_str(chunk.text);
737 let end = self.text.len();
738
739 if let Some(highlight_style) = chunk
740 .syntax_highlight_id
741 .and_then(|id| id.style(syntax_theme))
742 {
743 let highlight_style = override_style.map_or(highlight_style, |override_style| {
744 highlight_style.highlight(override_style)
745 });
746 self.highlights.push((start..end, highlight_style));
747 } else if let Some(override_style) = override_style {
748 self.highlights.push((start..end, override_style));
749 }
750 }
751 }
752
753 fn highlighted_chunks<'a>(
754 range: Range<usize>,
755 snapshot: &'a text::BufferSnapshot,
756 syntax_snapshot: &'a SyntaxSnapshot,
757 ) -> BufferChunks<'a> {
758 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
759 grammar
760 .highlights_config
761 .as_ref()
762 .map(|config| &config.query)
763 });
764
765 let highlight_maps = captures
766 .grammars()
767 .iter()
768 .map(|grammar| grammar.highlight_map())
769 .collect();
770
771 BufferChunks::new(
772 snapshot.as_rope(),
773 range,
774 Some((captures, highlight_maps)),
775 false,
776 None,
777 )
778 }
779}
780
781#[derive(Clone)]
782pub struct EditPreview {
783 old_snapshot: text::BufferSnapshot,
784 applied_edits_snapshot: text::BufferSnapshot,
785 syntax_snapshot: SyntaxSnapshot,
786}
787
788impl EditPreview {
789 pub fn as_unified_diff(
790 &self,
791 file: Option<&Arc<dyn File>>,
792 edits: &[(Range<Anchor>, impl AsRef<str>)],
793 ) -> Option<String> {
794 let (first, _) = edits.first()?;
795 let (last, _) = edits.last()?;
796
797 let start = first.start.to_point(&self.old_snapshot);
798 let old_end = last.end.to_point(&self.old_snapshot);
799 let new_end = last
800 .end
801 .bias_right(&self.old_snapshot)
802 .to_point(&self.applied_edits_snapshot);
803
804 let start = Point::new(start.row.saturating_sub(3), 0);
805 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
806 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
807
808 let diff_body = unified_diff_with_offsets(
809 &self
810 .old_snapshot
811 .text_for_range(start..old_end)
812 .collect::<String>(),
813 &self
814 .applied_edits_snapshot
815 .text_for_range(start..new_end)
816 .collect::<String>(),
817 start.row,
818 start.row,
819 );
820
821 let path = file.map(|f| f.path().as_unix_str());
822 let header = match path {
823 Some(p) => format!("--- a/{}\n+++ b/{}\n", p, p),
824 None => String::new(),
825 };
826
827 Some(format!("{}{}", header, diff_body))
828 }
829
830 pub fn highlight_edits(
831 &self,
832 current_snapshot: &BufferSnapshot,
833 edits: &[(Range<Anchor>, impl AsRef<str>)],
834 include_deletions: bool,
835 cx: &App,
836 ) -> HighlightedText {
837 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
838 return HighlightedText::default();
839 };
840
841 let mut highlighted_text = HighlightedTextBuilder::default();
842
843 let visible_range_in_preview_snapshot =
844 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
845 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
846
847 let insertion_highlight_style = HighlightStyle {
848 background_color: Some(cx.theme().status().created_background),
849 ..Default::default()
850 };
851 let deletion_highlight_style = HighlightStyle {
852 background_color: Some(cx.theme().status().deleted_background),
853 ..Default::default()
854 };
855 let syntax_theme = cx.theme().syntax();
856
857 for (range, edit_text) in edits {
858 let edit_new_end_in_preview_snapshot = range
859 .end
860 .bias_right(&self.old_snapshot)
861 .to_offset(&self.applied_edits_snapshot);
862 let edit_start_in_preview_snapshot =
863 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
864
865 let unchanged_range_in_preview_snapshot =
866 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
867 if !unchanged_range_in_preview_snapshot.is_empty() {
868 highlighted_text.add_text_from_buffer_range(
869 unchanged_range_in_preview_snapshot,
870 &self.applied_edits_snapshot,
871 &self.syntax_snapshot,
872 None,
873 syntax_theme,
874 );
875 }
876
877 let range_in_current_snapshot = range.to_offset(current_snapshot);
878 if include_deletions && !range_in_current_snapshot.is_empty() {
879 highlighted_text.add_text_from_buffer_range(
880 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
883 Some(deletion_highlight_style),
884 syntax_theme,
885 );
886 }
887
888 if !edit_text.as_ref().is_empty() {
889 highlighted_text.add_text_from_buffer_range(
890 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
891 &self.applied_edits_snapshot,
892 &self.syntax_snapshot,
893 Some(insertion_highlight_style),
894 syntax_theme,
895 );
896 }
897
898 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
899 }
900
901 highlighted_text.add_text_from_buffer_range(
902 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
903 &self.applied_edits_snapshot,
904 &self.syntax_snapshot,
905 None,
906 syntax_theme,
907 );
908
909 highlighted_text.build()
910 }
911
912 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
913 cx.new(|cx| {
914 let mut buffer = Buffer::local_normalized(
915 self.applied_edits_snapshot.as_rope().clone(),
916 self.applied_edits_snapshot.line_ending(),
917 cx,
918 );
919 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
920 buffer
921 })
922 }
923
924 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
925 let (first, _) = edits.first()?;
926 let (last, _) = edits.last()?;
927
928 let start = first
929 .start
930 .bias_left(&self.old_snapshot)
931 .to_point(&self.applied_edits_snapshot);
932 let end = last
933 .end
934 .bias_right(&self.old_snapshot)
935 .to_point(&self.applied_edits_snapshot);
936
937 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
938 let range = Point::new(start.row, 0)
939 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
940
941 Some(range)
942 }
943}
944
945#[derive(Clone, Debug, PartialEq, Eq)]
946pub struct BracketMatch<T> {
947 pub open_range: Range<T>,
948 pub close_range: Range<T>,
949 pub newline_only: bool,
950 pub syntax_layer_depth: usize,
951 pub color_index: Option<usize>,
952}
953
954impl<T> BracketMatch<T> {
955 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
956 (self.open_range, self.close_range)
957 }
958}
959
960impl Buffer {
961 /// Create a new buffer with the given base text.
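    ///
    /// A minimal usage sketch; `cx` is assumed to be a surrounding gpui context:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```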
962 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
963 Self::build(
964 TextBuffer::new(
965 ReplicaId::LOCAL,
966 cx.entity_id().as_non_zero_u64().into(),
967 base_text.into(),
968 ),
969 None,
970 Capability::ReadWrite,
971 )
972 }
973
974 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
975 pub fn local_normalized(
976 base_text_normalized: Rope,
977 line_ending: LineEnding,
978 cx: &Context<Self>,
979 ) -> Self {
980 Self::build(
981 TextBuffer::new_normalized(
982 ReplicaId::LOCAL,
983 cx.entity_id().as_non_zero_u64().into(),
984 line_ending,
985 base_text_normalized,
986 ),
987 None,
988 Capability::ReadWrite,
989 )
990 }
991
992 /// Create a new buffer that is a replica of a remote buffer.
993 pub fn remote(
994 remote_id: BufferId,
995 replica_id: ReplicaId,
996 capability: Capability,
997 base_text: impl Into<String>,
998 ) -> Self {
999 Self::build(
1000 TextBuffer::new(replica_id, remote_id, base_text.into()),
1001 None,
1002 capability,
1003 )
1004 }
1005
1006 /// Create a new buffer that is a replica of a remote buffer, populating its
1007 /// state from the given protobuf message.
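    ///
    /// A hedged sketch; `replica_id` and `state` are assumed to have been
    /// received from the collaboration server:
    ///
    /// ```ignore
    /// let buffer = Buffer::from_proto(replica_id, Capability::ReadWrite, state, None)?;
    /// // Typically wrapped in `cx.new` to obtain an `Entity<Buffer>`.
    /// ```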
1008 pub fn from_proto(
1009 replica_id: ReplicaId,
1010 capability: Capability,
1011 message: proto::BufferState,
1012 file: Option<Arc<dyn File>>,
1013 ) -> Result<Self> {
1014 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
1015 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
1016 let mut this = Self::build(buffer, file, capability);
1017 this.text.set_line_ending(proto::deserialize_line_ending(
1018 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
1019 ));
1020 this.saved_version = proto::deserialize_version(&message.saved_version);
1021 this.saved_mtime = message.saved_mtime.map(|time| time.into());
1022 Ok(this)
1023 }
1024
1025 /// Serialize the buffer's state to a protobuf message.
1026 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
1027 proto::BufferState {
1028 id: self.remote_id().into(),
1029 file: self.file.as_ref().map(|f| f.to_proto(cx)),
1030 base_text: self.base_text().to_string(),
1031 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
1032 saved_version: proto::serialize_version(&self.saved_version),
1033 saved_mtime: self.saved_mtime.map(|time| time.into()),
1034 }
1035 }
1036
    /// Serializes all of the buffer's changes since the given version as
    /// protobuf operations.
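    ///
    /// A usage sketch; `buffer` is assumed to be an `Entity<Buffer>` and the
    /// returned [`Task`] is awaited later in an async context:
    ///
    /// ```ignore
    /// // On the main thread:
    /// let ops_task = buffer.read(cx).serialize_ops(None, cx);
    /// // Later, in an async context:
    /// let ops = ops_task.await;
    /// ```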
1038 pub fn serialize_ops(
1039 &self,
1040 since: Option<clock::Global>,
1041 cx: &App,
1042 ) -> Task<Vec<proto::Operation>> {
1043 let mut operations = Vec::new();
1044 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1045
1046 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1047 proto::serialize_operation(&Operation::UpdateSelections {
1048 selections: set.selections.clone(),
1049 lamport_timestamp: set.lamport_timestamp,
1050 line_mode: set.line_mode,
1051 cursor_shape: set.cursor_shape,
1052 })
1053 }));
1054
1055 for (server_id, diagnostics) in &self.diagnostics {
1056 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1057 lamport_timestamp: self.diagnostics_timestamp,
1058 server_id: *server_id,
1059 diagnostics: diagnostics.iter().cloned().collect(),
1060 }));
1061 }
1062
1063 for (server_id, completions) in &self.completion_triggers_per_language_server {
1064 operations.push(proto::serialize_operation(
1065 &Operation::UpdateCompletionTriggers {
1066 triggers: completions.iter().cloned().collect(),
1067 lamport_timestamp: self.completion_triggers_timestamp,
1068 server_id: *server_id,
1069 },
1070 ));
1071 }
1072
1073 let text_operations = self.text.operations().clone();
1074 cx.background_spawn(async move {
1075 let since = since.unwrap_or_default();
1076 operations.extend(
1077 text_operations
1078 .iter()
1079 .filter(|(_, op)| !since.observed(op.timestamp()))
1080 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1081 );
1082 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1083 operations
1084 })
1085 }
1086
1087 /// Assign a language to the buffer, returning the buffer.
1088 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1089 self.set_language_async(Some(language), cx);
1090 self
1091 }
1092
    /// Assigns a language to the buffer, blocking for up to 1ms to reparse it,
    /// and returns the buffer.
1094 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1095 self.set_language(Some(language), cx);
1096 self
1097 }
1098
1099 /// Returns the [`Capability`] of this buffer.
1100 pub fn capability(&self) -> Capability {
1101 self.capability
1102 }
1103
1104 /// Whether this buffer can only be read.
1105 pub fn read_only(&self) -> bool {
1106 !self.capability.editable()
1107 }
1108
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1110 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1111 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1112 let snapshot = buffer.snapshot();
1113 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1114 let tree_sitter_data = TreeSitterData::new(snapshot);
1115 Self {
1116 saved_mtime,
1117 tree_sitter_data: Arc::new(tree_sitter_data),
1118 saved_version: buffer.version(),
1119 preview_version: buffer.version(),
1120 reload_task: None,
1121 transaction_depth: 0,
1122 was_dirty_before_starting_transaction: None,
1123 has_unsaved_edits: Cell::new((buffer.version(), false)),
1124 text: buffer,
1125 branch_state: None,
1126 file,
1127 capability,
1128 syntax_map,
1129 reparse: None,
1130 non_text_state_update_count: 0,
1131 sync_parse_timeout: if cfg!(any(test, feature = "test-support")) {
1132 Some(Duration::from_millis(10))
1133 } else {
1134 Some(Duration::from_millis(1))
1135 },
1136 parse_status: watch::channel(ParseStatus::Idle),
1137 autoindent_requests: Default::default(),
1138 wait_for_autoindent_txs: Default::default(),
1139 pending_autoindent: Default::default(),
1140 language: None,
1141 remote_selections: Default::default(),
1142 diagnostics: Default::default(),
1143 diagnostics_timestamp: Lamport::MIN,
1144 completion_triggers: Default::default(),
1145 completion_triggers_per_language_server: Default::default(),
1146 completion_triggers_timestamp: Lamport::MIN,
1147 deferred_ops: OperationQueue::new(),
1148 has_conflict: false,
1149 change_bits: Default::default(),
1150 _subscriptions: Vec::new(),
1151 encoding: encoding_rs::UTF_8,
1152 has_bom: false,
1153 }
1154 }
1155
1156 pub fn build_snapshot(
1157 text: Rope,
1158 language: Option<Arc<Language>>,
1159 language_registry: Option<Arc<LanguageRegistry>>,
1160 cx: &mut App,
1161 ) -> impl Future<Output = BufferSnapshot> + use<> {
1162 let entity_id = cx.reserve_entity::<Self>().entity_id();
1163 let buffer_id = entity_id.as_non_zero_u64().into();
1164 async move {
1165 let text =
1166 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1167 .snapshot();
1168 let mut syntax = SyntaxMap::new(&text).snapshot();
1169 if let Some(language) = language.clone() {
1170 let language_registry = language_registry.clone();
1171 syntax.reparse(&text, language_registry, language);
1172 }
1173 let tree_sitter_data = TreeSitterData::new(text.clone());
1174 BufferSnapshot {
1175 text,
1176 syntax,
1177 file: None,
1178 diagnostics: Default::default(),
1179 remote_selections: Default::default(),
1180 tree_sitter_data: Arc::new(tree_sitter_data),
1181 language,
1182 non_text_state_update_count: 0,
1183 capability: Capability::ReadOnly,
1184 }
1185 }
1186 }
1187
1188 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1189 let entity_id = cx.reserve_entity::<Self>().entity_id();
1190 let buffer_id = entity_id.as_non_zero_u64().into();
1191 let text = TextBuffer::new_normalized(
1192 ReplicaId::LOCAL,
1193 buffer_id,
1194 Default::default(),
1195 Rope::new(),
1196 )
1197 .snapshot();
1198 let syntax = SyntaxMap::new(&text).snapshot();
1199 let tree_sitter_data = TreeSitterData::new(text.clone());
1200 BufferSnapshot {
1201 text,
1202 syntax,
1203 tree_sitter_data: Arc::new(tree_sitter_data),
1204 file: None,
1205 diagnostics: Default::default(),
1206 remote_selections: Default::default(),
1207 language: None,
1208 non_text_state_update_count: 0,
1209 capability: Capability::ReadOnly,
1210 }
1211 }
1212
1213 #[cfg(any(test, feature = "test-support"))]
1214 pub fn build_snapshot_sync(
1215 text: Rope,
1216 language: Option<Arc<Language>>,
1217 language_registry: Option<Arc<LanguageRegistry>>,
1218 cx: &mut App,
1219 ) -> BufferSnapshot {
1220 let entity_id = cx.reserve_entity::<Self>().entity_id();
1221 let buffer_id = entity_id.as_non_zero_u64().into();
1222 let text =
1223 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1224 .snapshot();
1225 let mut syntax = SyntaxMap::new(&text).snapshot();
1226 if let Some(language) = language.clone() {
1227 syntax.reparse(&text, language_registry, language);
1228 }
1229 let tree_sitter_data = TreeSitterData::new(text.clone());
1230 BufferSnapshot {
1231 text,
1232 syntax,
1233 tree_sitter_data: Arc::new(tree_sitter_data),
1234 file: None,
1235 diagnostics: Default::default(),
1236 remote_selections: Default::default(),
1237 language,
1238 non_text_state_update_count: 0,
1239 capability: Capability::ReadOnly,
1240 }
1241 }
1242
1243 /// Retrieve a snapshot of the buffer's current state. This is computationally
1244 /// cheap, and allows reading from the buffer on a background thread.
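    ///
    /// A sketch of the intended pattern (error handling omitted; `buffer` is
    /// assumed to be an `Entity<Buffer>`):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot can be read freely off the main thread.
    ///     let _len = snapshot.text.len();
    /// })
    /// .detach();
    /// ```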
1245 pub fn snapshot(&self) -> BufferSnapshot {
1246 let text = self.text.snapshot();
1247 let mut syntax_map = self.syntax_map.lock();
1248 syntax_map.interpolate(&text);
1249 let syntax = syntax_map.snapshot();
1250
1251 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1252 Arc::new(TreeSitterData::new(text.clone()))
1253 } else {
1254 self.tree_sitter_data.clone()
1255 };
1256
1257 BufferSnapshot {
1258 text,
1259 syntax,
1260 tree_sitter_data,
1261 file: self.file.clone(),
1262 remote_selections: self.remote_selections.clone(),
1263 diagnostics: self.diagnostics.clone(),
1264 language: self.language.clone(),
1265 non_text_state_update_count: self.non_text_state_update_count,
1266 capability: self.capability,
1267 }
1268 }
1269
1270 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1271 let this = cx.entity();
1272 cx.new(|cx| {
1273 let mut branch = Self {
1274 branch_state: Some(BufferBranchState {
1275 base_buffer: this.clone(),
1276 merged_operations: Default::default(),
1277 }),
1278 language: self.language.clone(),
1279 has_conflict: self.has_conflict,
1280 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1281 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1282 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1283 };
1284 if let Some(language_registry) = self.language_registry() {
1285 branch.set_language_registry(language_registry);
1286 }
1287
1288 // Reparse the branch buffer so that we get syntax highlighting immediately.
1289 branch.reparse(cx, true);
1290
1291 branch
1292 })
1293 }
1294
1295 pub fn preview_edits(
1296 &self,
1297 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1298 cx: &App,
1299 ) -> Task<EditPreview> {
1300 let registry = self.language_registry();
1301 let language = self.language().cloned();
1302 let old_snapshot = self.text.snapshot();
1303 let mut branch_buffer = self.text.branch();
1304 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1305 cx.background_spawn(async move {
1306 if !edits.is_empty() {
1307 if let Some(language) = language.clone() {
1308 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1309 }
1310
1311 branch_buffer.edit(edits.iter().cloned());
1312 let snapshot = branch_buffer.snapshot();
1313 syntax_snapshot.interpolate(&snapshot);
1314
1315 if let Some(language) = language {
1316 syntax_snapshot.reparse(&snapshot, registry, language);
1317 }
1318 }
1319 EditPreview {
1320 old_snapshot,
1321 applied_edits_snapshot: branch_buffer.snapshot(),
1322 syntax_snapshot,
1323 }
1324 })
1325 }
1326
1327 /// Applies all of the changes in this buffer that intersect any of the
1328 /// given `ranges` to its base buffer.
1329 ///
1330 /// If `ranges` is empty, then all changes will be applied. This buffer must
1331 /// be a branch buffer to call this method.
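    ///
    /// A hedged sketch; `branch` is assumed to be a buffer created via
    /// [`Buffer::branch`]:
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     // Pass an empty Vec to merge every change back into the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```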
1332 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1333 let Some(base_buffer) = self.base_buffer() else {
1334 debug_panic!("not a branch buffer");
1335 return;
1336 };
1337
1338 let mut ranges = if ranges.is_empty() {
1339 &[0..usize::MAX]
1340 } else {
1341 ranges.as_slice()
1342 }
1343 .iter()
1344 .peekable();
1345
1346 let mut edits = Vec::new();
1347 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1348 let mut is_included = false;
1349 while let Some(range) = ranges.peek() {
1350 if range.end < edit.new.start {
1351 ranges.next().unwrap();
1352 } else {
1353 if range.start <= edit.new.end {
1354 is_included = true;
1355 }
1356 break;
1357 }
1358 }
1359
1360 if is_included {
1361 edits.push((
1362 edit.old.clone(),
1363 self.text_for_range(edit.new.clone()).collect::<String>(),
1364 ));
1365 }
1366 }
1367
1368 let operation = base_buffer.update(cx, |base_buffer, cx| {
1369 // cx.emit(BufferEvent::DiffBaseChanged);
1370 base_buffer.edit(edits, None, cx)
1371 });
1372
1373 if let Some(operation) = operation
1374 && let Some(BufferBranchState {
1375 merged_operations, ..
1376 }) = &mut self.branch_state
1377 {
1378 merged_operations.push(operation);
1379 }
1380 }
1381
1382 fn on_base_buffer_event(
1383 &mut self,
1384 _: Entity<Buffer>,
1385 event: &BufferEvent,
1386 cx: &mut Context<Self>,
1387 ) {
1388 let BufferEvent::Operation { operation, .. } = event else {
1389 return;
1390 };
1391 let Some(BufferBranchState {
1392 merged_operations, ..
1393 }) = &mut self.branch_state
1394 else {
1395 return;
1396 };
1397
1398 let mut operation_to_undo = None;
1399 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1400 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1401 {
1402 merged_operations.remove(ix);
1403 operation_to_undo = Some(operation.timestamp);
1404 }
1405
1406 self.apply_ops([operation.clone()], cx);
1407
1408 if let Some(timestamp) = operation_to_undo {
1409 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1410 self.undo_operations(counts, cx);
1411 }
1412 }
1413
1414 #[cfg(test)]
1415 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1416 &self.text
1417 }
1418
1419 /// Retrieve a snapshot of the buffer's raw text, without any
1420 /// language-related state like the syntax tree or diagnostics.
1421 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1422 self.text.snapshot()
1423 }
1424
1425 /// The file associated with the buffer, if any.
1426 pub fn file(&self) -> Option<&Arc<dyn File>> {
1427 self.file.as_ref()
1428 }
1429
1430 /// The version of the buffer that was last saved or reloaded from disk.
1431 pub fn saved_version(&self) -> &clock::Global {
1432 &self.saved_version
1433 }
1434
1435 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1436 pub fn saved_mtime(&self) -> Option<MTime> {
1437 self.saved_mtime
1438 }
1439
1440 /// Returns the character encoding of the buffer's file.
1441 pub fn encoding(&self) -> &'static Encoding {
1442 self.encoding
1443 }
1444
1445 /// Sets the character encoding of the buffer.
1446 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1447 self.encoding = encoding;
1448 }
1449
1450 /// Returns whether the buffer has a Byte Order Mark.
1451 pub fn has_bom(&self) -> bool {
1452 self.has_bom
1453 }
1454
1455 /// Sets whether the buffer has a Byte Order Mark.
1456 pub fn set_has_bom(&mut self, has_bom: bool) {
1457 self.has_bom = has_bom;
1458 }
1459
1460 /// Assign a language to the buffer.
1461 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1462 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1463 }
1464
1465 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1466 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1467 self.set_language_(language, true, cx);
1468 }
1469
1470 fn set_language_(
1471 &mut self,
1472 language: Option<Arc<Language>>,
1473 may_block: bool,
1474 cx: &mut Context<Self>,
1475 ) {
1476 self.non_text_state_update_count += 1;
1477 self.syntax_map.lock().clear(&self.text);
1478 let old_language = std::mem::replace(&mut self.language, language);
1479 self.was_changed();
1480 self.reparse(cx, may_block);
1481 let has_fresh_language =
1482 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1483 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1484 }
1485
1486 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1487 /// other languages if parts of the buffer are written in different languages.
1488 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1489 self.syntax_map
1490 .lock()
1491 .set_language_registry(language_registry);
1492 }
1493
1494 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1495 self.syntax_map.lock().language_registry()
1496 }
1497
1498 /// Assign the line ending type to the buffer.
1499 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1500 self.text.set_line_ending(line_ending);
1501
1502 let lamport_timestamp = self.text.lamport_clock.tick();
1503 self.send_operation(
1504 Operation::UpdateLineEnding {
1505 line_ending,
1506 lamport_timestamp,
1507 },
1508 true,
1509 cx,
1510 );
1511 }
1512
1513 /// Assign the buffer a new [`Capability`].
1514 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1515 if self.capability != capability {
1516 self.capability = capability;
1517 cx.emit(BufferEvent::CapabilityChanged)
1518 }
1519 }
1520
1521 /// This method is called to signal that the buffer has been saved.
1522 pub fn did_save(
1523 &mut self,
1524 version: clock::Global,
1525 mtime: Option<MTime>,
1526 cx: &mut Context<Self>,
1527 ) {
1528 self.saved_version = version.clone();
1529 self.has_unsaved_edits.set((version, false));
1530 self.has_conflict = false;
1531 self.saved_mtime = mtime;
1532 self.was_changed();
1533 cx.emit(BufferEvent::Saved);
1534 cx.notify();
1535 }
1536
1537 /// Reloads the contents of the buffer from disk.
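    ///
    /// A usage sketch (assumes an async context in which the returned receiver
    /// can be awaited):
    ///
    /// ```ignore
    /// let receiver = buffer.update(cx, |buffer, cx| buffer.reload(cx));
    /// if let Ok(Some(_transaction)) = receiver.await {
    ///     // The reload replaced the buffer contents in a single transaction.
    /// }
    /// ```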
1538 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1539 let (tx, rx) = futures::channel::oneshot::channel();
1540 let prev_version = self.text.version();
1541 self.reload_task = Some(cx.spawn(async move |this, cx| {
1542 let Some((new_mtime, load_bytes_task, encoding)) = this.update(cx, |this, cx| {
1543 let file = this.file.as_ref()?.as_local()?;
1544 Some((
1545 file.disk_state().mtime(),
1546 file.load_bytes(cx),
1547 this.encoding,
1548 ))
1549 })?
1550 else {
1551 return Ok(());
1552 };
1553
1554 let bytes = load_bytes_task.await?;
1555 let (cow, _encoding_used, _has_errors) = encoding.decode(&bytes);
1556 let new_text = cow.into_owned();
1557
1558 let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await;
1559 this.update(cx, |this, cx| {
1560 if this.version() == diff.base_version {
1561 this.finalize_last_transaction();
1562 this.apply_diff(diff, cx);
1563 tx.send(this.finalize_last_transaction().cloned()).ok();
1564 this.has_conflict = false;
1565 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1566 } else {
1567 if !diff.edits.is_empty()
1568 || this
1569 .edits_since::<usize>(&diff.base_version)
1570 .next()
1571 .is_some()
1572 {
1573 this.has_conflict = true;
1574 }
1575
1576 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1577 }
1578
1579 this.reload_task.take();
1580 })
1581 }));
1582 rx
1583 }
1584
1585 /// This method is called to signal that the buffer has been reloaded.
1586 pub fn did_reload(
1587 &mut self,
1588 version: clock::Global,
1589 line_ending: LineEnding,
1590 mtime: Option<MTime>,
1591 cx: &mut Context<Self>,
1592 ) {
1593 self.saved_version = version;
1594 self.has_unsaved_edits
1595 .set((self.saved_version.clone(), false));
1596 self.text.set_line_ending(line_ending);
1597 self.saved_mtime = mtime;
1598 cx.emit(BufferEvent::Reloaded);
1599 cx.notify();
1600 }
1601
1602 /// Updates the [`File`] backing this buffer. This should be called when
1603 /// the file has changed or has been deleted.
1604 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1605 let was_dirty = self.is_dirty();
1606 let mut file_changed = false;
1607
1608 if let Some(old_file) = self.file.as_ref() {
1609 if new_file.path() != old_file.path() {
1610 file_changed = true;
1611 }
1612
1613 let old_state = old_file.disk_state();
1614 let new_state = new_file.disk_state();
1615 if old_state != new_state {
1616 file_changed = true;
1617 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1618 cx.emit(BufferEvent::ReloadNeeded)
1619 }
1620 }
1621 } else {
1622 file_changed = true;
1623 };
1624
1625 self.file = Some(new_file);
1626 if file_changed {
1627 self.was_changed();
1628 self.non_text_state_update_count += 1;
1629 if was_dirty != self.is_dirty() {
1630 cx.emit(BufferEvent::DirtyChanged);
1631 }
1632 cx.emit(BufferEvent::FileHandleChanged);
1633 cx.notify();
1634 }
1635 }
1636
1637 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1638 Some(self.branch_state.as_ref()?.base_buffer.clone())
1639 }
1640
1641 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1642 pub fn language(&self) -> Option<&Arc<Language>> {
1643 self.language.as_ref()
1644 }
1645
1646 /// Returns the [`Language`] at the given location.
1647 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1648 let offset = position.to_offset(self);
1649 let mut is_first = true;
1650 let start_anchor = self.anchor_before(offset);
1651 let end_anchor = self.anchor_after(offset);
1652 self.syntax_map
1653 .lock()
1654 .layers_for_range(offset..offset, &self.text, false)
1655 .filter(|layer| {
1656 if is_first {
1657 is_first = false;
1658 return true;
1659 }
1660
1661 layer
1662 .included_sub_ranges
1663 .map(|sub_ranges| {
1664 sub_ranges.iter().any(|sub_range| {
1665 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1666 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1667 !is_before_start && !is_after_end
1668 })
1669 })
1670 .unwrap_or(true)
1671 })
1672 .last()
1673 .map(|info| info.language.clone())
1674 .or_else(|| self.language.clone())
1675 }
1676
1677 /// Returns each [`Language`] for the active syntax layers at the given location.
1678 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1679 let offset = position.to_offset(self);
1680 let mut languages: Vec<Arc<Language>> = self
1681 .syntax_map
1682 .lock()
1683 .layers_for_range(offset..offset, &self.text, false)
1684 .map(|info| info.language.clone())
1685 .collect();
1686
1687 if languages.is_empty()
1688 && let Some(buffer_language) = self.language()
1689 {
1690 languages.push(buffer_language.clone());
1691 }
1692
1693 languages
1694 }
1695
1696 /// An integer version number that accounts for all updates besides
1697 /// the buffer's text itself (which is versioned via a version vector).
1698 pub fn non_text_state_update_count(&self) -> usize {
1699 self.non_text_state_update_count
1700 }
1701
1702 /// Whether the buffer is being parsed in the background.
1703 #[cfg(any(test, feature = "test-support"))]
1704 pub fn is_parsing(&self) -> bool {
1705 self.reparse.is_some()
1706 }
1707
1708 /// Indicates whether the buffer contains any regions that may be
1709 /// written in a language that hasn't been loaded yet.
1710 pub fn contains_unknown_injections(&self) -> bool {
1711 self.syntax_map.lock().contains_unknown_injections()
1712 }
1713
1714 #[cfg(any(test, feature = "test-support"))]
1715 pub fn set_sync_parse_timeout(&mut self, timeout: Option<Duration>) {
1716 self.sync_parse_timeout = timeout;
1717 }
1718
1719 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1720 match Arc::get_mut(&mut self.tree_sitter_data) {
1721 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1722 None => {
1723 let tree_sitter_data = TreeSitterData::new(snapshot);
1724 self.tree_sitter_data = Arc::new(tree_sitter_data)
1725 }
1726 }
1727 }
1728
1729 /// Called after an edit to synchronize the buffer's main parse tree with
1730 /// the buffer's new underlying state.
1731 ///
1732 /// Locks the syntax map and interpolates the edits since the last reparse
1733 /// into the foreground syntax tree.
1734 ///
1735 /// Then takes a stable snapshot of the syntax map before unlocking it.
1736 /// The snapshot with the interpolated edits is sent to a background thread,
1737 /// where we ask Tree-sitter to perform an incremental parse.
1738 ///
1739    /// Meanwhile, in the foreground, if `may_block` is true, we block the main
1740    /// thread for up to the buffer's sync parse timeout, waiting for the parse to
1741    /// complete. If the parse finishes within that window, we proceed synchronously.
1742 ///
1743    /// If we time out waiting for the parse, we spawn a second task that waits
1744    /// until the parse does complete, and we return with the interpolated tree still
1745    /// in the foreground. When the background parse completes, it calls back into
1746    /// the main thread and replaces the foreground parse state.
1747 ///
1748 /// If the buffer or grammar changed since the start of the background parse,
1749 /// initiate an additional reparse recursively. To avoid concurrent parses
1750 /// for the same buffer, we only initiate a new parse if we are not already
1751 /// parsing in the background.
1752 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1753 if self.text.version() != *self.tree_sitter_data.version() {
1754 self.invalidate_tree_sitter_data(self.text.snapshot());
1755 }
1756 if self.reparse.is_some() {
1757 return;
1758 }
1759 let language = if let Some(language) = self.language.clone() {
1760 language
1761 } else {
1762 return;
1763 };
1764
1765 let text = self.text_snapshot();
1766 let parsed_version = self.version();
1767
1768 let mut syntax_map = self.syntax_map.lock();
1769 syntax_map.interpolate(&text);
1770 let language_registry = syntax_map.language_registry();
1771 let mut syntax_snapshot = syntax_map.snapshot();
1772 drop(syntax_map);
1773
1774 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1775 if may_block && let Some(sync_parse_timeout) = self.sync_parse_timeout {
1776 if let Ok(()) = syntax_snapshot.reparse_with_timeout(
1777 &text,
1778 language_registry.clone(),
1779 language.clone(),
1780 sync_parse_timeout,
1781 ) {
1782 self.did_finish_parsing(syntax_snapshot, Duration::from_millis(300), cx);
1783 self.reparse = None;
1784 return;
1785 }
1786 }
1787
1788 let parse_task = cx.background_spawn({
1789 let language = language.clone();
1790 let language_registry = language_registry.clone();
1791 async move {
1792 syntax_snapshot.reparse(&text, language_registry, language);
1793 syntax_snapshot
1794 }
1795 });
1796
1797 self.reparse = Some(cx.spawn(async move |this, cx| {
1798 let new_syntax_map = parse_task.await;
1799 this.update(cx, move |this, cx| {
1800 let grammar_changed = || {
1801 this.language
1802 .as_ref()
1803 .is_none_or(|current_language| !Arc::ptr_eq(&language, current_language))
1804 };
1805 let language_registry_changed = || {
1806 new_syntax_map.contains_unknown_injections()
1807 && language_registry.is_some_and(|registry| {
1808 registry.version() != new_syntax_map.language_registry_version()
1809 })
1810 };
1811 let parse_again = this.version.changed_since(&parsed_version)
1812 || language_registry_changed()
1813 || grammar_changed();
1814 this.did_finish_parsing(new_syntax_map, Duration::ZERO, cx);
1815 this.reparse = None;
1816 if parse_again {
1817 this.reparse(cx, false);
1818 }
1819 })
1820 .ok();
1821 }));
1822 }
1823
1824 fn did_finish_parsing(
1825 &mut self,
1826 syntax_snapshot: SyntaxSnapshot,
1827 block_budget: Duration,
1828 cx: &mut Context<Self>,
1829 ) {
1830 self.non_text_state_update_count += 1;
1831 self.syntax_map.lock().did_parse(syntax_snapshot);
1832 self.was_changed();
1833 self.request_autoindent(cx, block_budget);
1834 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1835 self.invalidate_tree_sitter_data(self.text.snapshot());
1836 cx.emit(BufferEvent::Reparsed);
1837 cx.notify();
1838 }
1839
1840 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1841 self.parse_status.1.clone()
1842 }
1843
1844    /// Waits until the buffer is no longer parsing.
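    ///
    /// A minimal usage sketch (not compiled here; assumes an async test context
    /// providing `cx` and an existing `buffer` entity):
    ///
    /// ```ignore
    /// let parsed = buffer.update(cx, |buffer, cx| {
    ///     buffer.edit([(0..0, "fn main() {}\n")], None, cx);
    ///     // `parsing_idle` resolves once the reparse triggered by the edit finishes.
    ///     buffer.parsing_idle()
    /// });
    /// parsed.await;
    /// ```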
1845 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1846 let mut parse_status = self.parse_status();
1847 async move {
1848 while *parse_status.borrow() != ParseStatus::Idle {
1849 if parse_status.changed().await.is_err() {
1850 break;
1851 }
1852 }
1853 }
1854 }
1855
1856 /// Assign to the buffer a set of diagnostics created by a given language server.
1857 pub fn update_diagnostics(
1858 &mut self,
1859 server_id: LanguageServerId,
1860 diagnostics: DiagnosticSet,
1861 cx: &mut Context<Self>,
1862 ) {
1863 let lamport_timestamp = self.text.lamport_clock.tick();
1864 let op = Operation::UpdateDiagnostics {
1865 server_id,
1866 diagnostics: diagnostics.iter().cloned().collect(),
1867 lamport_timestamp,
1868 };
1869
1870 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1871 self.send_operation(op, true, cx);
1872 }
1873
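    /// Returns the diagnostics assigned to this buffer by the given language
    /// server, or by all language servers if `for_server` is `None`.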
1874 pub fn buffer_diagnostics(
1875 &self,
1876 for_server: Option<LanguageServerId>,
1877 ) -> Vec<&DiagnosticEntry<Anchor>> {
1878 match for_server {
1879 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1880 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1881 Err(_) => Vec::new(),
1882 },
1883 None => self
1884 .diagnostics
1885 .iter()
1886 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1887 .collect(),
1888 }
1889 }
1890
1891 fn request_autoindent(&mut self, cx: &mut Context<Self>, block_budget: Duration) {
1892 if let Some(indent_sizes) = self.compute_autoindents() {
1893 let indent_sizes = cx.background_spawn(indent_sizes);
1894 match cx
1895 .background_executor()
1896 .block_with_timeout(block_budget, indent_sizes)
1897 {
1898 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1899 Err(indent_sizes) => {
1900 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1901 let indent_sizes = indent_sizes.await;
1902 this.update(cx, |this, cx| {
1903 this.apply_autoindents(indent_sizes, cx);
1904 })
1905 .ok();
1906 }));
1907 }
1908 }
1909 } else {
1910 self.autoindent_requests.clear();
1911 for tx in self.wait_for_autoindent_txs.drain(..) {
1912 tx.send(()).ok();
1913 }
1914 }
1915 }
1916
1917 fn compute_autoindents(
1918 &self,
1919 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1920 let max_rows_between_yields = 100;
1921 let snapshot = self.snapshot();
1922 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1923 return None;
1924 }
1925
1926 let autoindent_requests = self.autoindent_requests.clone();
1927 Some(async move {
1928 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1929 for request in autoindent_requests {
1930 // Resolve each edited range to its row in the current buffer and in the
1931 // buffer before this batch of edits.
1932 let mut row_ranges = Vec::new();
1933 let mut old_to_new_rows = BTreeMap::new();
1934 let mut language_indent_sizes_by_new_row = Vec::new();
1935 for entry in &request.entries {
1936 let position = entry.range.start;
1937 let new_row = position.to_point(&snapshot).row;
1938 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1939 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1940
1941 if let Some(old_row) = entry.old_row {
1942 old_to_new_rows.insert(old_row, new_row);
1943 }
1944 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1945 }
1946
1947 // Build a map containing the suggested indentation for each of the edited lines
1948 // with respect to the state of the buffer before these edits. This map is keyed
1949 // by the rows for these lines in the current state of the buffer.
1950 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1951 let old_edited_ranges =
1952 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1953 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1954 let mut language_indent_size = IndentSize::default();
1955 for old_edited_range in old_edited_ranges {
1956 let suggestions = request
1957 .before_edit
1958 .suggest_autoindents(old_edited_range.clone())
1959 .into_iter()
1960 .flatten();
1961 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1962 if let Some(suggestion) = suggestion {
1963 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1964
1965 // Find the indent size based on the language for this row.
1966 while let Some((row, size)) = language_indent_sizes.peek() {
1967 if *row > new_row {
1968 break;
1969 }
1970 language_indent_size = *size;
1971 language_indent_sizes.next();
1972 }
1973
1974 let suggested_indent = old_to_new_rows
1975 .get(&suggestion.basis_row)
1976 .and_then(|from_row| {
1977 Some(old_suggestions.get(from_row).copied()?.0)
1978 })
1979 .unwrap_or_else(|| {
1980 request
1981 .before_edit
1982 .indent_size_for_line(suggestion.basis_row)
1983 })
1984 .with_delta(suggestion.delta, language_indent_size);
1985 old_suggestions
1986 .insert(new_row, (suggested_indent, suggestion.within_error));
1987 }
1988 }
1989 yield_now().await;
1990 }
1991
1992 // Compute new suggestions for each line, but only include them in the result
1993 // if they differ from the old suggestion for that line.
1994 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1995 let mut language_indent_size = IndentSize::default();
1996 for (row_range, original_indent_column) in row_ranges {
1997 let new_edited_row_range = if request.is_block_mode {
1998 row_range.start..row_range.start + 1
1999 } else {
2000 row_range.clone()
2001 };
2002
2003 let suggestions = snapshot
2004 .suggest_autoindents(new_edited_row_range.clone())
2005 .into_iter()
2006 .flatten();
2007 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
2008 if let Some(suggestion) = suggestion {
2009 // Find the indent size based on the language for this row.
2010 while let Some((row, size)) = language_indent_sizes.peek() {
2011 if *row > new_row {
2012 break;
2013 }
2014 language_indent_size = *size;
2015 language_indent_sizes.next();
2016 }
2017
2018 let suggested_indent = indent_sizes
2019 .get(&suggestion.basis_row)
2020 .copied()
2021 .map(|e| e.0)
2022 .unwrap_or_else(|| {
2023 snapshot.indent_size_for_line(suggestion.basis_row)
2024 })
2025 .with_delta(suggestion.delta, language_indent_size);
2026
2027 if old_suggestions.get(&new_row).is_none_or(
2028 |(old_indentation, was_within_error)| {
2029 suggested_indent != *old_indentation
2030 && (!suggestion.within_error || *was_within_error)
2031 },
2032 ) {
2033 indent_sizes.insert(
2034 new_row,
2035 (suggested_indent, request.ignore_empty_lines),
2036 );
2037 }
2038 }
2039 }
2040
2041 if let (true, Some(original_indent_column)) =
2042 (request.is_block_mode, original_indent_column)
2043 {
2044 let new_indent =
2045 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2046 *indent
2047 } else {
2048 snapshot.indent_size_for_line(row_range.start)
2049 };
2050 let delta = new_indent.len as i64 - original_indent_column as i64;
2051 if delta != 0 {
2052 for row in row_range.skip(1) {
2053 indent_sizes.entry(row).or_insert_with(|| {
2054 let mut size = snapshot.indent_size_for_line(row);
2055 if size.kind == new_indent.kind {
2056 match delta.cmp(&0) {
2057 Ordering::Greater => size.len += delta as u32,
2058 Ordering::Less => {
2059 size.len = size.len.saturating_sub(-delta as u32)
2060 }
2061 Ordering::Equal => {}
2062 }
2063 }
2064 (size, request.ignore_empty_lines)
2065 });
2066 }
2067 }
2068 }
2069
2070 yield_now().await;
2071 }
2072 }
2073
2074 indent_sizes
2075 .into_iter()
2076 .filter_map(|(row, (indent, ignore_empty_lines))| {
2077 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2078 None
2079 } else {
2080 Some((row, indent))
2081 }
2082 })
2083 .collect()
2084 })
2085 }
2086
2087 fn apply_autoindents(
2088 &mut self,
2089 indent_sizes: BTreeMap<u32, IndentSize>,
2090 cx: &mut Context<Self>,
2091 ) {
2092 self.autoindent_requests.clear();
2093 for tx in self.wait_for_autoindent_txs.drain(..) {
2094 tx.send(()).ok();
2095 }
2096
2097 let edits: Vec<_> = indent_sizes
2098 .into_iter()
2099 .filter_map(|(row, indent_size)| {
2100 let current_size = indent_size_for_line(self, row);
2101 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2102 })
2103 .collect();
2104
2105 let preserve_preview = self.preserve_preview();
2106 self.edit(edits, None, cx);
2107 if preserve_preview {
2108 self.refresh_preview();
2109 }
2110 }
2111
2112 /// Create a minimal edit that will cause the given row to be indented
2113 /// with the given size. After applying this edit, the length of the line
2114 /// will always be at least `new_size.len`.
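    ///
    /// A small illustrative sketch, assuming space-based indentation on both sides:
    ///
    /// ```ignore
    /// // Growing a 2-space indent to a 4-space indent inserts two spaces at the
    /// // start of the row; shrinking or switching indent kinds instead produces a
    /// // replacement of the existing indentation.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     0,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(0, 0)..Point::new(0, 0), "  ".to_string())));
    /// ```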
2115 pub fn edit_for_indent_size_adjustment(
2116 row: u32,
2117 current_size: IndentSize,
2118 new_size: IndentSize,
2119 ) -> Option<(Range<Point>, String)> {
2120 if new_size.kind == current_size.kind {
2121            match new_size.len.cmp(&current_size.len) {
2122 Ordering::Greater => {
2123 let point = Point::new(row, 0);
2124 Some((
2125 point..point,
2126 iter::repeat(new_size.char())
2127 .take((new_size.len - current_size.len) as usize)
2128 .collect::<String>(),
2129 ))
2130 }
2131
2132 Ordering::Less => Some((
2133 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2134 String::new(),
2135 )),
2136
2137 Ordering::Equal => None,
2138 }
2139 } else {
2140 Some((
2141 Point::new(row, 0)..Point::new(row, current_size.len),
2142 iter::repeat(new_size.char())
2143 .take(new_size.len as usize)
2144 .collect::<String>(),
2145 ))
2146 }
2147 }
2148
2149 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2150 /// and the given new text.
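    ///
    /// A minimal round-trip sketch (not compiled here; assumes an async test
    /// context providing `cx` and an existing `buffer` entity):
    ///
    /// ```ignore
    /// let diff = buffer.update(cx, |buffer, cx| buffer.diff("new contents\n".to_string(), cx));
    /// let diff = diff.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     // Applies the computed edits, adjusting for any edits made since `diff` was taken.
    ///     buffer.apply_diff(diff, cx);
    /// });
    /// ```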
2151 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2152 let old_text = self.as_rope().clone();
2153 let base_version = self.version();
2154 cx.background_executor()
2155 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2156 let old_text = old_text.to_string();
2157 let line_ending = LineEnding::detect(&new_text);
2158 LineEnding::normalize(&mut new_text);
2159 let edits = text_diff(&old_text, &new_text);
2160 Diff {
2161 base_version,
2162 line_ending,
2163 edits,
2164 }
2165 })
2166 }
2167
2168 /// Spawns a background task that searches the buffer for any whitespace
2169    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2170 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2171 let old_text = self.as_rope().clone();
2172 let line_ending = self.line_ending();
2173 let base_version = self.version();
2174 cx.background_spawn(async move {
2175 let ranges = trailing_whitespace_ranges(&old_text);
2176 let empty = Arc::<str>::from("");
2177 Diff {
2178 base_version,
2179 line_ending,
2180 edits: ranges
2181 .into_iter()
2182 .map(|range| (range, empty.clone()))
2183 .collect(),
2184 }
2185 })
2186 }
2187
2188    /// Ensures that the buffer ends with a single newline character and no
2189    /// other trailing whitespace. Does nothing if the buffer is empty.
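    ///
    /// For example (sketch; assumes a `cx: &mut Context<Buffer>`), a buffer whose text is
    /// `"hello  \n\n"` ends up containing `"hello\n"`:
    ///
    /// ```ignore
    /// buffer.set_text("hello  \n\n", cx);
    /// buffer.ensure_final_newline(cx);
    /// // All trailing whitespace after "hello" is replaced by a single newline.
    /// ```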
2190 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2191 let len = self.len();
2192 if len == 0 {
2193 return;
2194 }
2195 let mut offset = len;
2196 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2197 let non_whitespace_len = chunk
2198 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2199 .len();
2200 offset -= chunk.len();
2201 offset += non_whitespace_len;
2202 if non_whitespace_len != 0 {
2203 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2204 return;
2205 }
2206 break;
2207 }
2208 }
2209 self.edit([(offset..len, "\n")], None, cx);
2210 }
2211
2212    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2213    /// calculated, the diff is adjusted to account for those changes, and any parts of
2214    /// the diff that conflict with those changes are discarded.
2215 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2216 let snapshot = self.snapshot();
2217 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2218 let mut delta = 0;
2219 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2220 while let Some(edit_since) = edits_since.peek() {
2221 // If the edit occurs after a diff hunk, then it does not
2222 // affect that hunk.
2223 if edit_since.old.start > range.end {
2224 break;
2225 }
2226 // If the edit precedes the diff hunk, then adjust the hunk
2227 // to reflect the edit.
2228 else if edit_since.old.end < range.start {
2229 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2230 edits_since.next();
2231 }
2232 // If the edit intersects a diff hunk, then discard that hunk.
2233 else {
2234 return None;
2235 }
2236 }
2237
2238 let start = (range.start as i64 + delta) as usize;
2239 let end = (range.end as i64 + delta) as usize;
2240 Some((start..end, new_text))
2241 });
2242
2243 self.start_transaction();
2244 self.text.set_line_ending(diff.line_ending);
2245 self.edit(adjusted_edits, None, cx);
2246 self.end_transaction(cx)
2247 }
2248
2249 pub fn has_unsaved_edits(&self) -> bool {
2250 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2251
2252 if last_version == self.version {
2253 self.has_unsaved_edits
2254 .set((last_version, has_unsaved_edits));
2255 return has_unsaved_edits;
2256 }
2257
2258 let has_edits = self.has_edits_since(&self.saved_version);
2259 self.has_unsaved_edits
2260 .set((self.version.clone(), has_edits));
2261 has_edits
2262 }
2263
2264 /// Checks if the buffer has unsaved changes.
2265 pub fn is_dirty(&self) -> bool {
2266 if self.capability == Capability::ReadOnly {
2267 return false;
2268 }
2269 if self.has_conflict {
2270 return true;
2271 }
2272 match self.file.as_ref().map(|f| f.disk_state()) {
2273 Some(DiskState::New) | Some(DiskState::Deleted) => {
2274 !self.is_empty() && self.has_unsaved_edits()
2275 }
2276 _ => self.has_unsaved_edits(),
2277 }
2278 }
2279
2280 /// Marks the buffer as having a conflict regardless of current buffer state.
2281 pub fn set_conflict(&mut self) {
2282 self.has_conflict = true;
2283 }
2284
2285 /// Checks if the buffer and its file have both changed since the buffer
2286 /// was last saved or reloaded.
2287 pub fn has_conflict(&self) -> bool {
2288 if self.has_conflict {
2289 return true;
2290 }
2291 let Some(file) = self.file.as_ref() else {
2292 return false;
2293 };
2294 match file.disk_state() {
2295 DiskState::New => false,
2296 DiskState::Present { mtime } => match self.saved_mtime {
2297 Some(saved_mtime) => {
2298 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2299 }
2300 None => true,
2301 },
2302 DiskState::Deleted => false,
2303 DiskState::Historic { .. } => false,
2304 }
2305 }
2306
2307 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2308 pub fn subscribe(&mut self) -> Subscription<usize> {
2309 self.text.subscribe()
2310 }
2311
2312 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2313 ///
2314 /// This allows downstream code to check if the buffer's text has changed without
2315    /// waiting for an effect cycle, which would be required if using events.
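    ///
    /// A minimal sketch of the intended pattern (assumes a `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// buffer.edit([(0..0, "x")], None, cx);
    /// // The bit is flipped synchronously as part of the edit, before any effects run.
    /// assert!(changed.get());
    /// ```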
2316 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2317 if let Err(ix) = self
2318 .change_bits
2319 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2320 {
2321 self.change_bits.insert(ix, bit);
2322 }
2323 }
2324
2325 /// Set the change bit for all "listeners".
2326 fn was_changed(&mut self) {
2327 self.change_bits.retain(|change_bit| {
2328 change_bit
2329 .upgrade()
2330 .inspect(|bit| {
2331 _ = bit.replace(true);
2332 })
2333 .is_some()
2334 });
2335 }
2336
2337 /// Starts a transaction, if one is not already in-progress. When undoing or
2338 /// redoing edits, all of the edits performed within a transaction are undone
2339 /// or redone together.
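    ///
    /// A minimal grouping sketch (assumes a `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// buffer.end_transaction(cx);
    /// // A single undo now reverts both edits.
    /// buffer.undo(cx);
    /// ```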
2340 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2341 self.start_transaction_at(Instant::now())
2342 }
2343
2344 /// Starts a transaction, providing the current time. Subsequent transactions
2345 /// that occur within a short period of time will be grouped together. This
2346 /// is controlled by the buffer's undo grouping duration.
2347 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2348 self.transaction_depth += 1;
2349 if self.was_dirty_before_starting_transaction.is_none() {
2350 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2351 }
2352 self.text.start_transaction_at(now)
2353 }
2354
2355 /// Terminates the current transaction, if this is the outermost transaction.
2356 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2357 self.end_transaction_at(Instant::now(), cx)
2358 }
2359
2360 /// Terminates the current transaction, providing the current time. Subsequent transactions
2361 /// that occur within a short period of time will be grouped together. This
2362 /// is controlled by the buffer's undo grouping duration.
2363 pub fn end_transaction_at(
2364 &mut self,
2365 now: Instant,
2366 cx: &mut Context<Self>,
2367 ) -> Option<TransactionId> {
2368 assert!(self.transaction_depth > 0);
2369 self.transaction_depth -= 1;
2370 let was_dirty = if self.transaction_depth == 0 {
2371 self.was_dirty_before_starting_transaction.take().unwrap()
2372 } else {
2373 false
2374 };
2375 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2376 self.did_edit(&start_version, was_dirty, cx);
2377 Some(transaction_id)
2378 } else {
2379 None
2380 }
2381 }
2382
2383 /// Manually add a transaction to the buffer's undo history.
2384 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2385 self.text.push_transaction(transaction, now);
2386 }
2387
2388 /// Differs from `push_transaction` in that it does not clear the redo
2389 /// stack. Intended to be used to create a parent transaction to merge
2390 /// potential child transactions into.
2391 ///
2392 /// The caller is responsible for removing it from the undo history using
2393 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2394 /// are merged into this transaction, the caller is responsible for ensuring
2395    /// the redo stack is cleared. The easiest way to ensure the redo stack is
2396    /// cleared is to create transactions with the usual `start_transaction` and
2397    /// `end_transaction` methods and merge the resulting transactions into
2398    /// the transaction created by this method.
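    ///
    /// A sketch of the merge pattern described above (assumes a `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     // Fold the child transaction into the placeholder parent.
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     // No edits were made, so drop the placeholder from the undo history.
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```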
2399 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2400 self.text.push_empty_transaction(now)
2401 }
2402
2403 /// Prevent the last transaction from being grouped with any subsequent transactions,
2404    /// even if they occur within the buffer's undo grouping duration.
2405 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2406 self.text.finalize_last_transaction()
2407 }
2408
2409 /// Manually group all changes since a given transaction.
2410 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2411 self.text.group_until_transaction(transaction_id);
2412 }
2413
2414    /// Manually remove a transaction from the buffer's undo history.
2415 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2416 self.text.forget_transaction(transaction_id)
2417 }
2418
2419    /// Retrieve a transaction from the buffer's undo history.
2420 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2421 self.text.get_transaction(transaction_id)
2422 }
2423
2424 /// Manually merge two transactions in the buffer's undo history.
2425 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2426 self.text.merge_transactions(transaction, destination);
2427 }
2428
2429 /// Waits for the buffer to receive operations with the given timestamps.
2430 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2431 &mut self,
2432 edit_ids: It,
2433 ) -> impl Future<Output = Result<()>> + use<It> {
2434 self.text.wait_for_edits(edit_ids)
2435 }
2436
2437 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2438 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2439 &mut self,
2440 anchors: It,
2441 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2442 self.text.wait_for_anchors(anchors)
2443 }
2444
2445 /// Waits for the buffer to receive operations up to the given version.
2446 pub fn wait_for_version(
2447 &mut self,
2448 version: clock::Global,
2449 ) -> impl Future<Output = Result<()>> + use<> {
2450 self.text.wait_for_version(version)
2451 }
2452
2453    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2454    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2455 pub fn give_up_waiting(&mut self) {
2456 self.text.give_up_waiting();
2457 }
2458
2459 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2460 let mut rx = None;
2461 if !self.autoindent_requests.is_empty() {
2462 let channel = oneshot::channel();
2463 self.wait_for_autoindent_txs.push(channel.0);
2464 rx = Some(channel.1);
2465 }
2466 rx
2467 }
2468
2469    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2470 pub fn set_active_selections(
2471 &mut self,
2472 selections: Arc<[Selection<Anchor>]>,
2473 line_mode: bool,
2474 cursor_shape: CursorShape,
2475 cx: &mut Context<Self>,
2476 ) {
2477 let lamport_timestamp = self.text.lamport_clock.tick();
2478 self.remote_selections.insert(
2479 self.text.replica_id(),
2480 SelectionSet {
2481 selections: selections.clone(),
2482 lamport_timestamp,
2483 line_mode,
2484 cursor_shape,
2485 },
2486 );
2487 self.send_operation(
2488 Operation::UpdateSelections {
2489 selections,
2490 line_mode,
2491 lamport_timestamp,
2492 cursor_shape,
2493 },
2494 true,
2495 cx,
2496 );
2497 self.non_text_state_update_count += 1;
2498 cx.notify();
2499 }
2500
2501 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2502 /// this replica.
2503 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2504 if self
2505 .remote_selections
2506 .get(&self.text.replica_id())
2507 .is_none_or(|set| !set.selections.is_empty())
2508 {
2509 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2510 }
2511 }
2512
2513 pub fn set_agent_selections(
2514 &mut self,
2515 selections: Arc<[Selection<Anchor>]>,
2516 line_mode: bool,
2517 cursor_shape: CursorShape,
2518 cx: &mut Context<Self>,
2519 ) {
2520 let lamport_timestamp = self.text.lamport_clock.tick();
2521 self.remote_selections.insert(
2522 ReplicaId::AGENT,
2523 SelectionSet {
2524 selections,
2525 lamport_timestamp,
2526 line_mode,
2527 cursor_shape,
2528 },
2529 );
2530 self.non_text_state_update_count += 1;
2531 cx.notify();
2532 }
2533
2534 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2535 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2536 }
2537
2538 /// Replaces the buffer's entire text.
2539 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2540 where
2541 T: Into<Arc<str>>,
2542 {
2543 self.autoindent_requests.clear();
2544 self.edit([(0..self.len(), text)], None, cx)
2545 }
2546
2547 /// Appends the given text to the end of the buffer.
2548 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2549 where
2550 T: Into<Arc<str>>,
2551 {
2552 self.edit([(self.len()..self.len(), text)], None, cx)
2553 }
2554
2555 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2556 /// delete, and a string of text to insert at that location. Adjacent edits are coalesced.
2557 ///
2558 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2559 /// request for the edited ranges, which will be processed when the buffer finishes
2560 /// parsing.
2561 ///
2562    /// Parsing takes place at the end of a transaction, and may run synchronously
2563    /// or asynchronously, depending on the changes.
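    ///
    /// A minimal sketch (assumes a `cx: &mut Context<Buffer>` and a language with
    /// indentation rules assigned to the buffer):
    ///
    /// ```ignore
    /// // Insert a block at the start of the buffer, auto-indenting each inserted line.
    /// buffer.edit(
    ///     [(0..0, "if x {\n    y();\n}\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```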
2564 pub fn edit<I, S, T>(
2565 &mut self,
2566 edits_iter: I,
2567 autoindent_mode: Option<AutoindentMode>,
2568 cx: &mut Context<Self>,
2569 ) -> Option<clock::Lamport>
2570 where
2571 I: IntoIterator<Item = (Range<S>, T)>,
2572 S: ToOffset,
2573 T: Into<Arc<str>>,
2574 {
2575 self.edit_internal(edits_iter, autoindent_mode, true, cx)
2576 }
2577
2578 /// Like [`edit`](Self::edit), but does not coalesce adjacent edits.
2579 pub fn edit_non_coalesce<I, S, T>(
2580 &mut self,
2581 edits_iter: I,
2582 autoindent_mode: Option<AutoindentMode>,
2583 cx: &mut Context<Self>,
2584 ) -> Option<clock::Lamport>
2585 where
2586 I: IntoIterator<Item = (Range<S>, T)>,
2587 S: ToOffset,
2588 T: Into<Arc<str>>,
2589 {
2590 self.edit_internal(edits_iter, autoindent_mode, false, cx)
2591 }
2592
2593 fn edit_internal<I, S, T>(
2594 &mut self,
2595 edits_iter: I,
2596 autoindent_mode: Option<AutoindentMode>,
2597 coalesce_adjacent: bool,
2598 cx: &mut Context<Self>,
2599 ) -> Option<clock::Lamport>
2600 where
2601 I: IntoIterator<Item = (Range<S>, T)>,
2602 S: ToOffset,
2603 T: Into<Arc<str>>,
2604 {
2605 // Skip invalid edits and coalesce contiguous ones.
2606 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2607
2608 for (range, new_text) in edits_iter {
2609 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2610
2611 if range.start > range.end {
2612 mem::swap(&mut range.start, &mut range.end);
2613 }
2614 let new_text = new_text.into();
2615 if !new_text.is_empty() || !range.is_empty() {
2616 let prev_edit = edits.last_mut();
2617 let should_coalesce = prev_edit.as_ref().is_some_and(|(prev_range, _)| {
2618 if coalesce_adjacent {
2619 prev_range.end >= range.start
2620 } else {
2621 prev_range.end > range.start
2622 }
2623 });
2624
2625 if let Some((prev_range, prev_text)) = prev_edit
2626 && should_coalesce
2627 {
2628 prev_range.end = cmp::max(prev_range.end, range.end);
2629 *prev_text = format!("{prev_text}{new_text}").into();
2630 } else {
2631 edits.push((range, new_text));
2632 }
2633 }
2634 }
2635 if edits.is_empty() {
2636 return None;
2637 }
2638
2639 self.start_transaction();
2640 self.pending_autoindent.take();
2641 let autoindent_request = autoindent_mode
2642 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2643
2644 let edit_operation = self.text.edit(edits.iter().cloned());
2645 let edit_id = edit_operation.timestamp();
2646
2647 if let Some((before_edit, mode)) = autoindent_request {
2648 let mut delta = 0isize;
2649 let mut previous_setting = None;
2650 let entries: Vec<_> = edits
2651 .into_iter()
2652 .enumerate()
2653 .zip(&edit_operation.as_edit().unwrap().new_text)
2654 .filter(|((_, (range, _)), _)| {
2655 let language = before_edit.language_at(range.start);
2656 let language_id = language.map(|l| l.id());
2657 if let Some((cached_language_id, auto_indent)) = previous_setting
2658 && cached_language_id == language_id
2659 {
2660 auto_indent
2661 } else {
2662 // The auto-indent setting is not present in editorconfigs, hence
2663 // we can avoid passing the file here.
2664 let auto_indent =
2665 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2666 previous_setting = Some((language_id, auto_indent));
2667 auto_indent
2668 }
2669 })
2670 .map(|((ix, (range, _)), new_text)| {
2671 let new_text_length = new_text.len();
2672 let old_start = range.start.to_point(&before_edit);
2673 let new_start = (delta + range.start as isize) as usize;
2674 let range_len = range.end - range.start;
2675 delta += new_text_length as isize - range_len as isize;
2676
2677 // Decide what range of the insertion to auto-indent, and whether
2678 // the first line of the insertion should be considered a newly-inserted line
2679 // or an edit to an existing line.
2680 let mut range_of_insertion_to_indent = 0..new_text_length;
2681 let mut first_line_is_new = true;
2682
2683 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2684 let old_line_end = before_edit.line_len(old_start.row);
2685
2686 if old_start.column > old_line_start {
2687 first_line_is_new = false;
2688 }
2689
2690 if !new_text.contains('\n')
2691 && (old_start.column + (range_len as u32) < old_line_end
2692 || old_line_end == old_line_start)
2693 {
2694 first_line_is_new = false;
2695 }
2696
2697 // When inserting text starting with a newline, avoid auto-indenting the
2698 // previous line.
2699 if new_text.starts_with('\n') {
2700 range_of_insertion_to_indent.start += 1;
2701 first_line_is_new = true;
2702 }
2703
2704 let mut original_indent_column = None;
2705 if let AutoindentMode::Block {
2706 original_indent_columns,
2707 } = &mode
2708 {
2709 original_indent_column = Some(if new_text.starts_with('\n') {
2710 indent_size_for_text(
2711 new_text[range_of_insertion_to_indent.clone()].chars(),
2712 )
2713 .len
2714 } else {
2715 original_indent_columns
2716 .get(ix)
2717 .copied()
2718 .flatten()
2719 .unwrap_or_else(|| {
2720 indent_size_for_text(
2721 new_text[range_of_insertion_to_indent.clone()].chars(),
2722 )
2723 .len
2724 })
2725 });
2726
2727 // Avoid auto-indenting the line after the edit.
2728 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2729 range_of_insertion_to_indent.end -= 1;
2730 }
2731 }
2732
2733 AutoindentRequestEntry {
2734 original_indent_column,
2735 old_row: if first_line_is_new {
2736 None
2737 } else {
2738 Some(old_start.row)
2739 },
2740 indent_size: before_edit.language_indent_size_at(range.start, cx),
2741 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2742 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2743 }
2744 })
2745 .collect();
2746
2747 if !entries.is_empty() {
2748 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2749 before_edit,
2750 entries,
2751 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2752 ignore_empty_lines: false,
2753 }));
2754 }
2755 }
2756
2757 self.end_transaction(cx);
2758 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2759 Some(edit_id)
2760 }
2761
2762 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2763 self.was_changed();
2764
2765 if self.edits_since::<usize>(old_version).next().is_none() {
2766 return;
2767 }
2768
2769 self.reparse(cx, true);
2770 cx.emit(BufferEvent::Edited);
2771 if was_dirty != self.is_dirty() {
2772 cx.emit(BufferEvent::DirtyChanged);
2773 }
2774 cx.notify();
2775 }
2776
2777 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2778 where
2779 I: IntoIterator<Item = Range<T>>,
2780 T: ToOffset + Copy,
2781 {
2782 let before_edit = self.snapshot();
2783 let entries = ranges
2784 .into_iter()
2785 .map(|range| AutoindentRequestEntry {
2786 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2787 old_row: None,
2788 indent_size: before_edit.language_indent_size_at(range.start, cx),
2789 original_indent_column: None,
2790 })
2791 .collect();
2792 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2793 before_edit,
2794 entries,
2795 is_block_mode: false,
2796 ignore_empty_lines: true,
2797 }));
2798 self.request_autoindent(cx, Duration::from_micros(300));
2799 }
2800
2801    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2802    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
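    ///
    /// For example (sketch; assumes a `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// // Create an empty line at row 2, with blank lines above and below it if needed.
    /// let start = buffer.insert_empty_line(Point::new(2, 0), true, true, cx);
    /// ```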
2803 pub fn insert_empty_line(
2804 &mut self,
2805 position: impl ToPoint,
2806 space_above: bool,
2807 space_below: bool,
2808 cx: &mut Context<Self>,
2809 ) -> Point {
2810 let mut position = position.to_point(self);
2811
2812 self.start_transaction();
2813
2814 self.edit(
2815 [(position..position, "\n")],
2816 Some(AutoindentMode::EachLine),
2817 cx,
2818 );
2819
2820 if position.column > 0 {
2821 position += Point::new(1, 0);
2822 }
2823
2824 if !self.is_line_blank(position.row) {
2825 self.edit(
2826 [(position..position, "\n")],
2827 Some(AutoindentMode::EachLine),
2828 cx,
2829 );
2830 }
2831
2832 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2833 self.edit(
2834 [(position..position, "\n")],
2835 Some(AutoindentMode::EachLine),
2836 cx,
2837 );
2838 position.row += 1;
2839 }
2840
2841 if space_below
2842 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2843 {
2844 self.edit(
2845 [(position..position, "\n")],
2846 Some(AutoindentMode::EachLine),
2847 cx,
2848 );
2849 }
2850
2851 self.end_transaction(cx);
2852
2853 position
2854 }
2855
2856 /// Applies the given remote operations to the buffer.
2857 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2858 self.pending_autoindent.take();
2859 let was_dirty = self.is_dirty();
2860 let old_version = self.version.clone();
2861 let mut deferred_ops = Vec::new();
2862 let buffer_ops = ops
2863 .into_iter()
2864 .filter_map(|op| match op {
2865 Operation::Buffer(op) => Some(op),
2866 _ => {
2867 if self.can_apply_op(&op) {
2868 self.apply_op(op, cx);
2869 } else {
2870 deferred_ops.push(op);
2871 }
2872 None
2873 }
2874 })
2875 .collect::<Vec<_>>();
2876 for operation in buffer_ops.iter() {
2877 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2878 }
2879 self.text.apply_ops(buffer_ops);
2880 self.deferred_ops.insert(deferred_ops);
2881 self.flush_deferred_ops(cx);
2882 self.did_edit(&old_version, was_dirty, cx);
2883 // Notify independently of whether the buffer was edited as the operations could include a
2884 // selection update.
2885 cx.notify();
2886 }
2887
2888 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2889 let mut deferred_ops = Vec::new();
2890 for op in self.deferred_ops.drain().iter().cloned() {
2891 if self.can_apply_op(&op) {
2892 self.apply_op(op, cx);
2893 } else {
2894 deferred_ops.push(op);
2895 }
2896 }
2897 self.deferred_ops.insert(deferred_ops);
2898 }
2899
2900 pub fn has_deferred_ops(&self) -> bool {
2901 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2902 }
2903
2904 fn can_apply_op(&self, operation: &Operation) -> bool {
2905 match operation {
2906 Operation::Buffer(_) => {
2907 unreachable!("buffer operations should never be applied at this layer")
2908 }
2909 Operation::UpdateDiagnostics {
2910 diagnostics: diagnostic_set,
2911 ..
2912 } => diagnostic_set.iter().all(|diagnostic| {
2913 self.text.can_resolve(&diagnostic.range.start)
2914 && self.text.can_resolve(&diagnostic.range.end)
2915 }),
2916 Operation::UpdateSelections { selections, .. } => selections
2917 .iter()
2918 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2919 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2920 }
2921 }
2922
2923 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2924 match operation {
2925 Operation::Buffer(_) => {
2926 unreachable!("buffer operations should never be applied at this layer")
2927 }
2928 Operation::UpdateDiagnostics {
2929 server_id,
2930 diagnostics: diagnostic_set,
2931 lamport_timestamp,
2932 } => {
2933 let snapshot = self.snapshot();
2934 self.apply_diagnostic_update(
2935 server_id,
2936 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2937 lamport_timestamp,
2938 cx,
2939 );
2940 }
2941 Operation::UpdateSelections {
2942 selections,
2943 lamport_timestamp,
2944 line_mode,
2945 cursor_shape,
2946 } => {
2947 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2948 && set.lamport_timestamp > lamport_timestamp
2949 {
2950 return;
2951 }
2952
2953 self.remote_selections.insert(
2954 lamport_timestamp.replica_id,
2955 SelectionSet {
2956 selections,
2957 lamport_timestamp,
2958 line_mode,
2959 cursor_shape,
2960 },
2961 );
2962 self.text.lamport_clock.observe(lamport_timestamp);
2963 self.non_text_state_update_count += 1;
2964 }
2965 Operation::UpdateCompletionTriggers {
2966 triggers,
2967 lamport_timestamp,
2968 server_id,
2969 } => {
2970 if triggers.is_empty() {
2971 self.completion_triggers_per_language_server
2972 .remove(&server_id);
2973 self.completion_triggers = self
2974 .completion_triggers_per_language_server
2975 .values()
2976 .flat_map(|triggers| triggers.iter().cloned())
2977 .collect();
2978 } else {
2979 self.completion_triggers_per_language_server
2980 .insert(server_id, triggers.iter().cloned().collect());
2981 self.completion_triggers.extend(triggers);
2982 }
2983 self.text.lamport_clock.observe(lamport_timestamp);
2984 }
2985 Operation::UpdateLineEnding {
2986 line_ending,
2987 lamport_timestamp,
2988 } => {
2989 self.text.set_line_ending(line_ending);
2990 self.text.lamport_clock.observe(lamport_timestamp);
2991 }
2992 }
2993 }
2994
2995 fn apply_diagnostic_update(
2996 &mut self,
2997 server_id: LanguageServerId,
2998 diagnostics: DiagnosticSet,
2999 lamport_timestamp: clock::Lamport,
3000 cx: &mut Context<Self>,
3001 ) {
3002 if lamport_timestamp > self.diagnostics_timestamp {
3003 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
3004 if diagnostics.is_empty() {
3005 if let Ok(ix) = ix {
3006 self.diagnostics.remove(ix);
3007 }
3008 } else {
3009 match ix {
3010 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
3011 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
3012 };
3013 }
3014 self.diagnostics_timestamp = lamport_timestamp;
3015 self.non_text_state_update_count += 1;
3016 self.text.lamport_clock.observe(lamport_timestamp);
3017 cx.notify();
3018 cx.emit(BufferEvent::DiagnosticsUpdated);
3019 }
3020 }
3021
3022 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
3023 self.was_changed();
3024 cx.emit(BufferEvent::Operation {
3025 operation,
3026 is_local,
3027 });
3028 }
3029
3030 /// Removes the selections for a given peer.
3031 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
3032 self.remote_selections.remove(&replica_id);
3033 cx.notify();
3034 }
3035
3036 /// Undoes the most recent transaction.
3037 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3038 let was_dirty = self.is_dirty();
3039 let old_version = self.version.clone();
3040
3041 if let Some((transaction_id, operation)) = self.text.undo() {
3042 self.send_operation(Operation::Buffer(operation), true, cx);
3043 self.did_edit(&old_version, was_dirty, cx);
3044 Some(transaction_id)
3045 } else {
3046 None
3047 }
3048 }
3049
3050 /// Manually undoes a specific transaction in the buffer's undo history.
3051 pub fn undo_transaction(
3052 &mut self,
3053 transaction_id: TransactionId,
3054 cx: &mut Context<Self>,
3055 ) -> bool {
3056 let was_dirty = self.is_dirty();
3057 let old_version = self.version.clone();
3058 if let Some(operation) = self.text.undo_transaction(transaction_id) {
3059 self.send_operation(Operation::Buffer(operation), true, cx);
3060 self.did_edit(&old_version, was_dirty, cx);
3061 true
3062 } else {
3063 false
3064 }
3065 }
3066
3067 /// Manually undoes all changes after a given transaction in the buffer's undo history.
3068 pub fn undo_to_transaction(
3069 &mut self,
3070 transaction_id: TransactionId,
3071 cx: &mut Context<Self>,
3072 ) -> bool {
3073 let was_dirty = self.is_dirty();
3074 let old_version = self.version.clone();
3075
3076 let operations = self.text.undo_to_transaction(transaction_id);
3077 let undone = !operations.is_empty();
3078 for operation in operations {
3079 self.send_operation(Operation::Buffer(operation), true, cx);
3080 }
3081 if undone {
3082 self.did_edit(&old_version, was_dirty, cx)
3083 }
3084 undone
3085 }
3086
3087 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3088 let was_dirty = self.is_dirty();
3089 let operation = self.text.undo_operations(counts);
3090 let old_version = self.version.clone();
3091 self.send_operation(Operation::Buffer(operation), true, cx);
3092 self.did_edit(&old_version, was_dirty, cx);
3093 }
3094
3095    /// Redoes the most recently undone transaction.
3096 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3097 let was_dirty = self.is_dirty();
3098 let old_version = self.version.clone();
3099
3100 if let Some((transaction_id, operation)) = self.text.redo() {
3101 self.send_operation(Operation::Buffer(operation), true, cx);
3102 self.did_edit(&old_version, was_dirty, cx);
3103 Some(transaction_id)
3104 } else {
3105 None
3106 }
3107 }
3108
3109    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3110 pub fn redo_to_transaction(
3111 &mut self,
3112 transaction_id: TransactionId,
3113 cx: &mut Context<Self>,
3114 ) -> bool {
3115 let was_dirty = self.is_dirty();
3116 let old_version = self.version.clone();
3117
3118 let operations = self.text.redo_to_transaction(transaction_id);
3119 let redone = !operations.is_empty();
3120 for operation in operations {
3121 self.send_operation(Operation::Buffer(operation), true, cx);
3122 }
3123 if redone {
3124 self.did_edit(&old_version, was_dirty, cx)
3125 }
3126 redone
3127 }
3128
3129 /// Override current completion triggers with the user-provided completion triggers.
3130 pub fn set_completion_triggers(
3131 &mut self,
3132 server_id: LanguageServerId,
3133 triggers: BTreeSet<String>,
3134 cx: &mut Context<Self>,
3135 ) {
3136 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3137 if triggers.is_empty() {
3138 self.completion_triggers_per_language_server
3139 .remove(&server_id);
3140 self.completion_triggers = self
3141 .completion_triggers_per_language_server
3142 .values()
3143 .flat_map(|triggers| triggers.iter().cloned())
3144 .collect();
3145 } else {
3146 self.completion_triggers_per_language_server
3147 .insert(server_id, triggers.clone());
3148 self.completion_triggers.extend(triggers.iter().cloned());
3149 }
3150 self.send_operation(
3151 Operation::UpdateCompletionTriggers {
3152 triggers: triggers.into_iter().collect(),
3153 lamport_timestamp: self.completion_triggers_timestamp,
3154 server_id,
3155 },
3156 true,
3157 cx,
3158 );
3159 cx.notify();
3160 }
3161
3162 /// Returns a list of strings which trigger a completion menu for this language.
3163    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
3164 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3165 &self.completion_triggers
3166 }
3167
3168 /// Call this directly after performing edits to prevent the preview tab
3169 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3170 /// to return false until there are additional edits.
3171 pub fn refresh_preview(&mut self) {
3172 self.preview_version = self.version.clone();
3173 }
3174
3175 /// Whether we should preserve the preview status of a tab containing this buffer.
3176 pub fn preserve_preview(&self) -> bool {
3177 !self.has_edits_since(&self.preview_version)
3178 }
3179}
3180
3181#[doc(hidden)]
3182#[cfg(any(test, feature = "test-support"))]
3183impl Buffer {
3184 pub fn edit_via_marked_text(
3185 &mut self,
3186 marked_string: &str,
3187 autoindent_mode: Option<AutoindentMode>,
3188 cx: &mut Context<Self>,
3189 ) {
3190 let edits = self.edits_for_marked_text(marked_string);
3191 self.edit(edits, autoindent_mode, cx);
3192 }
3193
3194 pub fn set_group_interval(&mut self, group_interval: Duration) {
3195 self.text.set_group_interval(group_interval);
3196 }
3197
3198 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3199 where
3200 T: rand::Rng,
3201 {
3202 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3203 let mut last_end = None;
3204 for _ in 0..old_range_count {
3205 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3206 break;
3207 }
3208
3209 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3210 let mut range = self.random_byte_range(new_start, rng);
3211 if rng.random_bool(0.2) {
3212 mem::swap(&mut range.start, &mut range.end);
3213 }
3214 last_end = Some(range.end);
3215
3216 let new_text_len = rng.random_range(0..10);
3217 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3218 new_text = new_text.to_uppercase();
3219
3220 edits.push((range, new_text));
3221 }
3222 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3223 self.edit(edits, None, cx);
3224 }
3225
3226 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3227 let was_dirty = self.is_dirty();
3228 let old_version = self.version.clone();
3229
3230 let ops = self.text.randomly_undo_redo(rng);
3231 if !ops.is_empty() {
3232 for op in ops {
3233 self.send_operation(Operation::Buffer(op), true, cx);
3234 self.did_edit(&old_version, was_dirty, cx);
3235 }
3236 }
3237 }
3238}
3239
3240impl EventEmitter<BufferEvent> for Buffer {}
3241
3242impl Deref for Buffer {
3243 type Target = TextBuffer;
3244
3245 fn deref(&self) -> &Self::Target {
3246 &self.text
3247 }
3248}
3249
3250impl BufferSnapshot {
3251 /// Returns [`IndentSize`] for a given line that respects user settings and
3252 /// language preferences.
3253 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3254 indent_size_for_line(self, row)
3255 }
3256
3257 /// Returns [`IndentSize`] for a given position that respects user settings
3258 /// and language preferences.
3259 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3260 let settings = language_settings(
3261 self.language_at(position).map(|l| l.name()),
3262 self.file(),
3263 cx,
3264 );
3265 if settings.hard_tabs {
3266 IndentSize::tab()
3267 } else {
3268 IndentSize::spaces(settings.tab_size.get())
3269 }
3270 }
3271
3272 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3273 /// is passed in as `single_indent_size`.
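    ///
    /// A minimal sketch (assumes the snapshot's language provides indentation queries):
    ///
    /// ```ignore
    /// let indents = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row}: indent to {} columns", indent.len);
    /// }
    /// ```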
3274 pub fn suggested_indents(
3275 &self,
3276 rows: impl Iterator<Item = u32>,
3277 single_indent_size: IndentSize,
3278 ) -> BTreeMap<u32, IndentSize> {
3279 let mut result = BTreeMap::new();
3280
3281 for row_range in contiguous_ranges(rows, 10) {
3282 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3283 Some(suggestions) => suggestions,
3284 _ => break,
3285 };
3286
3287 for (row, suggestion) in row_range.zip(suggestions) {
3288 let indent_size = if let Some(suggestion) = suggestion {
3289 result
3290 .get(&suggestion.basis_row)
3291 .copied()
3292 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3293 .with_delta(suggestion.delta, single_indent_size)
3294 } else {
3295 self.indent_size_for_line(row)
3296 };
3297
3298 result.insert(row, indent_size);
3299 }
3300 }
3301
3302 result
3303 }
3304
3305 fn suggest_autoindents(
3306 &self,
3307 row_range: Range<u32>,
3308 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3309 let config = &self.language.as_ref()?.config;
3310 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3311
3312 #[derive(Debug, Clone)]
3313 struct StartPosition {
3314 start: Point,
3315 suffix: SharedString,
3316 language: Arc<Language>,
3317 }
3318
3319 // Find the suggested indentation ranges based on the syntax tree.
3320 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3321 let end = Point::new(row_range.end, 0);
3322 let range = (start..end).to_offset(&self.text);
3323 let mut matches = self.syntax.matches_with_options(
3324 range.clone(),
3325 &self.text,
3326 TreeSitterOptions {
3327 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3328 max_start_depth: None,
3329 },
3330 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3331 );
3332 let indent_configs = matches
3333 .grammars()
3334 .iter()
3335 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3336 .collect::<Vec<_>>();
3337
3338 let mut indent_ranges = Vec::<Range<Point>>::new();
3339 let mut start_positions = Vec::<StartPosition>::new();
3340 let mut outdent_positions = Vec::<Point>::new();
3341 while let Some(mat) = matches.peek() {
3342 let mut start: Option<Point> = None;
3343 let mut end: Option<Point> = None;
3344
3345 let config = indent_configs[mat.grammar_index];
3346 for capture in mat.captures {
3347 if capture.index == config.indent_capture_ix {
3348 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3349 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3350 } else if Some(capture.index) == config.start_capture_ix {
3351 start = Some(Point::from_ts_point(capture.node.end_position()));
3352 } else if Some(capture.index) == config.end_capture_ix {
3353 end = Some(Point::from_ts_point(capture.node.start_position()));
3354 } else if Some(capture.index) == config.outdent_capture_ix {
3355 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3356 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3357 start_positions.push(StartPosition {
3358 start: Point::from_ts_point(capture.node.start_position()),
3359 suffix: suffix.clone(),
3360 language: mat.language.clone(),
3361 });
3362 }
3363 }
3364
3365 matches.advance();
3366 if let Some((start, end)) = start.zip(end) {
3367 if start.row == end.row {
3368 continue;
3369 }
3370 let range = start..end;
3371 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3372 Err(ix) => indent_ranges.insert(ix, range),
3373 Ok(ix) => {
3374 let prev_range = &mut indent_ranges[ix];
3375 prev_range.end = prev_range.end.max(range.end);
3376 }
3377 }
3378 }
3379 }
3380
3381 let mut error_ranges = Vec::<Range<Point>>::new();
3382 let mut matches = self
3383 .syntax
3384 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3385 while let Some(mat) = matches.peek() {
3386 let node = mat.captures[0].node;
3387 let start = Point::from_ts_point(node.start_position());
3388 let end = Point::from_ts_point(node.end_position());
3389 let range = start..end;
3390 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3391 Ok(ix) | Err(ix) => ix,
3392 };
3393 let mut end_ix = ix;
3394 while let Some(existing_range) = error_ranges.get(end_ix) {
3395 if existing_range.end < end {
3396 end_ix += 1;
3397 } else {
3398 break;
3399 }
3400 }
3401 error_ranges.splice(ix..end_ix, [range]);
3402 matches.advance();
3403 }
3404
3405 outdent_positions.sort();
3406 for outdent_position in outdent_positions {
3407            // Find the innermost indent range containing this outdent position
3408            // and set its end to the outdent position.
3409 if let Some(range_to_truncate) = indent_ranges
3410 .iter_mut()
3411 .rfind(|indent_range| indent_range.contains(&outdent_position))
3412 {
3413 range_to_truncate.end = outdent_position;
3414 }
3415 }
3416
3417 start_positions.sort_by_key(|b| b.start);
3418
3419        // Find the suggested indentation increases and decreases based on regexes.
3420 let mut regex_outdent_map = HashMap::default();
3421 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3422 let mut start_positions_iter = start_positions.iter().peekable();
3423
3424 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3425 self.for_each_line(
3426 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3427 ..Point::new(row_range.end, 0),
3428 |row, line| {
3429 let indent_len = self.indent_size_for_line(row).len;
3430 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3431 let row_language_config = row_language
3432 .as_ref()
3433 .map(|lang| lang.config())
3434 .unwrap_or(config);
3435
3436 if row_language_config
3437 .decrease_indent_pattern
3438 .as_ref()
3439 .is_some_and(|regex| regex.is_match(line))
3440 {
3441 indent_change_rows.push((row, Ordering::Less));
3442 }
3443 if row_language_config
3444 .increase_indent_pattern
3445 .as_ref()
3446 .is_some_and(|regex| regex.is_match(line))
3447 {
3448 indent_change_rows.push((row + 1, Ordering::Greater));
3449 }
3450 while let Some(pos) = start_positions_iter.peek() {
3451 if pos.start.row < row {
3452 let pos = start_positions_iter.next().unwrap().clone();
3453 last_seen_suffix
3454 .entry(pos.suffix.to_string())
3455 .or_default()
3456 .push(pos);
3457 } else {
3458 break;
3459 }
3460 }
3461 for rule in &row_language_config.decrease_indent_patterns {
3462 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3463 let row_start_column = self.indent_size_for_line(row).len;
3464 let basis_row = rule
3465 .valid_after
3466 .iter()
3467 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3468 .flatten()
3469 .filter(|pos| {
3470 row_language
3471 .as_ref()
3472 .or(self.language.as_ref())
3473 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3474 })
3475 .filter(|pos| pos.start.column <= row_start_column)
3476 .max_by_key(|pos| pos.start.row);
3477 if let Some(outdent_to) = basis_row {
3478 regex_outdent_map.insert(row, outdent_to.start.row);
3479 }
3480 break;
3481 }
3482 }
3483 },
3484 );
3485
3486 let mut indent_changes = indent_change_rows.into_iter().peekable();
3487 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3488 prev_non_blank_row.unwrap_or(0)
3489 } else {
3490 row_range.start.saturating_sub(1)
3491 };
3492
3493 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3494 Some(row_range.map(move |row| {
3495 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3496
3497 let mut indent_from_prev_row = false;
3498 let mut outdent_from_prev_row = false;
3499 let mut outdent_to_row = u32::MAX;
3500 let mut from_regex = false;
3501
3502 while let Some((indent_row, delta)) = indent_changes.peek() {
3503 match indent_row.cmp(&row) {
3504 Ordering::Equal => match delta {
3505 Ordering::Less => {
3506 from_regex = true;
3507 outdent_from_prev_row = true
3508 }
3509 Ordering::Greater => {
3510 indent_from_prev_row = true;
3511 from_regex = true
3512 }
3513 _ => {}
3514 },
3515
3516 Ordering::Greater => break,
3517 Ordering::Less => {}
3518 }
3519
3520 indent_changes.next();
3521 }
3522
3523 for range in &indent_ranges {
3524 if range.start.row >= row {
3525 break;
3526 }
3527 if range.start.row == prev_row && range.end > row_start {
3528 indent_from_prev_row = true;
3529 }
3530 if range.end > prev_row_start && range.end <= row_start {
3531 outdent_to_row = outdent_to_row.min(range.start.row);
3532 }
3533 }
3534
3535 if let Some(basis_row) = regex_outdent_map.get(&row) {
3536 indent_from_prev_row = false;
3537 outdent_to_row = *basis_row;
3538 from_regex = true;
3539 }
3540
3541 let within_error = error_ranges
3542 .iter()
3543 .any(|e| e.start.row < row && e.end > row_start);
3544
3545 let suggestion = if outdent_to_row == prev_row
3546 || (outdent_from_prev_row && indent_from_prev_row)
3547 {
3548 Some(IndentSuggestion {
3549 basis_row: prev_row,
3550 delta: Ordering::Equal,
3551 within_error: within_error && !from_regex,
3552 })
3553 } else if indent_from_prev_row {
3554 Some(IndentSuggestion {
3555 basis_row: prev_row,
3556 delta: Ordering::Greater,
3557 within_error: within_error && !from_regex,
3558 })
3559 } else if outdent_to_row < prev_row {
3560 Some(IndentSuggestion {
3561 basis_row: outdent_to_row,
3562 delta: Ordering::Equal,
3563 within_error: within_error && !from_regex,
3564 })
3565 } else if outdent_from_prev_row {
3566 Some(IndentSuggestion {
3567 basis_row: prev_row,
3568 delta: Ordering::Less,
3569 within_error: within_error && !from_regex,
3570 })
3571 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3572 {
3573 Some(IndentSuggestion {
3574 basis_row: prev_row,
3575 delta: Ordering::Equal,
3576 within_error: within_error && !from_regex,
3577 })
3578 } else {
3579 None
3580 };
3581
3582 prev_row = row;
3583 prev_row_start = row_start;
3584 suggestion
3585 }))
3586 }
3587
3588 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3589 while row > 0 {
3590 row -= 1;
3591 if !self.is_line_blank(row) {
3592 return Some(row);
3593 }
3594 }
3595 None
3596 }
3597
3598 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3599 let captures = self.syntax.captures(range, &self.text, |grammar| {
3600 grammar
3601 .highlights_config
3602 .as_ref()
3603 .map(|config| &config.query)
3604 });
3605 let highlight_maps = captures
3606 .grammars()
3607 .iter()
3608 .map(|grammar| grammar.highlight_map())
3609 .collect();
3610 (captures, highlight_maps)
3611 }
3612
3613 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3614 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3615 /// returned in chunks where each chunk has a single syntax highlighting style and
3616 /// diagnostic status.
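    ///
    /// A rough usage sketch (illustrative only; `snapshot` is assumed to be a `BufferSnapshot`):
    /// ```ignore
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries at most one syntax highlight id and one diagnostic status.
    ///     let _highlight = chunk.syntax_highlight_id;
    ///     print!("{}", chunk.text);
    /// }
    /// ```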
3617 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3618 let range = range.start.to_offset(self)..range.end.to_offset(self);
3619
3620 let mut syntax = None;
3621 if language_aware {
3622 syntax = Some(self.get_highlights(range.clone()));
3623 }
3624 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3625 let diagnostics = language_aware;
3626 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3627 }
3628
3629 pub fn highlighted_text_for_range<T: ToOffset>(
3630 &self,
3631 range: Range<T>,
3632 override_style: Option<HighlightStyle>,
3633 syntax_theme: &SyntaxTheme,
3634 ) -> HighlightedText {
3635 HighlightedText::from_buffer_range(
3636 range,
3637 &self.text,
3638 &self.syntax,
3639 override_style,
3640 syntax_theme,
3641 )
3642 }
3643
3644 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
3646 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3647 let mut line = String::new();
3648 let mut row = range.start.row;
3649 for chunk in self
3650 .as_rope()
3651 .chunks_in_range(range.to_offset(self))
3652 .chain(["\n"])
3653 {
3654 for (newline_ix, text) in chunk.split('\n').enumerate() {
3655 if newline_ix > 0 {
3656 callback(row, &line);
3657 row += 1;
3658 line.clear();
3659 }
3660 line.push_str(text);
3661 }
3662 }
3663 }
3664
3665 /// Iterates over every [`SyntaxLayer`] in the buffer.
3666 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3667 self.syntax_layers_for_range(0..self.len(), true)
3668 }
3669
3670 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3671 let offset = position.to_offset(self);
3672 self.syntax_layers_for_range(offset..offset, false)
3673 .filter(|l| {
3674 if let Some(ranges) = l.included_sub_ranges {
3675 ranges.iter().any(|range| {
3676 let start = range.start.to_offset(self);
3677 start <= offset && {
3678 let end = range.end.to_offset(self);
3679 offset < end
3680 }
3681 })
3682 } else {
3683 l.node().start_byte() <= offset && l.node().end_byte() > offset
3684 }
3685 })
3686 .last()
3687 }
3688
3689 pub fn syntax_layers_for_range<D: ToOffset>(
3690 &self,
3691 range: Range<D>,
3692 include_hidden: bool,
3693 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3694 self.syntax
3695 .layers_for_range(range, &self.text, include_hidden)
3696 }
3697
3698 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3699 &self,
3700 range: Range<D>,
3701 ) -> Option<SyntaxLayer<'_>> {
3702 let range = range.to_offset(self);
3703 self.syntax
3704 .layers_for_range(range, &self.text, false)
3705 .max_by(|a, b| {
3706 if a.depth != b.depth {
3707 a.depth.cmp(&b.depth)
3708 } else if a.offset.0 != b.offset.0 {
3709 a.offset.0.cmp(&b.offset.0)
3710 } else {
3711 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3712 }
3713 })
3714 }
3715
3716 /// Returns the main [`Language`].
3717 pub fn language(&self) -> Option<&Arc<Language>> {
3718 self.language.as_ref()
3719 }
3720
3721 /// Returns the [`Language`] at the given location.
3722 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3723 self.syntax_layer_at(position)
3724 .map(|info| info.language)
3725 .or(self.language.as_ref())
3726 }
3727
3728 /// Returns the settings for the language at the given location.
3729 pub fn settings_at<'a, D: ToOffset>(
3730 &'a self,
3731 position: D,
3732 cx: &'a App,
3733 ) -> Cow<'a, LanguageSettings> {
3734 language_settings(
3735 self.language_at(position).map(|l| l.name()),
3736 self.file.as_ref(),
3737 cx,
3738 )
3739 }
3740
3741 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3742 CharClassifier::new(self.language_scope_at(point))
3743 }
3744
3745 /// Returns the [`LanguageScope`] at the given location.
3746 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3747 let offset = position.to_offset(self);
3748 let mut scope = None;
3749 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3750
3751 // Use the layer that has the smallest node intersecting the given point.
3752 for layer in self
3753 .syntax
3754 .layers_for_range(offset..offset, &self.text, false)
3755 {
3756 let mut cursor = layer.node().walk();
3757
3758 let mut range = None;
3759 loop {
3760 let child_range = cursor.node().byte_range();
3761 if !child_range.contains(&offset) {
3762 break;
3763 }
3764
3765 range = Some(child_range);
3766 if cursor.goto_first_child_for_byte(offset).is_none() {
3767 break;
3768 }
3769 }
3770
3771 if let Some(range) = range
3772 && smallest_range_and_depth.as_ref().is_none_or(
3773 |(smallest_range, smallest_range_depth)| {
3774 if layer.depth > *smallest_range_depth {
3775 true
3776 } else if layer.depth == *smallest_range_depth {
3777 range.len() < smallest_range.len()
3778 } else {
3779 false
3780 }
3781 },
3782 )
3783 {
3784 smallest_range_and_depth = Some((range, layer.depth));
3785 scope = Some(LanguageScope {
3786 language: layer.language.clone(),
3787 override_id: layer.override_id(offset, &self.text),
3788 });
3789 }
3790 }
3791
3792 scope.or_else(|| {
3793 self.language.clone().map(|language| LanguageScope {
3794 language,
3795 override_id: None,
3796 })
3797 })
3798 }
3799
3800 /// Returns a tuple of the range and character kind of the word
3801 /// surrounding the given position.
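    ///
    /// A hedged sketch (`offset` stands in for any in-range position):
    /// ```ignore
    /// let (word_range, kind) = snapshot.surrounding_word(offset, None);
    /// let word: String = snapshot.text_for_range(word_range).collect();
    /// ```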
3802 pub fn surrounding_word<T: ToOffset>(
3803 &self,
3804 start: T,
3805 scope_context: Option<CharScopeContext>,
3806 ) -> (Range<usize>, Option<CharKind>) {
3807 let mut start = start.to_offset(self);
3808 let mut end = start;
3809 let mut next_chars = self.chars_at(start).take(128).peekable();
3810 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3811
3812 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3813 let word_kind = cmp::max(
3814 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3815 next_chars.peek().copied().map(|c| classifier.kind(c)),
3816 );
3817
3818 for ch in prev_chars {
3819 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3820 start -= ch.len_utf8();
3821 } else {
3822 break;
3823 }
3824 }
3825
3826 for ch in next_chars {
3827 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3828 end += ch.len_utf8();
3829 } else {
3830 break;
3831 }
3832 }
3833
3834 (start..end, word_kind)
3835 }
3836
3837 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3838 /// range. When `require_larger` is true, the node found must be larger than the query range.
3839 ///
3840 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3841 /// be moved to the root of the tree.
3842 fn goto_node_enclosing_range(
3843 cursor: &mut tree_sitter::TreeCursor,
3844 query_range: &Range<usize>,
3845 require_larger: bool,
3846 ) -> bool {
3847 let mut ascending = false;
3848 loop {
3849 let mut range = cursor.node().byte_range();
3850 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3853 if range.start > query_range.start {
3854 cursor.goto_previous_sibling();
3855 range = cursor.node().byte_range();
3856 }
3857 } else {
3858 // When the query range is non-empty and the current node ends exactly at the start,
3859 // move to the next sibling to find a node that extends beyond the start.
3860 if range.end == query_range.start {
3861 cursor.goto_next_sibling();
3862 range = cursor.node().byte_range();
3863 }
3864 }
3865
3866 let encloses = range.contains_inclusive(query_range)
3867 && (!require_larger || range.len() > query_range.len());
3868 if !encloses {
3869 ascending = true;
3870 if !cursor.goto_parent() {
3871 return false;
3872 }
3873 continue;
3874 } else if ascending {
3875 return true;
3876 }
3877
3878 // Descend into the current node.
3879 if cursor
3880 .goto_first_child_for_byte(query_range.start)
3881 .is_none()
3882 {
3883 return true;
3884 }
3885 }
3886 }
3887
3888 pub fn syntax_ancestor<'a, T: ToOffset>(
3889 &'a self,
3890 range: Range<T>,
3891 ) -> Option<tree_sitter::Node<'a>> {
3892 let range = range.start.to_offset(self)..range.end.to_offset(self);
3893 let mut result: Option<tree_sitter::Node<'a>> = None;
3894 for layer in self
3895 .syntax
3896 .layers_for_range(range.clone(), &self.text, true)
3897 {
3898 let mut cursor = layer.node().walk();
3899
3900 // Find the node that both contains the range and is larger than it.
3901 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3902 continue;
3903 }
3904
3905 let left_node = cursor.node();
3906 let mut layer_result = left_node;
3907
3908 // For an empty range, try to find another node immediately to the right of the range.
3909 if left_node.end_byte() == range.start {
3910 let mut right_node = None;
3911 while !cursor.goto_next_sibling() {
3912 if !cursor.goto_parent() {
3913 break;
3914 }
3915 }
3916
3917 while cursor.node().start_byte() == range.start {
3918 right_node = Some(cursor.node());
3919 if !cursor.goto_first_child() {
3920 break;
3921 }
3922 }
3923
3924 // If there is a candidate node on both sides of the (empty) range, then
3925 // decide between the two by favoring a named node over an anonymous token.
3926 // If both nodes are the same in that regard, favor the right one.
3927 if let Some(right_node) = right_node
3928 && (right_node.is_named() || !left_node.is_named())
3929 {
3930 layer_result = right_node;
3931 }
3932 }
3933
3934 if let Some(previous_result) = &result
3935 && previous_result.byte_range().len() < layer_result.byte_range().len()
3936 {
3937 continue;
3938 }
3939 result = Some(layer_result);
3940 }
3941
3942 result
3943 }
3944
3945 /// Find the previous sibling syntax node at the given range.
3946 ///
3947 /// This function locates the syntax node that precedes the node containing
3948 /// the given range. It searches hierarchically by:
3949 /// 1. Finding the node that contains the given range
3950 /// 2. Looking for the previous sibling at the same tree level
3951 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3952 ///
3953 /// Returns `None` if there is no previous sibling at any ancestor level.
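    ///
    /// A minimal sketch (assuming `offset` is a byte offset within the buffer):
    /// ```ignore
    /// if let Some(prev) = snapshot.syntax_prev_sibling(offset..offset) {
    ///     println!("previous sibling kind: {}", prev.kind());
    /// }
    /// ```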
3954 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3955 &'a self,
3956 range: Range<T>,
3957 ) -> Option<tree_sitter::Node<'a>> {
3958 let range = range.start.to_offset(self)..range.end.to_offset(self);
3959 let mut result: Option<tree_sitter::Node<'a>> = None;
3960
3961 for layer in self
3962 .syntax
3963 .layers_for_range(range.clone(), &self.text, true)
3964 {
3965 let mut cursor = layer.node().walk();
3966
3967 // Find the node that contains the range
3968 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3969 continue;
3970 }
3971
3972 // Look for the previous sibling, moving up ancestor levels if needed
3973 loop {
3974 if cursor.goto_previous_sibling() {
3975 let layer_result = cursor.node();
3976
3977 if let Some(previous_result) = &result {
3978 if previous_result.byte_range().end < layer_result.byte_range().end {
3979 continue;
3980 }
3981 }
3982 result = Some(layer_result);
3983 break;
3984 }
3985
3986 // No sibling found at this level, try moving up to parent
3987 if !cursor.goto_parent() {
3988 break;
3989 }
3990 }
3991 }
3992
3993 result
3994 }
3995
3996 /// Find the next sibling syntax node at the given range.
3997 ///
3998 /// This function locates the syntax node that follows the node containing
3999 /// the given range. It searches hierarchically by:
4000 /// 1. Finding the node that contains the given range
4001 /// 2. Looking for the next sibling at the same tree level
4002 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
4003 ///
4004 /// Returns `None` if there is no next sibling at any ancestor level.
4005 pub fn syntax_next_sibling<'a, T: ToOffset>(
4006 &'a self,
4007 range: Range<T>,
4008 ) -> Option<tree_sitter::Node<'a>> {
4009 let range = range.start.to_offset(self)..range.end.to_offset(self);
4010 let mut result: Option<tree_sitter::Node<'a>> = None;
4011
4012 for layer in self
4013 .syntax
4014 .layers_for_range(range.clone(), &self.text, true)
4015 {
4016 let mut cursor = layer.node().walk();
4017
4018 // Find the node that contains the range
4019 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
4020 continue;
4021 }
4022
4023 // Look for the next sibling, moving up ancestor levels if needed
4024 loop {
4025 if cursor.goto_next_sibling() {
4026 let layer_result = cursor.node();
4027
4028 if let Some(previous_result) = &result {
4029 if previous_result.byte_range().start > layer_result.byte_range().start {
4030 continue;
4031 }
4032 }
4033 result = Some(layer_result);
4034 break;
4035 }
4036
4037 // No sibling found at this level, try moving up to parent
4038 if !cursor.goto_parent() {
4039 break;
4040 }
4041 }
4042 }
4043
4044 result
4045 }
4046
    /// Returns the root syntax node within the given row.
4048 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
4049 let start_offset = position.to_offset(self);
4050
4051 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
4052
4053 let layer = self
4054 .syntax
4055 .layers_for_range(start_offset..start_offset, &self.text, true)
4056 .next()?;
4057
4058 let mut cursor = layer.node().walk();
4059
4060 // Descend to the first leaf that touches the start of the range.
4061 while cursor.goto_first_child_for_byte(start_offset).is_some() {
4062 if cursor.node().end_byte() == start_offset {
4063 cursor.goto_next_sibling();
4064 }
4065 }
4066
4067 // Ascend to the root node within the same row.
4068 while cursor.goto_parent() {
4069 if cursor.node().start_position().row != row {
4070 break;
4071 }
4072 }
4073
4074 Some(cursor.node())
4075 }
4076
4077 /// Returns the outline for the buffer.
4078 ///
4079 /// This method allows passing an optional [`SyntaxTheme`] to
4080 /// syntax-highlight the returned symbols.
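    ///
    /// A rough sketch of rendering the outline items with their computed depths
    /// (hedged; `snapshot` stands in for a `BufferSnapshot`):
    /// ```ignore
    /// let items = snapshot.outline_items_containing(0..snapshot.len(), true, None);
    /// for item in &items {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```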
4081 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4082 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4083 }
4084
4085 /// Returns all the symbols that contain the given position.
4086 ///
4087 /// This method allows passing an optional [`SyntaxTheme`] to
4088 /// syntax-highlight the returned symbols.
4089 pub fn symbols_containing<T: ToOffset>(
4090 &self,
4091 position: T,
4092 theme: Option<&SyntaxTheme>,
4093 ) -> Vec<OutlineItem<Anchor>> {
4094 let position = position.to_offset(self);
4095 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4096 let end = self.clip_offset(position + 1, Bias::Right);
4097 let mut items = self.outline_items_containing(start..end, false, theme);
4098 let mut prev_depth = None;
4099 items.retain(|item| {
4100 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4101 prev_depth = Some(item.depth);
4102 result
4103 });
4104 items
4105 }
4106
4107 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4108 let range = range.to_offset(self);
4109 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4110 grammar.outline_config.as_ref().map(|c| &c.query)
4111 });
4112 let configs = matches
4113 .grammars()
4114 .iter()
4115 .map(|g| g.outline_config.as_ref().unwrap())
4116 .collect::<Vec<_>>();
4117
4118 while let Some(mat) = matches.peek() {
4119 let config = &configs[mat.grammar_index];
4120 let containing_item_node = maybe!({
4121 let item_node = mat.captures.iter().find_map(|cap| {
4122 if cap.index == config.item_capture_ix {
4123 Some(cap.node)
4124 } else {
4125 None
4126 }
4127 })?;
4128
4129 let item_byte_range = item_node.byte_range();
4130 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4131 None
4132 } else {
4133 Some(item_node)
4134 }
4135 });
4136
4137 if let Some(item_node) = containing_item_node {
4138 return Some(
4139 Point::from_ts_point(item_node.start_position())
4140 ..Point::from_ts_point(item_node.end_position()),
4141 );
4142 }
4143
4144 matches.advance();
4145 }
4146 None
4147 }
4148
4149 pub fn outline_items_containing<T: ToOffset>(
4150 &self,
4151 range: Range<T>,
4152 include_extra_context: bool,
4153 theme: Option<&SyntaxTheme>,
4154 ) -> Vec<OutlineItem<Anchor>> {
4155 self.outline_items_containing_internal(
4156 range,
4157 include_extra_context,
4158 theme,
4159 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4160 )
4161 }
4162
4163 pub fn outline_items_as_points_containing<T: ToOffset>(
4164 &self,
4165 range: Range<T>,
4166 include_extra_context: bool,
4167 theme: Option<&SyntaxTheme>,
4168 ) -> Vec<OutlineItem<Point>> {
4169 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4170 range
4171 })
4172 }
4173
4174 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4175 &self,
4176 range: Range<T>,
4177 include_extra_context: bool,
4178 theme: Option<&SyntaxTheme>,
4179 ) -> Vec<OutlineItem<usize>> {
4180 self.outline_items_containing_internal(
4181 range,
4182 include_extra_context,
4183 theme,
4184 |buffer, range| range.to_offset(buffer),
4185 )
4186 }
4187
4188 fn outline_items_containing_internal<T: ToOffset, U>(
4189 &self,
4190 range: Range<T>,
4191 include_extra_context: bool,
4192 theme: Option<&SyntaxTheme>,
4193 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4194 ) -> Vec<OutlineItem<U>> {
4195 let range = range.to_offset(self);
4196 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4197 grammar.outline_config.as_ref().map(|c| &c.query)
4198 });
4199
4200 let mut items = Vec::new();
4201 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4202 while let Some(mat) = matches.peek() {
4203 let config = matches.grammars()[mat.grammar_index]
4204 .outline_config
4205 .as_ref()
4206 .unwrap();
4207 if let Some(item) =
4208 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4209 {
4210 items.push(item);
4211 } else if let Some(capture) = mat
4212 .captures
4213 .iter()
4214 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4215 {
4216 let capture_range = capture.node.start_position()..capture.node.end_position();
4217 let mut capture_row_range =
4218 capture_range.start.row as u32..capture_range.end.row as u32;
4219 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4220 {
4221 capture_row_range.end -= 1;
4222 }
4223 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4224 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4225 last_row_range.end = capture_row_range.end;
4226 } else {
4227 annotation_row_ranges.push(capture_row_range);
4228 }
4229 } else {
4230 annotation_row_ranges.push(capture_row_range);
4231 }
4232 }
4233 matches.advance();
4234 }
4235
4236 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4237
4238 // Assign depths based on containment relationships and convert to anchors.
4239 let mut item_ends_stack = Vec::<Point>::new();
4240 let mut anchor_items = Vec::new();
4241 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4242 for item in items {
4243 while let Some(last_end) = item_ends_stack.last().copied() {
4244 if last_end < item.range.end {
4245 item_ends_stack.pop();
4246 } else {
4247 break;
4248 }
4249 }
4250
4251 let mut annotation_row_range = None;
4252 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4253 let row_preceding_item = item.range.start.row.saturating_sub(1);
4254 if next_annotation_row_range.end < row_preceding_item {
4255 annotation_row_ranges.next();
4256 } else {
4257 if next_annotation_row_range.end == row_preceding_item {
4258 annotation_row_range = Some(next_annotation_row_range.clone());
4259 annotation_row_ranges.next();
4260 }
4261 break;
4262 }
4263 }
4264
4265 anchor_items.push(OutlineItem {
4266 depth: item_ends_stack.len(),
4267 range: range_callback(self, item.range.clone()),
4268 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4269 text: item.text,
4270 highlight_ranges: item.highlight_ranges,
4271 name_ranges: item.name_ranges,
4272 body_range: item.body_range.map(|r| range_callback(self, r)),
4273 annotation_range: annotation_row_range.map(|annotation_range| {
4274 let point_range = Point::new(annotation_range.start, 0)
4275 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4276 range_callback(self, point_range)
4277 }),
4278 });
4279 item_ends_stack.push(item.range.end);
4280 }
4281
4282 anchor_items
4283 }
4284
4285 fn next_outline_item(
4286 &self,
4287 config: &OutlineConfig,
4288 mat: &SyntaxMapMatch,
4289 range: &Range<usize>,
4290 include_extra_context: bool,
4291 theme: Option<&SyntaxTheme>,
4292 ) -> Option<OutlineItem<Point>> {
4293 let item_node = mat.captures.iter().find_map(|cap| {
4294 if cap.index == config.item_capture_ix {
4295 Some(cap.node)
4296 } else {
4297 None
4298 }
4299 })?;
4300
4301 let item_byte_range = item_node.byte_range();
4302 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4303 return None;
4304 }
4305 let item_point_range = Point::from_ts_point(item_node.start_position())
4306 ..Point::from_ts_point(item_node.end_position());
4307
4308 let mut open_point = None;
4309 let mut close_point = None;
4310
4311 let mut buffer_ranges = Vec::new();
4312 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4313 let mut range = node.start_byte()..node.end_byte();
4314 let start = node.start_position();
4315 if node.end_position().row > start.row {
4316 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4317 }
4318
4319 if !range.is_empty() {
4320 buffer_ranges.push((range, node_is_name));
4321 }
4322 };
4323
4324 for capture in mat.captures {
4325 if capture.index == config.name_capture_ix {
4326 add_to_buffer_ranges(capture.node, true);
4327 } else if Some(capture.index) == config.context_capture_ix
4328 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4329 {
4330 add_to_buffer_ranges(capture.node, false);
4331 } else {
4332 if Some(capture.index) == config.open_capture_ix {
4333 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4334 } else if Some(capture.index) == config.close_capture_ix {
4335 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4336 }
4337 }
4338 }
4339
4340 if buffer_ranges.is_empty() {
4341 return None;
4342 }
4343 let source_range_for_text =
4344 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4345
4346 let mut text = String::new();
4347 let mut highlight_ranges = Vec::new();
4348 let mut name_ranges = Vec::new();
4349 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4350 let mut last_buffer_range_end = 0;
4351 for (buffer_range, is_name) in buffer_ranges {
4352 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4353 if space_added {
4354 text.push(' ');
4355 }
4356 let before_append_len = text.len();
4357 let mut offset = buffer_range.start;
4358 chunks.seek(buffer_range.clone());
4359 for mut chunk in chunks.by_ref() {
4360 if chunk.text.len() > buffer_range.end - offset {
4361 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4362 offset = buffer_range.end;
4363 } else {
4364 offset += chunk.text.len();
4365 }
4366 let style = chunk
4367 .syntax_highlight_id
4368 .zip(theme)
4369 .and_then(|(highlight, theme)| highlight.style(theme));
4370 if let Some(style) = style {
4371 let start = text.len();
4372 let end = start + chunk.text.len();
4373 highlight_ranges.push((start..end, style));
4374 }
4375 text.push_str(chunk.text);
4376 if offset >= buffer_range.end {
4377 break;
4378 }
4379 }
4380 if is_name {
4381 let after_append_len = text.len();
4382 let start = if space_added && !name_ranges.is_empty() {
4383 before_append_len - 1
4384 } else {
4385 before_append_len
4386 };
4387 name_ranges.push(start..after_append_len);
4388 }
4389 last_buffer_range_end = buffer_range.end;
4390 }
4391
4392 Some(OutlineItem {
4393 depth: 0, // We'll calculate the depth later
4394 range: item_point_range,
4395 source_range_for_text: source_range_for_text.to_point(self),
4396 text,
4397 highlight_ranges,
4398 name_ranges,
4399 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4400 annotation_range: None,
4401 })
4402 }
4403
4404 pub fn function_body_fold_ranges<T: ToOffset>(
4405 &self,
4406 within: Range<T>,
4407 ) -> impl Iterator<Item = Range<usize>> + '_ {
4408 self.text_object_ranges(within, TreeSitterOptions::default())
4409 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4410 }
4411
    /// For each grammar in the buffer's syntax layers, runs the [`tree_sitter::Query`]
    /// returned by the provided callback against the given range.
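    ///
    /// A hedged sketch that reuses the outline query as an example:
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|c| &c.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // inspect `mat.captures` here
    ///     matches.advance();
    /// }
    /// ```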
4414 pub fn matches(
4415 &self,
4416 range: Range<usize>,
4417 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4418 ) -> SyntaxMapMatches<'_> {
4419 self.syntax.matches(range, self, query)
4420 }
4421
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that
    /// intersect with those chunks. Hence, it may return more bracket pairs than the range contains.
    ///
    /// Chunks present in `known_chunks` are skipped.
    /// The resulting bracket match collections are not ordered.
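    ///
    /// A hedged usage sketch (names are illustrative):
    /// ```ignore
    /// let brackets_by_chunk = snapshot.fetch_bracket_ranges(0..snapshot.len(), None);
    /// for (row_range, brackets) in &brackets_by_chunk {
    ///     for bracket in brackets {
    ///         let _ = (&bracket.open_range, &bracket.close_range);
    ///     }
    /// }
    /// ```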
4427 pub fn fetch_bracket_ranges(
4428 &self,
4429 range: Range<usize>,
4430 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4431 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4432 let mut all_bracket_matches = HashMap::default();
4433
4434 for chunk in self
4435 .tree_sitter_data
4436 .chunks
4437 .applicable_chunks(&[range.to_point(self)])
4438 {
4439 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4440 continue;
4441 }
4442 let chunk_range = chunk.anchor_range();
4443 let chunk_range = chunk_range.to_offset(&self);
4444
4445 if let Some(cached_brackets) =
4446 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4447 {
4448 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4449 continue;
4450 }
4451
4452 let mut all_brackets = Vec::new();
4453 let mut opens = Vec::new();
4454 let mut color_pairs = Vec::new();
4455
4456 let mut matches = self.syntax.matches_with_options(
4457 chunk_range.clone(),
4458 &self.text,
4459 TreeSitterOptions {
4460 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4461 max_start_depth: None,
4462 },
4463 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4464 );
4465 let configs = matches
4466 .grammars()
4467 .iter()
4468 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4469 .collect::<Vec<_>>();
4470
4471 while let Some(mat) = matches.peek() {
4472 let mut open = None;
4473 let mut close = None;
4474 let syntax_layer_depth = mat.depth;
4475 let config = configs[mat.grammar_index];
4476 let pattern = &config.patterns[mat.pattern_index];
4477 for capture in mat.captures {
4478 if capture.index == config.open_capture_ix {
4479 open = Some(capture.node.byte_range());
4480 } else if capture.index == config.close_capture_ix {
4481 close = Some(capture.node.byte_range());
4482 }
4483 }
4484
4485 matches.advance();
4486
4487 let Some((open_range, close_range)) = open.zip(close) else {
4488 continue;
4489 };
4490
4491 let bracket_range = open_range.start..=close_range.end;
4492 if !bracket_range.overlaps(&chunk_range) {
4493 continue;
4494 }
4495
4496 let index = all_brackets.len();
4497 all_brackets.push(BracketMatch {
4498 open_range: open_range.clone(),
4499 close_range: close_range.clone(),
4500 newline_only: pattern.newline_only,
4501 syntax_layer_depth,
4502 color_index: None,
4503 });
4504
                // Certain languages have "brackets" that are not really brackets, e.g. tags, and such
                // a bracket pair will match the entire tag with all of the text inside it.
                // For now, avoid coloring any pair where both the open and close brackets are longer
                // than a single character.
                // We need to colorize `<Element/>` bracket pairs, so this check cannot be made stricter.
4509 let should_color =
4510 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4511 if should_color {
4512 opens.push(open_range.clone());
4513 color_pairs.push((open_range, close_range, index));
4514 }
4515 }
4516
4517 opens.sort_by_key(|r| (r.start, r.end));
4518 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4519 color_pairs.sort_by_key(|(_, close, _)| close.end);
4520
4521 let mut open_stack = Vec::new();
4522 let mut open_index = 0;
4523 for (open, close, index) in color_pairs {
4524 while open_index < opens.len() && opens[open_index].start < close.start {
4525 open_stack.push(opens[open_index].clone());
4526 open_index += 1;
4527 }
4528
4529 if open_stack.last() == Some(&open) {
4530 let depth_index = open_stack.len() - 1;
4531 all_brackets[index].color_index = Some(depth_index);
4532 open_stack.pop();
4533 }
4534 }
4535
4536 all_brackets.sort_by_key(|bracket_match| {
4537 (bracket_match.open_range.start, bracket_match.open_range.end)
4538 });
4539
4540 if let empty_slot @ None =
4541 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4542 {
4543 *empty_slot = Some(all_brackets.clone());
4544 }
4545 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4546 }
4547
4548 all_bracket_matches
4549 }
4550
4551 pub fn all_bracket_ranges(
4552 &self,
4553 range: Range<usize>,
4554 ) -> impl Iterator<Item = BracketMatch<usize>> {
4555 self.fetch_bracket_ranges(range.clone(), None)
4556 .into_values()
4557 .flatten()
4558 .filter(move |bracket_match| {
4559 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4560 bracket_range.overlaps(&range)
4561 })
4562 }
4563
    /// Returns bracket range pairs overlapping or adjacent to `range`.
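    ///
    /// A rough sketch (hedged; `selection` stands in for any byte-offset range):
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(selection.start..selection.end) {
    ///     // `open_range` and `close_range` are the byte ranges of the two brackets.
    ///     let _ = (&pair.open_range, &pair.close_range);
    /// }
    /// ```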
4565 pub fn bracket_ranges<T: ToOffset>(
4566 &self,
4567 range: Range<T>,
4568 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4569 // Find bracket pairs that *inclusively* contain the given range.
4570 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4571 self.all_bracket_ranges(range)
4572 .filter(|pair| !pair.newline_only)
4573 }
4574
4575 pub fn debug_variables_query<T: ToOffset>(
4576 &self,
4577 range: Range<T>,
4578 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4579 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4580
4581 let mut matches = self.syntax.matches_with_options(
4582 range.clone(),
4583 &self.text,
4584 TreeSitterOptions::default(),
4585 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4586 );
4587
4588 let configs = matches
4589 .grammars()
4590 .iter()
4591 .map(|grammar| grammar.debug_variables_config.as_ref())
4592 .collect::<Vec<_>>();
4593
4594 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4595
4596 iter::from_fn(move || {
4597 loop {
4598 while let Some(capture) = captures.pop() {
4599 if capture.0.overlaps(&range) {
4600 return Some(capture);
4601 }
4602 }
4603
4604 let mat = matches.peek()?;
4605
4606 let Some(config) = configs[mat.grammar_index].as_ref() else {
4607 matches.advance();
4608 continue;
4609 };
4610
4611 for capture in mat.captures {
4612 let Some(ix) = config
4613 .objects_by_capture_ix
4614 .binary_search_by_key(&capture.index, |e| e.0)
4615 .ok()
4616 else {
4617 continue;
4618 };
4619 let text_object = config.objects_by_capture_ix[ix].1;
4620 let byte_range = capture.node.byte_range();
4621
4622 let mut found = false;
4623 for (range, existing) in captures.iter_mut() {
4624 if existing == &text_object {
4625 range.start = range.start.min(byte_range.start);
4626 range.end = range.end.max(byte_range.end);
4627 found = true;
4628 break;
4629 }
4630 }
4631
4632 if !found {
4633 captures.push((byte_range, text_object));
4634 }
4635 }
4636
4637 matches.advance();
4638 }
4639 })
4640 }
4641
4642 pub fn text_object_ranges<T: ToOffset>(
4643 &self,
4644 range: Range<T>,
4645 options: TreeSitterOptions,
4646 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4647 let range =
4648 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4649
4650 let mut matches =
4651 self.syntax
4652 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4653 grammar.text_object_config.as_ref().map(|c| &c.query)
4654 });
4655
4656 let configs = matches
4657 .grammars()
4658 .iter()
4659 .map(|grammar| grammar.text_object_config.as_ref())
4660 .collect::<Vec<_>>();
4661
4662 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4663
4664 iter::from_fn(move || {
4665 loop {
4666 while let Some(capture) = captures.pop() {
4667 if capture.0.overlaps(&range) {
4668 return Some(capture);
4669 }
4670 }
4671
4672 let mat = matches.peek()?;
4673
4674 let Some(config) = configs[mat.grammar_index].as_ref() else {
4675 matches.advance();
4676 continue;
4677 };
4678
4679 for capture in mat.captures {
4680 let Some(ix) = config
4681 .text_objects_by_capture_ix
4682 .binary_search_by_key(&capture.index, |e| e.0)
4683 .ok()
4684 else {
4685 continue;
4686 };
4687 let text_object = config.text_objects_by_capture_ix[ix].1;
4688 let byte_range = capture.node.byte_range();
4689
4690 let mut found = false;
4691 for (range, existing) in captures.iter_mut() {
4692 if existing == &text_object {
4693 range.start = range.start.min(byte_range.start);
4694 range.end = range.end.max(byte_range.end);
4695 found = true;
4696 break;
4697 }
4698 }
4699
4700 if !found {
4701 captures.push((byte_range, text_object));
4702 }
4703 }
4704
4705 matches.advance();
4706 }
4707 })
4708 }
4709
    /// Returns enclosing bracket ranges containing the given range.
4711 pub fn enclosing_bracket_ranges<T: ToOffset>(
4712 &self,
4713 range: Range<T>,
4714 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4715 let range = range.start.to_offset(self)..range.end.to_offset(self);
4716
4717 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4718 let max_depth = result
4719 .iter()
4720 .map(|mat| mat.syntax_layer_depth)
4721 .max()
4722 .unwrap_or(0);
4723 result.into_iter().filter(move |pair| {
4724 pair.open_range.start <= range.start
4725 && pair.close_range.end >= range.end
4726 && pair.syntax_layer_depth == max_depth
4727 })
4728 }
4729
    /// Returns the smallest enclosing pair of bracket ranges containing the given range,
    /// or `None` if no bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
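    ///
    /// A minimal sketch (assuming `cursor` is a byte offset within the buffer):
    /// ```ignore
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(cursor..cursor, None) {
    ///     // `open` and `close` are the byte ranges of the innermost surrounding bracket pair.
    /// }
    /// ```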
4733 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4734 &self,
4735 range: Range<T>,
4736 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4737 ) -> Option<(Range<usize>, Range<usize>)> {
4738 let range = range.start.to_offset(self)..range.end.to_offset(self);
4739
4740 // Get the ranges of the innermost pair of brackets.
4741 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4742
4743 for pair in self.enclosing_bracket_ranges(range) {
4744 if let Some(range_filter) = range_filter
4745 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4746 {
4747 continue;
4748 }
4749
4750 let len = pair.close_range.end - pair.open_range.start;
4751
4752 if let Some((existing_open, existing_close)) = &result {
4753 let existing_len = existing_close.end - existing_open.start;
4754 if len > existing_len {
4755 continue;
4756 }
4757 }
4758
4759 result = Some((pair.open_range, pair.close_range));
4760 }
4761
4762 result
4763 }
4764
    /// Returns the offset ranges of any matches of the redaction query.
    /// The buffer can be associated with multiple languages, and the redaction query associated with
    /// each language will be run on the relevant section of the buffer.
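    ///
    /// A minimal, illustrative sketch:
    /// ```ignore
    /// for redacted in snapshot.redacted_ranges(0..snapshot.len()) {
    ///     // `redacted` is a byte range whose contents should be hidden when rendered.
    /// }
    /// ```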
4768 pub fn redacted_ranges<T: ToOffset>(
4769 &self,
4770 range: Range<T>,
4771 ) -> impl Iterator<Item = Range<usize>> + '_ {
4772 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4773 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4774 grammar
4775 .redactions_config
4776 .as_ref()
4777 .map(|config| &config.query)
4778 });
4779
4780 let configs = syntax_matches
4781 .grammars()
4782 .iter()
4783 .map(|grammar| grammar.redactions_config.as_ref())
4784 .collect::<Vec<_>>();
4785
4786 iter::from_fn(move || {
4787 let redacted_range = syntax_matches
4788 .peek()
4789 .and_then(|mat| {
4790 configs[mat.grammar_index].and_then(|config| {
4791 mat.captures
4792 .iter()
4793 .find(|capture| capture.index == config.redaction_capture_ix)
4794 })
4795 })
4796 .map(|mat| mat.node.byte_range());
4797 syntax_matches.advance();
4798 redacted_range
4799 })
4800 }
4801
4802 pub fn injections_intersecting_range<T: ToOffset>(
4803 &self,
4804 range: Range<T>,
4805 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4806 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4807
4808 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4809 grammar
4810 .injection_config
4811 .as_ref()
4812 .map(|config| &config.query)
4813 });
4814
4815 let configs = syntax_matches
4816 .grammars()
4817 .iter()
4818 .map(|grammar| grammar.injection_config.as_ref())
4819 .collect::<Vec<_>>();
4820
4821 iter::from_fn(move || {
4822 let ranges = syntax_matches.peek().and_then(|mat| {
4823 let config = &configs[mat.grammar_index]?;
4824 let content_capture_range = mat.captures.iter().find_map(|capture| {
4825 if capture.index == config.content_capture_ix {
4826 Some(capture.node.byte_range())
4827 } else {
4828 None
4829 }
4830 })?;
4831 let language = self.language_at(content_capture_range.start)?;
4832 Some((content_capture_range, language))
4833 });
4834 syntax_matches.advance();
4835 ranges
4836 })
4837 }
4838
4839 pub fn runnable_ranges(
4840 &self,
4841 offset_range: Range<usize>,
4842 ) -> impl Iterator<Item = RunnableRange> + '_ {
4843 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4844 grammar.runnable_config.as_ref().map(|config| &config.query)
4845 });
4846
4847 let test_configs = syntax_matches
4848 .grammars()
4849 .iter()
4850 .map(|grammar| grammar.runnable_config.as_ref())
4851 .collect::<Vec<_>>();
4852
4853 iter::from_fn(move || {
4854 loop {
4855 let mat = syntax_matches.peek()?;
4856
4857 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4858 let mut run_range = None;
4859 let full_range = mat.captures.iter().fold(
4860 Range {
4861 start: usize::MAX,
4862 end: 0,
4863 },
4864 |mut acc, next| {
4865 let byte_range = next.node.byte_range();
4866 if acc.start > byte_range.start {
4867 acc.start = byte_range.start;
4868 }
4869 if acc.end < byte_range.end {
4870 acc.end = byte_range.end;
4871 }
4872 acc
4873 },
4874 );
4875 if full_range.start > full_range.end {
4876 // We did not find a full spanning range of this match.
4877 return None;
4878 }
4879 let extra_captures: SmallVec<[_; 1]> =
4880 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4881 test_configs
4882 .extra_captures
4883 .get(capture.index as usize)
4884 .cloned()
4885 .and_then(|tag_name| match tag_name {
4886 RunnableCapture::Named(name) => {
4887 Some((capture.node.byte_range(), name))
4888 }
4889 RunnableCapture::Run => {
4890 let _ = run_range.insert(capture.node.byte_range());
4891 None
4892 }
4893 })
4894 }));
4895 let run_range = run_range?;
4896 let tags = test_configs
4897 .query
4898 .property_settings(mat.pattern_index)
4899 .iter()
4900 .filter_map(|property| {
4901 if *property.key == *"tag" {
4902 property
4903 .value
4904 .as_ref()
4905 .map(|value| RunnableTag(value.to_string().into()))
4906 } else {
4907 None
4908 }
4909 })
4910 .collect();
4911 let extra_captures = extra_captures
4912 .into_iter()
4913 .map(|(range, name)| {
4914 (
4915 name.to_string(),
4916 self.text_for_range(range).collect::<String>(),
4917 )
4918 })
4919 .collect();
4920 // All tags should have the same range.
4921 Some(RunnableRange {
4922 run_range,
4923 full_range,
4924 runnable: Runnable {
4925 tags,
4926 language: mat.language,
4927 buffer: self.remote_id(),
4928 },
4929 extra_captures,
4930 buffer_id: self.remote_id(),
4931 })
4932 });
4933
4934 syntax_matches.advance();
4935 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns None. However, we don't want this
                    // iterator to yield None just because a match lacked a run marker, so in that case we
                    // loop around and try the next match.
4938 return test_range;
4939 }
4940 }
4941 })
4942 }
4943
    /// Returns the selections of peers that intersect the given range,
    /// optionally including the local replica's selections.
4945 #[allow(clippy::type_complexity)]
4946 pub fn selections_in_range(
4947 &self,
4948 range: Range<Anchor>,
4949 include_local: bool,
4950 ) -> impl Iterator<
4951 Item = (
4952 ReplicaId,
4953 bool,
4954 CursorShape,
4955 impl Iterator<Item = &Selection<Anchor>> + '_,
4956 ),
4957 > + '_ {
4958 self.remote_selections
4959 .iter()
4960 .filter(move |(replica_id, set)| {
4961 (include_local || **replica_id != self.text.replica_id())
4962 && !set.selections.is_empty()
4963 })
4964 .map(move |(replica_id, set)| {
4965 let start_ix = match set.selections.binary_search_by(|probe| {
4966 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4967 }) {
4968 Ok(ix) | Err(ix) => ix,
4969 };
4970 let end_ix = match set.selections.binary_search_by(|probe| {
4971 probe.start.cmp(&range.end, self).then(Ordering::Less)
4972 }) {
4973 Ok(ix) | Err(ix) => ix,
4974 };
4975
4976 (
4977 *replica_id,
4978 set.line_mode,
4979 set.cursor_shape,
4980 set.selections[start_ix..end_ix].iter(),
4981 )
4982 })
4983 }
4984
    /// Returns whether the buffer contains any diagnostics.
4986 pub fn has_diagnostics(&self) -> bool {
4987 !self.diagnostics.is_empty()
4988 }
4989
4990 /// Returns all the diagnostics intersecting the given range.
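    ///
    /// A hedged sketch that walks diagnostics as byte-offset ranges:
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     let _severity = entry.diagnostic.severity;
    ///     let _range = entry.range.clone();
    /// }
    /// ```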
4991 pub fn diagnostics_in_range<'a, T, O>(
4992 &'a self,
4993 search_range: Range<T>,
4994 reversed: bool,
4995 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4996 where
4997 T: 'a + Clone + ToOffset,
4998 O: 'a + FromAnchor,
4999 {
5000 let mut iterators: Vec<_> = self
5001 .diagnostics
5002 .iter()
5003 .map(|(_, collection)| {
5004 collection
5005 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
5006 .peekable()
5007 })
5008 .collect();
5009
5010 std::iter::from_fn(move || {
5011 let (next_ix, _) = iterators
5012 .iter_mut()
5013 .enumerate()
5014 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
5015 .min_by(|(_, a), (_, b)| {
5016 let cmp = a
5017 .range
5018 .start
5019 .cmp(&b.range.start, self)
5020 // when range is equal, sort by diagnostic severity
5021 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
5022 // and stabilize order with group_id
5023 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
5024 if reversed { cmp.reverse() } else { cmp }
5025 })?;
5026 iterators[next_ix]
5027 .next()
5028 .map(
5029 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
5030 diagnostic,
5031 range: FromAnchor::from_anchor(&range.start, self)
5032 ..FromAnchor::from_anchor(&range.end, self),
5033 },
5034 )
5035 })
5036 }
5037
5038 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
5039 /// should be used instead.
5040 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
5041 &self.diagnostics
5042 }
5043
5044 /// Returns all the diagnostic groups associated with the given
5045 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
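    ///
    /// A rough sketch of reading each group's primary diagnostic (illustrative only):
    /// ```ignore
    /// for (_server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let _primary = &group.entries[group.primary_ix];
    /// }
    /// ```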
5047 pub fn diagnostic_groups(
5048 &self,
5049 language_server_id: Option<LanguageServerId>,
5050 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
5051 let mut groups = Vec::new();
5052
5053 if let Some(language_server_id) = language_server_id {
5054 if let Ok(ix) = self
5055 .diagnostics
5056 .binary_search_by_key(&language_server_id, |e| e.0)
5057 {
5058 self.diagnostics[ix]
5059 .1
5060 .groups(language_server_id, &mut groups, self);
5061 }
5062 } else {
5063 for (language_server_id, diagnostics) in self.diagnostics.iter() {
5064 diagnostics.groups(*language_server_id, &mut groups, self);
5065 }
5066 }
5067
5068 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
5069 let a_start = &group_a.entries[group_a.primary_ix].range.start;
5070 let b_start = &group_b.entries[group_b.primary_ix].range.start;
5071 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5072 });
5073
5074 groups
5075 }
5076
5077 /// Returns an iterator over the diagnostics for the given group.
5078 pub fn diagnostic_group<O>(
5079 &self,
5080 group_id: usize,
5081 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5082 where
5083 O: FromAnchor + 'static,
5084 {
5085 self.diagnostics
5086 .iter()
5087 .flat_map(move |(_, set)| set.group(group_id, self))
5088 }
5089
5090 /// An integer version number that accounts for all updates besides
5091 /// the buffer's text itself (which is versioned via a version vector).
5092 pub fn non_text_state_update_count(&self) -> usize {
5093 self.non_text_state_update_count
5094 }
5095
5096 /// An integer version that changes when the buffer's syntax changes.
5097 pub fn syntax_update_count(&self) -> usize {
5098 self.syntax.update_count()
5099 }
5100
5101 /// Returns a snapshot of underlying file.
5102 pub fn file(&self) -> Option<&Arc<dyn File>> {
5103 self.file.as_ref()
5104 }
5105
5106 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5107 if let Some(file) = self.file() {
5108 if file.path().file_name().is_none() || include_root {
5109 Some(file.full_path(cx).to_string_lossy().into_owned())
5110 } else {
5111 Some(file.path().display(file.path_style(cx)).to_string())
5112 }
5113 } else {
5114 None
5115 }
5116 }
5117
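    /// Collects the distinct words in the given offset range into a map from word text to the
    /// anchor range where that word occurs, optionally filtered by a fuzzy query string.
    ///
    /// A hedged usage sketch (the query values are illustrative):
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("buf"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, range) in &words {
    ///     // each word maps to an anchor range where it appears in the buffer
    /// }
    /// ```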
5118 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5119 let query_str = query.fuzzy_contents;
5120 if query_str.is_some_and(|query| query.is_empty()) {
5121 return BTreeMap::default();
5122 }
5123
5124 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5125 language,
5126 override_id: None,
5127 }));
5128
5129 let mut query_ix = 0;
5130 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5131 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5132
5133 let mut words = BTreeMap::default();
5134 let mut current_word_start_ix = None;
5135 let mut chunk_ix = query.range.start;
5136 for chunk in self.chunks(query.range, false) {
5137 for (i, c) in chunk.text.char_indices() {
5138 let ix = chunk_ix + i;
5139 if classifier.is_word(c) {
5140 if current_word_start_ix.is_none() {
5141 current_word_start_ix = Some(ix);
5142 }
5143
5144 if let Some(query_chars) = &query_chars
5145 && query_ix < query_len
5146 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5147 {
5148 query_ix += 1;
5149 }
5150 continue;
5151 } else if let Some(word_start) = current_word_start_ix.take()
5152 && query_ix == query_len
5153 {
5154 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5155 let mut word_text = self.text_for_range(word_start..ix).peekable();
5156 let first_char = word_text
5157 .peek()
5158 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words and "words" that start with a digit.
5160 if !query.skip_digits
5161 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5162 {
5163 words.insert(word_text.collect(), word_range);
5164 }
5165 }
5166 query_ix = 0;
5167 }
5168 chunk_ix += chunk.text.len();
5169 }
5170
5171 words
5172 }
5173}
5174
5175pub struct WordsQuery<'a> {
    /// Only returns words that contain every character of this string, in order (case-insensitive).
5177 pub fuzzy_contents: Option<&'a str>,
5178 /// Skips words that start with a digit.
5179 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
5181 pub range: Range<usize>,
5182}
5183
5184fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5185 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5186}
5187
5188fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5189 let mut result = IndentSize::spaces(0);
5190 for c in text {
5191 let kind = match c {
5192 ' ' => IndentKind::Space,
5193 '\t' => IndentKind::Tab,
5194 _ => break,
5195 };
5196 if result.len == 0 {
5197 result.kind = kind;
5198 }
5199 result.len += 1;
5200 }
5201 result
5202}
5203
5204impl Clone for BufferSnapshot {
5205 fn clone(&self) -> Self {
5206 Self {
5207 text: self.text.clone(),
5208 syntax: self.syntax.clone(),
5209 file: self.file.clone(),
5210 remote_selections: self.remote_selections.clone(),
5211 diagnostics: self.diagnostics.clone(),
5212 language: self.language.clone(),
5213 tree_sitter_data: self.tree_sitter_data.clone(),
5214 non_text_state_update_count: self.non_text_state_update_count,
5215 capability: self.capability,
5216 }
5217 }
5218}
5219
5220impl Deref for BufferSnapshot {
5221 type Target = text::BufferSnapshot;
5222
5223 fn deref(&self) -> &Self::Target {
5224 &self.text
5225 }
5226}
5227
5228unsafe impl Send for BufferChunks<'_> {}
5229
5230impl<'a> BufferChunks<'a> {
5231 pub(crate) fn new(
5232 text: &'a Rope,
5233 range: Range<usize>,
5234 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5235 diagnostics: bool,
5236 buffer_snapshot: Option<&'a BufferSnapshot>,
5237 ) -> Self {
5238 let mut highlights = None;
5239 if let Some((captures, highlight_maps)) = syntax {
5240 highlights = Some(BufferChunkHighlights {
5241 captures,
5242 next_capture: None,
5243 stack: Default::default(),
5244 highlight_maps,
5245 })
5246 }
5247
5248 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5249 let chunks = text.chunks_in_range(range.clone());
5250
5251 let mut this = BufferChunks {
5252 range,
5253 buffer_snapshot,
5254 chunks,
5255 diagnostic_endpoints,
5256 error_depth: 0,
5257 warning_depth: 0,
5258 information_depth: 0,
5259 hint_depth: 0,
5260 unnecessary_depth: 0,
5261 underline: true,
5262 highlights,
5263 };
5264 this.initialize_diagnostic_endpoints();
5265 this
5266 }
5267
    /// Seeks to the given byte range in the buffer.
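    ///
    /// A rough sketch (hedged): create a language-aware chunk iterator, then narrow it to a sub-range.
    /// ```ignore
    /// let mut chunks = snapshot.chunks(0..snapshot.len(), true);
    /// chunks.seek(10..20);
    /// ```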
5269 pub fn seek(&mut self, range: Range<usize>) {
5270 let old_range = std::mem::replace(&mut self.range, range.clone());
5271 self.chunks.set_range(self.range.clone());
5272 if let Some(highlights) = self.highlights.as_mut() {
5273 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5274 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5275 highlights
5276 .stack
5277 .retain(|(end_offset, _)| *end_offset > range.start);
5278 if let Some(capture) = &highlights.next_capture
5279 && range.start >= capture.node.start_byte()
5280 {
5281 let next_capture_end = capture.node.end_byte();
5282 if range.start < next_capture_end {
5283 highlights.stack.push((
5284 next_capture_end,
5285 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5286 ));
5287 }
5288 highlights.next_capture.take();
5289 }
5290 } else if let Some(snapshot) = self.buffer_snapshot {
5291 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5292 *highlights = BufferChunkHighlights {
5293 captures,
5294 next_capture: None,
5295 stack: Default::default(),
5296 highlight_maps,
5297 };
5298 } else {
5299 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5300 // Seeking such BufferChunks is not supported.
5301 debug_assert!(
5302 false,
5303 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5304 );
5305 }
5306
5307 highlights.captures.set_byte_range(self.range.clone());
5308 self.initialize_diagnostic_endpoints();
5309 }
5310 }
5311
    fn initialize_diagnostic_endpoints(&mut self) {
        if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
            && let Some(buffer) = self.buffer_snapshot
        {
            let mut diagnostic_endpoints = Vec::new();
            for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start,
                    is_start: true,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end,
                    is_start: false,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
            }
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
            *diagnostics = diagnostic_endpoints.into_iter().peekable();
            self.hint_depth = 0;
            self.error_depth = 0;
            self.warning_depth = 0;
            self.information_depth = 0;
        }
    }

    /// The current byte offset in the buffer.
    pub fn offset(&self) -> usize {
        self.range.start
    }

    /// The byte range that remains to be yielded by this iterator.
    pub fn range(&self) -> Range<usize> {
        self.range.clone()
    }

    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }

        if endpoint.is_unnecessary {
            if endpoint.is_start {
                self.unnecessary_depth += 1;
            } else {
                self.unnecessary_depth -= 1;
            }
        }
    }

    fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }

    fn current_code_is_unnecessary(&self) -> bool {
        self.unnecessary_depth > 0
    }
}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            // Pop captures that have already ended at or before the current position.
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            // Push captures that begin at or before the current position onto the
            // highlight stack, and record where the next capture starts.
            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        // Apply diagnostic endpoints up to the current position, and record where
        // the next one starts.
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            // Restrict the chunk's per-byte tab and char bitmaps to the slice being emitted.
            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            // If the underlying chunk has been fully consumed, advance to the next one.
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

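// Illustrative sketch, added for exposition and not part of the original source:
// with no syntax captures or diagnostics, `BufferChunks` yields the rope's text
// in chunk-sized pieces. Assumes `Rope::from(&str)` and `Rope::len`, as used
// elsewhere in this crate.
#[cfg(test)]
#[test]
fn buffer_chunks_plain_text_sketch() {
    let text = "let x = 1;\n\tlet y = 2;\n";
    let rope = Rope::from(text);
    let chunks = BufferChunks::new(&rope, 0..rope.len(), None, false, None);
    // Concatenating the yielded chunks reproduces the original text.
    let collected: String = chunks.map(|chunk| chunk.text).collect();
    assert_eq!(collected, text);
}
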
impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a single tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been shrunk or enlarged by the given size along the given direction.
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

    /// The width of this indent in columns, with tabs expanded to `tab_size`.
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

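// Illustrative sketch of the `IndentSize` helpers above; added for exposition
// and not part of the original source.
#[cfg(test)]
#[test]
fn indent_size_sketch() {
    // Growing and shrinking an indent only combines indents of the same kind.
    let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(2));
    assert_eq!(indent.len, 6);
    let indent = indent.with_delta(Ordering::Less, IndentSize::spaces(4));
    assert_eq!(indent.len, 2);

    // A tab expands to `tab_size` columns, while spaces keep their length.
    let tab_size = NonZeroU32::new(4).unwrap();
    assert_eq!(IndentSize::tab().len_with_expanded_tabs(tab_size), 4);
    assert_eq!(IndentSize::spaces(3).len_with_expanded_tabs(tab_size), 3);
}
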
#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

/// Coalesces an iterator of `u32` values into ranges of consecutive values,
/// starting a new range whenever a value is not adjacent to the previous one
/// or the current range has already reached `max_len`.
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

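// Illustrative sketch of `contiguous_ranges`; added for exposition and not part
// of the original source. Consecutive values are merged into a single range, and
// a run is split once it reaches `max_len`.
#[cfg(test)]
#[test]
fn contiguous_ranges_sketch() {
    let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 2).collect();
    assert_eq!(ranges, vec![1..3, 3..4, 5..7]);
}
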
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

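// Illustrative sketch of `CharClassifier` without a language scope; added for
// exposition and not part of the original source. With no scope, classification
// falls back to alphanumerics and `_` as words, whitespace, and punctuation, and
// `ignore_punctuation` folds punctuation into the word class.
#[cfg(test)]
#[test]
fn char_classifier_sketch() {
    let classifier = CharClassifier::new(None);
    assert!(classifier.is_word('a'));
    assert!(classifier.is_word('_'));
    assert!(classifier.is_whitespace(' '));
    assert!(classifier.is_punctuation('.'));

    let classifier = classifier.ignore_punctuation(true);
    assert!(classifier.is_word('.'));
}
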
/// Finds all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}
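
// Illustrative sketch of `trailing_whitespace_ranges`; added for exposition and
// not part of the original source. Assumes `Rope::from(&str)`, as used elsewhere
// in this crate.
#[cfg(test)]
#[test]
fn trailing_whitespace_ranges_sketch() {
    let rope = Rope::from("a  \nb\t\nc");
    // Byte ranges 1..3 (the spaces after "a") and 5..6 (the tab after "b") are
    // trailing whitespace; the final line has none.
    assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
}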