1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::channel::oneshot;
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TaskLabel, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::{Arc, LazyLock},
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
79/// A label for the background task spawned by the buffer to compute
80/// a diff against the contents of its file.
81pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
82
83 /// Indicates whether a [`Buffer`] has permission to edit.
84#[derive(PartialEq, Clone, Copy, Debug)]
85pub enum Capability {
86 /// The buffer is a mutable replica.
87 ReadWrite,
88 /// The buffer is a read-only replica.
89 ReadOnly,
90}
91
92pub type BufferRow = u32;
93
94/// An in-memory representation of a source code file, including its text,
95/// syntax trees, git status, and diagnostics.
96pub struct Buffer {
97 text: TextBuffer,
98 branch_state: Option<BufferBranchState>,
99 /// Filesystem state, `None` when there is no path.
100 file: Option<Arc<dyn File>>,
101 /// The mtime of the file when this buffer was last loaded from
102 /// or saved to disk.
103 saved_mtime: Option<MTime>,
104 /// The version vector when this buffer was last loaded from
105 /// or saved to disk.
106 saved_version: clock::Global,
107 preview_version: clock::Global,
108 transaction_depth: usize,
109 was_dirty_before_starting_transaction: Option<bool>,
110 reload_task: Option<Task<Result<()>>>,
111 language: Option<Arc<Language>>,
112 autoindent_requests: Vec<Arc<AutoindentRequest>>,
113 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
114 pending_autoindent: Option<Task<()>>,
115 sync_parse_timeout: Duration,
116 syntax_map: Mutex<SyntaxMap>,
117 reparse: Option<Task<()>>,
118 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
119 non_text_state_update_count: usize,
120 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
121 remote_selections: TreeMap<ReplicaId, SelectionSet>,
122 diagnostics_timestamp: clock::Lamport,
123 completion_triggers: BTreeSet<String>,
124 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
125 completion_triggers_timestamp: clock::Lamport,
126 deferred_ops: OperationQueue<Operation>,
127 capability: Capability,
128 has_conflict: bool,
129 /// Memoizes calls to `has_changes_since(saved_version)`.
130 /// The cell holds `(self.version, has_changes)` as of the most recent call.
131 has_unsaved_edits: Cell<(clock::Global, bool)>,
132 change_bits: Vec<rc::Weak<Cell<bool>>>,
133 _subscriptions: Vec<gpui::Subscription>,
134 tree_sitter_data: Arc<TreeSitterData>,
135 encoding: &'static Encoding,
136 has_bom: bool,
137}
138
139#[derive(Debug)]
140pub struct TreeSitterData {
141 chunks: RowChunks,
142 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
143}
144
145const MAX_ROWS_IN_A_CHUNK: u32 = 50;
146
147impl TreeSitterData {
148 fn clear(&mut self, snapshot: text::BufferSnapshot) {
149 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
150 self.brackets_by_chunks.get_mut().clear();
151 self.brackets_by_chunks
152 .get_mut()
153 .resize(self.chunks.len(), None);
154 }
155
156 fn new(snapshot: text::BufferSnapshot) -> Self {
157 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
158 Self {
159 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
160 chunks,
161 }
162 }
163
164 fn version(&self) -> &clock::Global {
165 self.chunks.version()
166 }
167}
168
169#[derive(Copy, Clone, Debug, PartialEq, Eq)]
170pub enum ParseStatus {
171 Idle,
172 Parsing,
173}
174
175struct BufferBranchState {
176 base_buffer: Entity<Buffer>,
177 merged_operations: Vec<Lamport>,
178}
179
180/// An immutable, cheaply cloneable representation of a fixed
181/// state of a buffer.
182pub struct BufferSnapshot {
183 pub text: text::BufferSnapshot,
184 pub syntax: SyntaxSnapshot,
185 file: Option<Arc<dyn File>>,
186 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
187 remote_selections: TreeMap<ReplicaId, SelectionSet>,
188 language: Option<Arc<Language>>,
189 non_text_state_update_count: usize,
190 tree_sitter_data: Arc<TreeSitterData>,
191}
192
193/// The kind and amount of indentation in a particular line. For now,
194/// assumes that indentation is all the same character.
195#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
196pub struct IndentSize {
197 /// The number of bytes that comprise the indentation.
198 pub len: u32,
199 /// The kind of whitespace used for indentation.
200 pub kind: IndentKind,
201}
202
203/// A whitespace character that's used for indentation.
204#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
205pub enum IndentKind {
206 /// An ASCII space character.
207 #[default]
208 Space,
209 /// An ASCII tab character.
210 Tab,
211}
212
213/// The shape of a selection cursor.
214#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
215pub enum CursorShape {
216 /// A vertical bar
217 #[default]
218 Bar,
219 /// A block that surrounds the following character
220 Block,
221 /// An underline that runs along the following character
222 Underline,
223 /// A box drawn around the following character
224 Hollow,
225}
226
227impl From<settings::CursorShape> for CursorShape {
228 fn from(shape: settings::CursorShape) -> Self {
229 match shape {
230 settings::CursorShape::Bar => CursorShape::Bar,
231 settings::CursorShape::Block => CursorShape::Block,
232 settings::CursorShape::Underline => CursorShape::Underline,
233 settings::CursorShape::Hollow => CursorShape::Hollow,
234 }
235 }
236}
237
238#[derive(Clone, Debug)]
239struct SelectionSet {
240 line_mode: bool,
241 cursor_shape: CursorShape,
242 selections: Arc<[Selection<Anchor>]>,
243 lamport_timestamp: clock::Lamport,
244}
245
246/// A diagnostic associated with a certain range of a buffer.
247#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
248pub struct Diagnostic {
249 /// The name of the service that produced this diagnostic.
250 pub source: Option<String>,
251 /// The ID provided by the dynamic registration that produced this diagnostic.
252 pub registration_id: Option<SharedString>,
253 /// A machine-readable code that identifies this diagnostic.
254 pub code: Option<NumberOrString>,
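    /// A URL with more information about this diagnostic's code
    /// (corresponds to the LSP `codeDescription` link).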
255 pub code_description: Option<lsp::Uri>,
256 /// Whether this diagnostic is a hint, warning, or error.
257 pub severity: DiagnosticSeverity,
258 /// The human-readable message associated with this diagnostic.
259 pub message: String,
260 /// The human-readable message (in markdown format)
261 pub markdown: Option<String>,
262 /// An id that identifies the group to which this diagnostic belongs.
263 ///
264 /// When a language server produces a diagnostic with
265 /// one or more associated diagnostics, those diagnostics are all
266 /// assigned a single group ID.
267 pub group_id: usize,
268 /// Whether this diagnostic is the primary diagnostic for its group.
269 ///
270 /// In a given group, the primary diagnostic is the top-level diagnostic
271 /// returned by the language server. The non-primary diagnostics are the
272 /// associated diagnostics.
273 pub is_primary: bool,
274 /// Whether this diagnostic is considered to originate from an analysis of
275 /// files on disk, as opposed to any unsaved buffer contents. This is a
276 /// property of a given diagnostic source, and is configured for a given
277 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
278 /// for the language server.
279 pub is_disk_based: bool,
280 /// Whether this diagnostic marks unnecessary code.
281 pub is_unnecessary: bool,
282 /// A coarse classification of this diagnostic's source (pulled, pushed, or other), used to quickly separate diagnostic groups.
283 pub source_kind: DiagnosticSourceKind,
284 /// Data from the language server that produced this diagnostic, passed back to the server when requesting code actions for it.
285 pub data: Option<Value>,
286 /// Whether to underline the corresponding text range in the editor.
287 pub underline: bool,
288}
289
290#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
291pub enum DiagnosticSourceKind {
292 Pulled,
293 Pushed,
294 Other,
295}
296
297/// An operation used to synchronize this buffer with its other replicas.
298#[derive(Clone, Debug, PartialEq)]
299pub enum Operation {
300 /// A text operation.
301 Buffer(text::Operation),
302
303 /// An update to the buffer's diagnostics.
304 UpdateDiagnostics {
305 /// The id of the language server that produced the new diagnostics.
306 server_id: LanguageServerId,
307 /// The diagnostics.
308 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
309 /// The buffer's lamport timestamp.
310 lamport_timestamp: clock::Lamport,
311 },
312
313 /// An update to the most recent selections in this buffer.
314 UpdateSelections {
315 /// The selections.
316 selections: Arc<[Selection<Anchor>]>,
317 /// The buffer's lamport timestamp.
318 lamport_timestamp: clock::Lamport,
319 /// Whether the selections are in 'line mode'.
320 line_mode: bool,
321 /// The [`CursorShape`] associated with these selections.
322 cursor_shape: CursorShape,
323 },
324
325 /// An update to the characters that should trigger autocompletion
326 /// for this buffer.
327 UpdateCompletionTriggers {
328 /// The characters that trigger autocompletion.
329 triggers: Vec<String>,
330 /// The buffer's lamport timestamp.
331 lamport_timestamp: clock::Lamport,
332 /// The language server ID.
333 server_id: LanguageServerId,
334 },
335
336 /// An update to the line ending type of this buffer.
337 UpdateLineEnding {
338 /// The line ending type.
339 line_ending: LineEnding,
340 /// The buffer's lamport timestamp.
341 lamport_timestamp: clock::Lamport,
342 },
343}
344
345/// An event that occurs in a buffer.
346#[derive(Clone, Debug, PartialEq)]
347pub enum BufferEvent {
348 /// The buffer was changed in a way that must be
349 /// propagated to its other replicas.
350 Operation {
351 operation: Operation,
352 is_local: bool,
353 },
354 /// The buffer was edited.
355 Edited,
356 /// The buffer's `dirty` bit changed.
357 DirtyChanged,
358 /// The buffer was saved.
359 Saved,
360 /// The buffer's file was changed on disk.
361 FileHandleChanged,
362 /// The buffer was reloaded.
363 Reloaded,
364 /// The buffer needs to be reloaded from disk.
365 ReloadNeeded,
366 /// The buffer's language was changed.
367 /// The boolean indicates whether the buffer previously had no language (or only plain text) and now has one.
368 LanguageChanged(bool),
369 /// The buffer's syntax trees were updated.
370 Reparsed,
371 /// The buffer's diagnostics were updated.
372 DiagnosticsUpdated,
373 /// The buffer gained or lost editing capabilities.
374 CapabilityChanged,
375}
376
377/// The file associated with a buffer.
378pub trait File: Send + Sync + Any {
379 /// Returns the [`LocalFile`] associated with this file, if the
380 /// file is local.
381 fn as_local(&self) -> Option<&dyn LocalFile>;
382
383 /// Returns whether this file is local.
384 fn is_local(&self) -> bool {
385 self.as_local().is_some()
386 }
387
388 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
389 /// only available in some states, such as modification time.
390 fn disk_state(&self) -> DiskState;
391
392 /// Returns the path of this file relative to the worktree's root directory.
393 fn path(&self) -> &Arc<RelPath>;
394
395 /// Returns the path of this file relative to the worktree's parent directory (this means it
396 /// includes the name of the worktree's root folder).
397 fn full_path(&self, cx: &App) -> PathBuf;
398
399 /// Returns the path style of this file.
400 fn path_style(&self, cx: &App) -> PathStyle;
401
402 /// Returns the last component of this handle's absolute path. If this handle refers to the root
403 /// of its worktree, then this method will return the name of the worktree itself.
404 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
405
406 /// Returns the id of the worktree to which this file belongs.
407 ///
408 /// This is needed for looking up project-specific settings.
409 fn worktree_id(&self, cx: &App) -> WorktreeId;
410
411 /// Converts this file into a protobuf message.
412 fn to_proto(&self, cx: &App) -> rpc::proto::File;
413
414 /// Returns whether Zed considers this to be a private file.
415 fn is_private(&self) -> bool;
416}
417
418/// The file's storage status - whether it's stored (`Present`), and if so when it was last
419/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
420/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
421/// indicator for new files.
422#[derive(Copy, Clone, Debug, PartialEq)]
423pub enum DiskState {
424 /// File created in Zed that has not been saved.
425 New,
426 /// File present on the filesystem.
427 Present { mtime: MTime },
428 /// Deleted file that was previously present.
429 Deleted,
430}
431
432impl DiskState {
433 /// Returns the file's last known modification time on disk.
434 pub fn mtime(self) -> Option<MTime> {
435 match self {
436 DiskState::New => None,
437 DiskState::Present { mtime } => Some(mtime),
438 DiskState::Deleted => None,
439 }
440 }
441
442 pub fn exists(&self) -> bool {
443 match self {
444 DiskState::New => false,
445 DiskState::Present { .. } => true,
446 DiskState::Deleted => false,
447 }
448 }
449}
450
451/// The file associated with a buffer, in the case where the file is on the local disk.
452pub trait LocalFile: File {
453 /// Returns the absolute path of this file.
454 fn abs_path(&self, cx: &App) -> PathBuf;
455
456 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
457 fn load(&self, cx: &App) -> Task<Result<String>>;
458
459 /// Loads the file's contents from disk.
460 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
461}
462
463/// The auto-indent behavior associated with an editing operation.
464/// For some editing operations, each affected line of text has its
465/// indentation recomputed. For other operations, the entire block
466/// of edited text is adjusted uniformly.
467#[derive(Clone, Debug)]
468pub enum AutoindentMode {
469 /// Indent each line of inserted text.
470 EachLine,
471 /// Apply the same indentation adjustment to all of the lines
472 /// in a given insertion.
473 Block {
474 /// The original indentation column of the first line of each
475 /// insertion, if it has been copied.
476 ///
477 /// Knowing this makes it possible to preserve the relative indentation
478 /// of every line in the insertion from when it was copied.
479 ///
480 /// If the original indent column is `a`, and the first line of the insertion
481 /// is then auto-indented to column `b`, every other line of the insertion
482 /// has its indentation adjusted by `b - a` columns.
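    /// For example, if text was copied at indent column 4 (`a = 4`) and its
    /// first line is auto-indented to column 8 (`b = 8`), every subsequent
    /// line is shifted right by 4 columns.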
483 original_indent_columns: Vec<Option<u32>>,
484 },
485}
486
487#[derive(Clone)]
488struct AutoindentRequest {
489 before_edit: BufferSnapshot,
490 entries: Vec<AutoindentRequestEntry>,
491 is_block_mode: bool,
492 ignore_empty_lines: bool,
493}
494
495#[derive(Debug, Clone)]
496struct AutoindentRequestEntry {
497 /// A range of the buffer whose indentation should be adjusted.
498 range: Range<Anchor>,
499 /// Whether or not these lines should be considered brand new, for the
500 /// purpose of auto-indent. When text is not new, its indentation will
501 /// only be adjusted if the suggested indentation level has *changed*
502 /// since the edit was made.
503 first_line_is_new: bool,
504 indent_size: IndentSize,
505 original_indent_column: Option<u32>,
506}
507
508#[derive(Debug)]
509struct IndentSuggestion {
510 basis_row: u32,
511 delta: Ordering,
512 within_error: bool,
513}
514
515struct BufferChunkHighlights<'a> {
516 captures: SyntaxMapCaptures<'a>,
517 next_capture: Option<SyntaxMapCapture<'a>>,
518 stack: Vec<(usize, HighlightId)>,
519 highlight_maps: Vec<HighlightMap>,
520}
521
522/// An iterator that yields chunks of a buffer's text, along with their
523/// syntax highlights and diagnostic status.
524pub struct BufferChunks<'a> {
525 buffer_snapshot: Option<&'a BufferSnapshot>,
526 range: Range<usize>,
527 chunks: text::Chunks<'a>,
528 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
529 error_depth: usize,
530 warning_depth: usize,
531 information_depth: usize,
532 hint_depth: usize,
533 unnecessary_depth: usize,
534 underline: bool,
535 highlights: Option<BufferChunkHighlights<'a>>,
536}
537
538/// A chunk of a buffer's text, along with its syntax highlight and
539/// diagnostic status.
540#[derive(Clone, Debug, Default)]
541pub struct Chunk<'a> {
542 /// The text of the chunk.
543 pub text: &'a str,
544 /// The syntax highlighting style of the chunk.
545 pub syntax_highlight_id: Option<HighlightId>,
546 /// The highlight style that has been applied to this chunk in
547 /// the editor.
548 pub highlight_style: Option<HighlightStyle>,
549 /// The severity of the diagnostic associated with this chunk, if any.
550 pub diagnostic_severity: Option<DiagnosticSeverity>,
551 /// A bitset of which characters are tabs in this string.
552 pub tabs: u128,
553 /// A bitmap of the positions in this chunk at which characters start.
554 pub chars: u128,
555 /// Whether this chunk of text is marked as unnecessary.
556 pub is_unnecessary: bool,
557 /// Whether this chunk of text was originally a tab character.
558 pub is_tab: bool,
559 /// Whether this chunk of text was originally an inlay.
560 pub is_inlay: bool,
561 /// Whether to underline the corresponding text range in the editor.
562 pub underline: bool,
563}
564
565/// A set of edits to a given version of a buffer, computed asynchronously.
566#[derive(Debug)]
567pub struct Diff {
568 pub base_version: clock::Global,
569 pub line_ending: LineEnding,
570 pub edits: Vec<(Range<usize>, Arc<str>)>,
571}
572
573#[derive(Debug, Clone, Copy)]
574pub(crate) struct DiagnosticEndpoint {
575 offset: usize,
576 is_start: bool,
577 underline: bool,
578 severity: DiagnosticSeverity,
579 is_unnecessary: bool,
580}
581
582/// A class of characters, used for characterizing a run of text.
583#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
584pub enum CharKind {
585 /// Whitespace.
586 Whitespace,
587 /// Punctuation.
588 Punctuation,
589 /// Word.
590 Word,
591}
592
593/// Context for character classification within a specific scope.
594#[derive(Copy, Clone, Eq, PartialEq, Debug)]
595pub enum CharScopeContext {
596 /// Character classification for completion queries.
597 ///
598 /// This context treats certain characters as word constituents that would
599 /// normally be considered punctuation, such as '-' in Tailwind classes
600 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
601 Completion,
602 /// Character classification for linked edits.
603 ///
604 /// This context handles characters that should be treated as part of
605 /// identifiers during linked editing operations, such as '.' in JSX
606 /// component names like `<Animated.View>`.
607 LinkedEdit,
608}
609
610 /// A runnable describes a region of the buffer that can be resolved into a task.
611pub struct Runnable {
612 pub tags: SmallVec<[RunnableTag; 1]>,
613 pub language: Arc<Language>,
614 pub buffer: BufferId,
615}
616
617#[derive(Default, Clone, Debug)]
618pub struct HighlightedText {
619 pub text: SharedString,
620 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
621}
622
623#[derive(Default, Debug)]
624struct HighlightedTextBuilder {
625 pub text: String,
626 highlights: Vec<(Range<usize>, HighlightStyle)>,
627}
628
629impl HighlightedText {
630 pub fn from_buffer_range<T: ToOffset>(
631 range: Range<T>,
632 snapshot: &text::BufferSnapshot,
633 syntax_snapshot: &SyntaxSnapshot,
634 override_style: Option<HighlightStyle>,
635 syntax_theme: &SyntaxTheme,
636 ) -> Self {
637 let mut highlighted_text = HighlightedTextBuilder::default();
638 highlighted_text.add_text_from_buffer_range(
639 range,
640 snapshot,
641 syntax_snapshot,
642 override_style,
643 syntax_theme,
644 );
645 highlighted_text.build()
646 }
647
648 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
649 gpui::StyledText::new(self.text.clone())
650 .with_default_highlights(default_style, self.highlights.iter().cloned())
651 }
652
653 /// Returns the first line, with leading whitespace trimmed unless a highlight
654 /// starts within it, and a boolean indicating whether more lines follow.
655 pub fn first_line_preview(self) -> (Self, bool) {
656 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
657 let first_line = &self.text[..newline_ix];
658
659 // Trim leading whitespace, unless an edit starts prior to it.
660 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
661 if let Some((first_highlight_range, _)) = self.highlights.first() {
662 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
663 }
664
665 let preview_text = &first_line[preview_start_ix..];
666 let preview_highlights = self
667 .highlights
668 .into_iter()
669 .skip_while(|(range, _)| range.end <= preview_start_ix)
670 .take_while(|(range, _)| range.start < newline_ix)
671 .filter_map(|(mut range, highlight)| {
672 range.start = range.start.saturating_sub(preview_start_ix);
673 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
674 if range.is_empty() {
675 None
676 } else {
677 Some((range, highlight))
678 }
679 });
680
681 let preview = Self {
682 text: SharedString::new(preview_text),
683 highlights: preview_highlights.collect(),
684 };
685
686 (preview, self.text.len() > newline_ix)
687 }
688}
689
690impl HighlightedTextBuilder {
691 pub fn build(self) -> HighlightedText {
692 HighlightedText {
693 text: self.text.into(),
694 highlights: self.highlights,
695 }
696 }
697
698 pub fn add_text_from_buffer_range<T: ToOffset>(
699 &mut self,
700 range: Range<T>,
701 snapshot: &text::BufferSnapshot,
702 syntax_snapshot: &SyntaxSnapshot,
703 override_style: Option<HighlightStyle>,
704 syntax_theme: &SyntaxTheme,
705 ) {
706 let range = range.to_offset(snapshot);
707 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
708 let start = self.text.len();
709 self.text.push_str(chunk.text);
710 let end = self.text.len();
711
712 if let Some(highlight_style) = chunk
713 .syntax_highlight_id
714 .and_then(|id| id.style(syntax_theme))
715 {
716 let highlight_style = override_style.map_or(highlight_style, |override_style| {
717 highlight_style.highlight(override_style)
718 });
719 self.highlights.push((start..end, highlight_style));
720 } else if let Some(override_style) = override_style {
721 self.highlights.push((start..end, override_style));
722 }
723 }
724 }
725
726 fn highlighted_chunks<'a>(
727 range: Range<usize>,
728 snapshot: &'a text::BufferSnapshot,
729 syntax_snapshot: &'a SyntaxSnapshot,
730 ) -> BufferChunks<'a> {
731 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
732 grammar
733 .highlights_config
734 .as_ref()
735 .map(|config| &config.query)
736 });
737
738 let highlight_maps = captures
739 .grammars()
740 .iter()
741 .map(|grammar| grammar.highlight_map())
742 .collect();
743
744 BufferChunks::new(
745 snapshot.as_rope(),
746 range,
747 Some((captures, highlight_maps)),
748 false,
749 None,
750 )
751 }
752}
753
754#[derive(Clone)]
755pub struct EditPreview {
756 old_snapshot: text::BufferSnapshot,
757 applied_edits_snapshot: text::BufferSnapshot,
758 syntax_snapshot: SyntaxSnapshot,
759}
760
761impl EditPreview {
762 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
763 let (first, _) = edits.first()?;
764 let (last, _) = edits.last()?;
765
766 let start = first.start.to_point(&self.old_snapshot);
767 let old_end = last.end.to_point(&self.old_snapshot);
768 let new_end = last
769 .end
770 .bias_right(&self.old_snapshot)
771 .to_point(&self.applied_edits_snapshot);
772
773 let start = Point::new(start.row.saturating_sub(3), 0);
774 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
775 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
776
777 Some(unified_diff(
778 &self
779 .old_snapshot
780 .text_for_range(start..old_end)
781 .collect::<String>(),
782 &self
783 .applied_edits_snapshot
784 .text_for_range(start..new_end)
785 .collect::<String>(),
786 ))
787 }
788
789 pub fn highlight_edits(
790 &self,
791 current_snapshot: &BufferSnapshot,
792 edits: &[(Range<Anchor>, impl AsRef<str>)],
793 include_deletions: bool,
794 cx: &App,
795 ) -> HighlightedText {
796 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
797 return HighlightedText::default();
798 };
799
800 let mut highlighted_text = HighlightedTextBuilder::default();
801
802 let visible_range_in_preview_snapshot =
803 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
804 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
805
806 let insertion_highlight_style = HighlightStyle {
807 background_color: Some(cx.theme().status().created_background),
808 ..Default::default()
809 };
810 let deletion_highlight_style = HighlightStyle {
811 background_color: Some(cx.theme().status().deleted_background),
812 ..Default::default()
813 };
814 let syntax_theme = cx.theme().syntax();
815
816 for (range, edit_text) in edits {
817 let edit_new_end_in_preview_snapshot = range
818 .end
819 .bias_right(&self.old_snapshot)
820 .to_offset(&self.applied_edits_snapshot);
821 let edit_start_in_preview_snapshot =
822 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
823
824 let unchanged_range_in_preview_snapshot =
825 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
826 if !unchanged_range_in_preview_snapshot.is_empty() {
827 highlighted_text.add_text_from_buffer_range(
828 unchanged_range_in_preview_snapshot,
829 &self.applied_edits_snapshot,
830 &self.syntax_snapshot,
831 None,
832 syntax_theme,
833 );
834 }
835
836 let range_in_current_snapshot = range.to_offset(current_snapshot);
837 if include_deletions && !range_in_current_snapshot.is_empty() {
838 highlighted_text.add_text_from_buffer_range(
839 range_in_current_snapshot,
840 &current_snapshot.text,
841 &current_snapshot.syntax,
842 Some(deletion_highlight_style),
843 syntax_theme,
844 );
845 }
846
847 if !edit_text.as_ref().is_empty() {
848 highlighted_text.add_text_from_buffer_range(
849 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
850 &self.applied_edits_snapshot,
851 &self.syntax_snapshot,
852 Some(insertion_highlight_style),
853 syntax_theme,
854 );
855 }
856
857 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
858 }
859
860 highlighted_text.add_text_from_buffer_range(
861 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
862 &self.applied_edits_snapshot,
863 &self.syntax_snapshot,
864 None,
865 syntax_theme,
866 );
867
868 highlighted_text.build()
869 }
870
871 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
872 cx.new(|cx| {
873 let mut buffer = Buffer::local_normalized(
874 self.applied_edits_snapshot.as_rope().clone(),
875 self.applied_edits_snapshot.line_ending(),
876 cx,
877 );
878 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
879 buffer
880 })
881 }
882
883 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
884 let (first, _) = edits.first()?;
885 let (last, _) = edits.last()?;
886
887 let start = first
888 .start
889 .bias_left(&self.old_snapshot)
890 .to_point(&self.applied_edits_snapshot);
891 let end = last
892 .end
893 .bias_right(&self.old_snapshot)
894 .to_point(&self.applied_edits_snapshot);
895
896 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
897 let range = Point::new(start.row, 0)
898 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
899
900 Some(range)
901 }
902}
903
904#[derive(Clone, Debug, PartialEq, Eq)]
905pub struct BracketMatch<T> {
906 pub open_range: Range<T>,
907 pub close_range: Range<T>,
908 pub newline_only: bool,
909 pub syntax_layer_depth: usize,
910 pub color_index: Option<usize>,
911}
912
913impl<T> BracketMatch<T> {
914 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
915 (self.open_range, self.close_range)
916 }
917}
918
919impl Buffer {
920 /// Create a new buffer with the given base text.
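    ///
    /// # Example
    ///
    /// A minimal sketch, assuming a `gpui` app context `cx` is in scope:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("hello world", cx));
    /// ```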
921 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
922 Self::build(
923 TextBuffer::new(
924 ReplicaId::LOCAL,
925 cx.entity_id().as_non_zero_u64().into(),
926 base_text.into(),
927 ),
928 None,
929 Capability::ReadWrite,
930 )
931 }
932
933 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
934 pub fn local_normalized(
935 base_text_normalized: Rope,
936 line_ending: LineEnding,
937 cx: &Context<Self>,
938 ) -> Self {
939 Self::build(
940 TextBuffer::new_normalized(
941 ReplicaId::LOCAL,
942 cx.entity_id().as_non_zero_u64().into(),
943 line_ending,
944 base_text_normalized,
945 ),
946 None,
947 Capability::ReadWrite,
948 )
949 }
950
951 /// Create a new buffer that is a replica of a remote buffer.
952 pub fn remote(
953 remote_id: BufferId,
954 replica_id: ReplicaId,
955 capability: Capability,
956 base_text: impl Into<String>,
957 ) -> Self {
958 Self::build(
959 TextBuffer::new(replica_id, remote_id, base_text.into()),
960 None,
961 capability,
962 )
963 }
964
965 /// Create a new buffer that is a replica of a remote buffer, populating its
966 /// state from the given protobuf message.
967 pub fn from_proto(
968 replica_id: ReplicaId,
969 capability: Capability,
970 message: proto::BufferState,
971 file: Option<Arc<dyn File>>,
972 ) -> Result<Self> {
973 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
974 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
975 let mut this = Self::build(buffer, file, capability);
976 this.text.set_line_ending(proto::deserialize_line_ending(
977 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
978 ));
979 this.saved_version = proto::deserialize_version(&message.saved_version);
980 this.saved_mtime = message.saved_mtime.map(|time| time.into());
981 Ok(this)
982 }
983
984 /// Serialize the buffer's state to a protobuf message.
985 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
986 proto::BufferState {
987 id: self.remote_id().into(),
988 file: self.file.as_ref().map(|f| f.to_proto(cx)),
989 base_text: self.base_text().to_string(),
990 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
991 saved_version: proto::serialize_version(&self.saved_version),
992 saved_mtime: self.saved_mtime.map(|time| time.into()),
993 }
994 }
995
996 /// Serialize as protobufs all of the changes to the buffer since the given version.
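    ///
    /// A rough sketch of the collaboration flow (`replica_id` and the transport
    /// between host and guest are assumed):
    ///
    /// ```ignore
    /// // Host: capture the buffer's state plus every operation a new guest needs.
    /// let state = buffer.read(cx).to_proto(cx);
    /// let ops = buffer.read(cx).serialize_ops(None, cx);
    /// // Guest: rebuild a replica from `state`, then deserialize and apply the
    /// // awaited `ops` to converge with the host.
    /// let replica = Buffer::from_proto(replica_id, Capability::ReadWrite, state, None)?;
    /// ```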
997 pub fn serialize_ops(
998 &self,
999 since: Option<clock::Global>,
1000 cx: &App,
1001 ) -> Task<Vec<proto::Operation>> {
1002 let mut operations = Vec::new();
1003 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1004
1005 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1006 proto::serialize_operation(&Operation::UpdateSelections {
1007 selections: set.selections.clone(),
1008 lamport_timestamp: set.lamport_timestamp,
1009 line_mode: set.line_mode,
1010 cursor_shape: set.cursor_shape,
1011 })
1012 }));
1013
1014 for (server_id, diagnostics) in &self.diagnostics {
1015 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1016 lamport_timestamp: self.diagnostics_timestamp,
1017 server_id: *server_id,
1018 diagnostics: diagnostics.iter().cloned().collect(),
1019 }));
1020 }
1021
1022 for (server_id, completions) in &self.completion_triggers_per_language_server {
1023 operations.push(proto::serialize_operation(
1024 &Operation::UpdateCompletionTriggers {
1025 triggers: completions.iter().cloned().collect(),
1026 lamport_timestamp: self.completion_triggers_timestamp,
1027 server_id: *server_id,
1028 },
1029 ));
1030 }
1031
1032 let text_operations = self.text.operations().clone();
1033 cx.background_spawn(async move {
1034 let since = since.unwrap_or_default();
1035 operations.extend(
1036 text_operations
1037 .iter()
1038 .filter(|(_, op)| !since.observed(op.timestamp()))
1039 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1040 );
1041 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1042 operations
1043 })
1044 }
1045
1046 /// Assign a language to the buffer without blocking for a reparse (except in tests), returning the buffer.
1047 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1048 self.set_language_async(Some(language), cx);
1049 self
1050 }
1051
1052 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
1053 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1054 self.set_language(Some(language), cx);
1055 self
1056 }
1057
1058 /// Returns the [`Capability`] of this buffer.
1059 pub fn capability(&self) -> Capability {
1060 self.capability
1061 }
1062
1063 /// Whether this buffer can only be read.
1064 pub fn read_only(&self) -> bool {
1065 self.capability == Capability::ReadOnly
1066 }
1067
1068 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1069 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1070 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1071 let snapshot = buffer.snapshot();
1072 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1073 let tree_sitter_data = TreeSitterData::new(snapshot);
1074 Self {
1075 saved_mtime,
1076 tree_sitter_data: Arc::new(tree_sitter_data),
1077 saved_version: buffer.version(),
1078 preview_version: buffer.version(),
1079 reload_task: None,
1080 transaction_depth: 0,
1081 was_dirty_before_starting_transaction: None,
1082 has_unsaved_edits: Cell::new((buffer.version(), false)),
1083 text: buffer,
1084 branch_state: None,
1085 file,
1086 capability,
1087 syntax_map,
1088 reparse: None,
1089 non_text_state_update_count: 0,
1090 sync_parse_timeout: Duration::from_millis(1),
1091 parse_status: watch::channel(ParseStatus::Idle),
1092 autoindent_requests: Default::default(),
1093 wait_for_autoindent_txs: Default::default(),
1094 pending_autoindent: Default::default(),
1095 language: None,
1096 remote_selections: Default::default(),
1097 diagnostics: Default::default(),
1098 diagnostics_timestamp: Lamport::MIN,
1099 completion_triggers: Default::default(),
1100 completion_triggers_per_language_server: Default::default(),
1101 completion_triggers_timestamp: Lamport::MIN,
1102 deferred_ops: OperationQueue::new(),
1103 has_conflict: false,
1104 change_bits: Default::default(),
1105 _subscriptions: Vec::new(),
1106 encoding: encoding_rs::UTF_8,
1107 has_bom: false,
1108 }
1109 }
1110
1111 pub fn build_snapshot(
1112 text: Rope,
1113 language: Option<Arc<Language>>,
1114 language_registry: Option<Arc<LanguageRegistry>>,
1115 cx: &mut App,
1116 ) -> impl Future<Output = BufferSnapshot> + use<> {
1117 let entity_id = cx.reserve_entity::<Self>().entity_id();
1118 let buffer_id = entity_id.as_non_zero_u64().into();
1119 async move {
1120 let text =
1121 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1122 .snapshot();
1123 let mut syntax = SyntaxMap::new(&text).snapshot();
1124 if let Some(language) = language.clone() {
1125 let language_registry = language_registry.clone();
1126 syntax.reparse(&text, language_registry, language);
1127 }
1128 let tree_sitter_data = TreeSitterData::new(text.clone());
1129 BufferSnapshot {
1130 text,
1131 syntax,
1132 file: None,
1133 diagnostics: Default::default(),
1134 remote_selections: Default::default(),
1135 tree_sitter_data: Arc::new(tree_sitter_data),
1136 language,
1137 non_text_state_update_count: 0,
1138 }
1139 }
1140 }
1141
1142 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1143 let entity_id = cx.reserve_entity::<Self>().entity_id();
1144 let buffer_id = entity_id.as_non_zero_u64().into();
1145 let text = TextBuffer::new_normalized(
1146 ReplicaId::LOCAL,
1147 buffer_id,
1148 Default::default(),
1149 Rope::new(),
1150 )
1151 .snapshot();
1152 let syntax = SyntaxMap::new(&text).snapshot();
1153 let tree_sitter_data = TreeSitterData::new(text.clone());
1154 BufferSnapshot {
1155 text,
1156 syntax,
1157 tree_sitter_data: Arc::new(tree_sitter_data),
1158 file: None,
1159 diagnostics: Default::default(),
1160 remote_selections: Default::default(),
1161 language: None,
1162 non_text_state_update_count: 0,
1163 }
1164 }
1165
1166 #[cfg(any(test, feature = "test-support"))]
1167 pub fn build_snapshot_sync(
1168 text: Rope,
1169 language: Option<Arc<Language>>,
1170 language_registry: Option<Arc<LanguageRegistry>>,
1171 cx: &mut App,
1172 ) -> BufferSnapshot {
1173 let entity_id = cx.reserve_entity::<Self>().entity_id();
1174 let buffer_id = entity_id.as_non_zero_u64().into();
1175 let text =
1176 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1177 .snapshot();
1178 let mut syntax = SyntaxMap::new(&text).snapshot();
1179 if let Some(language) = language.clone() {
1180 syntax.reparse(&text, language_registry, language);
1181 }
1182 let tree_sitter_data = TreeSitterData::new(text.clone());
1183 BufferSnapshot {
1184 text,
1185 syntax,
1186 tree_sitter_data: Arc::new(tree_sitter_data),
1187 file: None,
1188 diagnostics: Default::default(),
1189 remote_selections: Default::default(),
1190 language,
1191 non_text_state_update_count: 0,
1192 }
1193 }
1194
1195 /// Retrieve a snapshot of the buffer's current state. This is computationally
1196 /// cheap, and allows reading from the buffer on a background thread.
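    ///
    /// A small sketch of reading on a background thread:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is immutable, so it can be read here without
    ///     // blocking the main thread.
    ///     drop(snapshot);
    /// })
    /// .detach();
    /// ```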
1197 pub fn snapshot(&self) -> BufferSnapshot {
1198 let text = self.text.snapshot();
1199 let mut syntax_map = self.syntax_map.lock();
1200 syntax_map.interpolate(&text);
1201 let syntax = syntax_map.snapshot();
1202
1203 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1204 Arc::new(TreeSitterData::new(text.clone()))
1205 } else {
1206 self.tree_sitter_data.clone()
1207 };
1208
1209 BufferSnapshot {
1210 text,
1211 syntax,
1212 tree_sitter_data,
1213 file: self.file.clone(),
1214 remote_selections: self.remote_selections.clone(),
1215 diagnostics: self.diagnostics.clone(),
1216 language: self.language.clone(),
1217 non_text_state_update_count: self.non_text_state_update_count,
1218 }
1219 }
1220
1221 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1222 let this = cx.entity();
1223 cx.new(|cx| {
1224 let mut branch = Self {
1225 branch_state: Some(BufferBranchState {
1226 base_buffer: this.clone(),
1227 merged_operations: Default::default(),
1228 }),
1229 language: self.language.clone(),
1230 has_conflict: self.has_conflict,
1231 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1232 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1233 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1234 };
1235 if let Some(language_registry) = self.language_registry() {
1236 branch.set_language_registry(language_registry);
1237 }
1238
1239 // Reparse the branch buffer so that we get syntax highlighting immediately.
1240 branch.reparse(cx, true);
1241
1242 branch
1243 })
1244 }
1245
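    /// Computes an [`EditPreview`] for the given edits on a background thread,
    /// without modifying this buffer.
    ///
    /// A sketch, in an async context (the anchors and edit text are illustrative):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// let edits: Arc<[(Range<Anchor>, Arc<str>)]> = Arc::from(vec![(
    ///     snapshot.anchor_before(0)..snapshot.anchor_before(0),
    ///     Arc::from("// header\n"),
    /// )]);
    /// let preview = buffer.read(cx).preview_edits(edits.clone(), cx).await;
    /// let patch = preview.as_unified_diff(&edits);
    /// ```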
1246 pub fn preview_edits(
1247 &self,
1248 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1249 cx: &App,
1250 ) -> Task<EditPreview> {
1251 let registry = self.language_registry();
1252 let language = self.language().cloned();
1253 let old_snapshot = self.text.snapshot();
1254 let mut branch_buffer = self.text.branch();
1255 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1256 cx.background_spawn(async move {
1257 if !edits.is_empty() {
1258 if let Some(language) = language.clone() {
1259 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1260 }
1261
1262 branch_buffer.edit(edits.iter().cloned());
1263 let snapshot = branch_buffer.snapshot();
1264 syntax_snapshot.interpolate(&snapshot);
1265
1266 if let Some(language) = language {
1267 syntax_snapshot.reparse(&snapshot, registry, language);
1268 }
1269 }
1270 EditPreview {
1271 old_snapshot,
1272 applied_edits_snapshot: branch_buffer.snapshot(),
1273 syntax_snapshot,
1274 }
1275 })
1276 }
1277
1278 /// Applies all of the changes in this buffer that intersect any of the
1279 /// given `ranges` to its base buffer.
1280 ///
1281 /// If `ranges` is empty, then all changes will be applied. This buffer must
1282 /// be a branch buffer to call this method.
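    ///
    /// A sketch of the branch-and-merge flow:
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// note\n")], None, cx);
    ///     // Passing an empty `ranges` vector applies every change to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```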
1283 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1284 let Some(base_buffer) = self.base_buffer() else {
1285 debug_panic!("not a branch buffer");
1286 return;
1287 };
1288
1289 let mut ranges = if ranges.is_empty() {
1290 &[0..usize::MAX]
1291 } else {
1292 ranges.as_slice()
1293 }
1294 .iter()
1295 .peekable();
1296
1297 let mut edits = Vec::new();
1298 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1299 let mut is_included = false;
1300 while let Some(range) = ranges.peek() {
1301 if range.end < edit.new.start {
1302 ranges.next().unwrap();
1303 } else {
1304 if range.start <= edit.new.end {
1305 is_included = true;
1306 }
1307 break;
1308 }
1309 }
1310
1311 if is_included {
1312 edits.push((
1313 edit.old.clone(),
1314 self.text_for_range(edit.new.clone()).collect::<String>(),
1315 ));
1316 }
1317 }
1318
1319 let operation = base_buffer.update(cx, |base_buffer, cx| {
1320 // cx.emit(BufferEvent::DiffBaseChanged);
1321 base_buffer.edit(edits, None, cx)
1322 });
1323
1324 if let Some(operation) = operation
1325 && let Some(BufferBranchState {
1326 merged_operations, ..
1327 }) = &mut self.branch_state
1328 {
1329 merged_operations.push(operation);
1330 }
1331 }
1332
1333 fn on_base_buffer_event(
1334 &mut self,
1335 _: Entity<Buffer>,
1336 event: &BufferEvent,
1337 cx: &mut Context<Self>,
1338 ) {
1339 let BufferEvent::Operation { operation, .. } = event else {
1340 return;
1341 };
1342 let Some(BufferBranchState {
1343 merged_operations, ..
1344 }) = &mut self.branch_state
1345 else {
1346 return;
1347 };
1348
1349 let mut operation_to_undo = None;
1350 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1351 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1352 {
1353 merged_operations.remove(ix);
1354 operation_to_undo = Some(operation.timestamp);
1355 }
1356
1357 self.apply_ops([operation.clone()], cx);
1358
1359 if let Some(timestamp) = operation_to_undo {
1360 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1361 self.undo_operations(counts, cx);
1362 }
1363 }
1364
1365 #[cfg(test)]
1366 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1367 &self.text
1368 }
1369
1370 /// Retrieve a snapshot of the buffer's raw text, without any
1371 /// language-related state like the syntax tree or diagnostics.
1372 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1373 self.text.snapshot()
1374 }
1375
1376 /// The file associated with the buffer, if any.
1377 pub fn file(&self) -> Option<&Arc<dyn File>> {
1378 self.file.as_ref()
1379 }
1380
1381 /// The version of the buffer that was last saved or reloaded from disk.
1382 pub fn saved_version(&self) -> &clock::Global {
1383 &self.saved_version
1384 }
1385
1386 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1387 pub fn saved_mtime(&self) -> Option<MTime> {
1388 self.saved_mtime
1389 }
1390
1391 /// Returns the character encoding of the buffer's file.
1392 pub fn encoding(&self) -> &'static Encoding {
1393 self.encoding
1394 }
1395
1396 /// Sets the character encoding of the buffer.
1397 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1398 self.encoding = encoding;
1399 }
1400
1401 /// Returns whether the buffer has a Byte Order Mark.
1402 pub fn has_bom(&self) -> bool {
1403 self.has_bom
1404 }
1405
1406 /// Sets whether the buffer has a Byte Order Mark.
1407 pub fn set_has_bom(&mut self, has_bom: bool) {
1408 self.has_bom = has_bom;
1409 }
1410
1411 /// Assign a language to the buffer, reparsing asynchronously (except in tests, where this may block briefly).
1412 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1413 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1414 }
1415
1416 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
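    ///
    /// ```ignore
    /// // `rust_language` is assumed to have been loaded from a `LanguageRegistry`.
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.set_language(Some(rust_language.clone()), cx)
    /// });
    /// ```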
1417 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1418 self.set_language_(language, true, cx);
1419 }
1420
1421 fn set_language_(
1422 &mut self,
1423 language: Option<Arc<Language>>,
1424 may_block: bool,
1425 cx: &mut Context<Self>,
1426 ) {
1427 self.non_text_state_update_count += 1;
1428 self.syntax_map.lock().clear(&self.text);
1429 let old_language = std::mem::replace(&mut self.language, language);
1430 self.was_changed();
1431 self.reparse(cx, may_block);
1432 let has_fresh_language =
1433 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1434 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1435 }
1436
1437 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1438 /// other languages if parts of the buffer are written in different languages.
1439 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1440 self.syntax_map
1441 .lock()
1442 .set_language_registry(language_registry);
1443 }
1444
1445 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1446 self.syntax_map.lock().language_registry()
1447 }
1448
1449 /// Assign the line ending type to the buffer.
1450 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1451 self.text.set_line_ending(line_ending);
1452
1453 let lamport_timestamp = self.text.lamport_clock.tick();
1454 self.send_operation(
1455 Operation::UpdateLineEnding {
1456 line_ending,
1457 lamport_timestamp,
1458 },
1459 true,
1460 cx,
1461 );
1462 }
1463
1464 /// Assign the buffer a new [`Capability`].
1465 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1466 if self.capability != capability {
1467 self.capability = capability;
1468 cx.emit(BufferEvent::CapabilityChanged)
1469 }
1470 }
1471
1472 /// This method is called to signal that the buffer has been saved.
1473 pub fn did_save(
1474 &mut self,
1475 version: clock::Global,
1476 mtime: Option<MTime>,
1477 cx: &mut Context<Self>,
1478 ) {
1479 self.saved_version = version.clone();
1480 self.has_unsaved_edits.set((version, false));
1481 self.has_conflict = false;
1482 self.saved_mtime = mtime;
1483 self.was_changed();
1484 cx.emit(BufferEvent::Saved);
1485 cx.notify();
1486 }
1487
1488 /// Reloads the contents of the buffer from disk.
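    ///
    /// A sketch of waiting for the reload to finish:
    ///
    /// ```ignore
    /// let reloaded = buffer.update(cx, |buffer, cx| buffer.reload(cx));
    /// // Awaiting the receiver yields the reload transaction, if one was applied.
    /// let _transaction = reloaded.await.ok().flatten();
    /// ```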
1489 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1490 let (tx, rx) = futures::channel::oneshot::channel();
1491 let prev_version = self.text.version();
1492 self.reload_task = Some(cx.spawn(async move |this, cx| {
1493 let Some((new_mtime, load_bytes_task, encoding)) = this.update(cx, |this, cx| {
1494 let file = this.file.as_ref()?.as_local()?;
1495 Some((
1496 file.disk_state().mtime(),
1497 file.load_bytes(cx),
1498 this.encoding,
1499 ))
1500 })?
1501 else {
1502 return Ok(());
1503 };
1504
1505 let bytes = load_bytes_task.await?;
1506 let (cow, _encoding_used, _has_errors) = encoding.decode(&bytes);
1507 let new_text = cow.into_owned();
1508
1509 let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await;
1510 this.update(cx, |this, cx| {
1511 if this.version() == diff.base_version {
1512 this.finalize_last_transaction();
1513 this.apply_diff(diff, cx);
1514 tx.send(this.finalize_last_transaction().cloned()).ok();
1515 this.has_conflict = false;
1516 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1517 } else {
1518 if !diff.edits.is_empty()
1519 || this
1520 .edits_since::<usize>(&diff.base_version)
1521 .next()
1522 .is_some()
1523 {
1524 this.has_conflict = true;
1525 }
1526
1527 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1528 }
1529
1530 this.reload_task.take();
1531 })
1532 }));
1533 rx
1534 }
1535
1536 /// This method is called to signal that the buffer has been reloaded.
1537 pub fn did_reload(
1538 &mut self,
1539 version: clock::Global,
1540 line_ending: LineEnding,
1541 mtime: Option<MTime>,
1542 cx: &mut Context<Self>,
1543 ) {
1544 self.saved_version = version;
1545 self.has_unsaved_edits
1546 .set((self.saved_version.clone(), false));
1547 self.text.set_line_ending(line_ending);
1548 self.saved_mtime = mtime;
1549 cx.emit(BufferEvent::Reloaded);
1550 cx.notify();
1551 }
1552
1553 /// Updates the [`File`] backing this buffer. This should be called when
1554 /// the file has changed or has been deleted.
1555 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1556 let was_dirty = self.is_dirty();
1557 let mut file_changed = false;
1558
1559 if let Some(old_file) = self.file.as_ref() {
1560 if new_file.path() != old_file.path() {
1561 file_changed = true;
1562 }
1563
1564 let old_state = old_file.disk_state();
1565 let new_state = new_file.disk_state();
1566 if old_state != new_state {
1567 file_changed = true;
1568 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1569 cx.emit(BufferEvent::ReloadNeeded)
1570 }
1571 }
1572 } else {
1573 file_changed = true;
1574 };
1575
1576 self.file = Some(new_file);
1577 if file_changed {
1578 self.was_changed();
1579 self.non_text_state_update_count += 1;
1580 if was_dirty != self.is_dirty() {
1581 cx.emit(BufferEvent::DirtyChanged);
1582 }
1583 cx.emit(BufferEvent::FileHandleChanged);
1584 cx.notify();
1585 }
1586 }
1587
1588 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1589 Some(self.branch_state.as_ref()?.base_buffer.clone())
1590 }
1591
1592 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1593 pub fn language(&self) -> Option<&Arc<Language>> {
1594 self.language.as_ref()
1595 }
1596
1597 /// Returns the [`Language`] at the given location.
1598 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1599 let offset = position.to_offset(self);
1600 let mut is_first = true;
1601 let start_anchor = self.anchor_before(offset);
1602 let end_anchor = self.anchor_after(offset);
1603 self.syntax_map
1604 .lock()
1605 .layers_for_range(offset..offset, &self.text, false)
1606 .filter(|layer| {
1607 if is_first {
1608 is_first = false;
1609 return true;
1610 }
1611
1612 layer
1613 .included_sub_ranges
1614 .map(|sub_ranges| {
1615 sub_ranges.iter().any(|sub_range| {
1616 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1617 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1618 !is_before_start && !is_after_end
1619 })
1620 })
1621 .unwrap_or(true)
1622 })
1623 .last()
1624 .map(|info| info.language.clone())
1625 .or_else(|| self.language.clone())
1626 }
1627
1628 /// Returns each [`Language`] for the active syntax layers at the given location.
1629 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1630 let offset = position.to_offset(self);
1631 let mut languages: Vec<Arc<Language>> = self
1632 .syntax_map
1633 .lock()
1634 .layers_for_range(offset..offset, &self.text, false)
1635 .map(|info| info.language.clone())
1636 .collect();
1637
1638 if languages.is_empty()
1639 && let Some(buffer_language) = self.language()
1640 {
1641 languages.push(buffer_language.clone());
1642 }
1643
1644 languages
1645 }
1646
1647 /// An integer version number that accounts for all updates besides
1648 /// the buffer's text itself (which is versioned via a version vector).
1649 pub fn non_text_state_update_count(&self) -> usize {
1650 self.non_text_state_update_count
1651 }
1652
1653 /// Whether the buffer is being parsed in the background.
1654 #[cfg(any(test, feature = "test-support"))]
1655 pub fn is_parsing(&self) -> bool {
1656 self.reparse.is_some()
1657 }
1658
1659 /// Indicates whether the buffer contains any regions that may be
1660 /// written in a language that hasn't been loaded yet.
1661 pub fn contains_unknown_injections(&self) -> bool {
1662 self.syntax_map.lock().contains_unknown_injections()
1663 }
1664
1665 #[cfg(any(test, feature = "test-support"))]
1666 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1667 self.sync_parse_timeout = timeout;
1668 }
1669
1670 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1671 match Arc::get_mut(&mut self.tree_sitter_data) {
1672 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1673 None => {
1674 let tree_sitter_data = TreeSitterData::new(snapshot);
1675 self.tree_sitter_data = Arc::new(tree_sitter_data)
1676 }
1677 }
1678 }
1679
1680 /// Called after an edit to synchronize the buffer's main parse tree with
1681 /// the buffer's new underlying state.
1682 ///
1683 /// Locks the syntax map and interpolates the edits since the last reparse
1684 /// into the foreground syntax tree.
1685 ///
1686 /// Then takes a stable snapshot of the syntax map before unlocking it.
1687 /// The snapshot with the interpolated edits is sent to a background thread,
1688 /// where we ask Tree-sitter to perform an incremental parse.
1689 ///
1690 /// Meanwhile, if `may_block` is true, the foreground blocks the main thread
1691 /// for up to 1ms (`sync_parse_timeout`) waiting for the parse to complete.
1692 /// If it finishes in time, we proceed synchronously.
1693 ///
1694 /// If the timeout elapses, we return with the interpolated tree still in the
1695 /// foreground and spawn a second task that waits for the parse to finish.
1696 /// When the background parse completes, it calls back into the main thread
1697 /// and installs the new parse state.
1698 ///
1699 /// If the buffer or grammar changed since the start of the background parse,
1700 /// we initiate an additional reparse recursively. To avoid concurrent parses
1701 /// for the same buffer, we only initiate a new parse if we are not already
1702 /// parsing in the background.
1703 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1704 if self.text.version() != *self.tree_sitter_data.version() {
1705 self.invalidate_tree_sitter_data(self.text.snapshot());
1706 }
1707 if self.reparse.is_some() {
1708 return;
1709 }
1710 let language = if let Some(language) = self.language.clone() {
1711 language
1712 } else {
1713 return;
1714 };
1715
1716 let text = self.text_snapshot();
1717 let parsed_version = self.version();
1718
1719 let mut syntax_map = self.syntax_map.lock();
1720 syntax_map.interpolate(&text);
1721 let language_registry = syntax_map.language_registry();
1722 let mut syntax_snapshot = syntax_map.snapshot();
1723 drop(syntax_map);
1724
1725 let parse_task = cx.background_spawn({
1726 let language = language.clone();
1727 let language_registry = language_registry.clone();
1728 async move {
1729 syntax_snapshot.reparse(&text, language_registry, language);
1730 syntax_snapshot
1731 }
1732 });
1733
1734 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1735 if may_block {
1736 match cx
1737 .background_executor()
1738 .block_with_timeout(self.sync_parse_timeout, parse_task)
1739 {
1740 Ok(new_syntax_snapshot) => {
1741 self.did_finish_parsing(new_syntax_snapshot, cx);
1742 self.reparse = None;
1743 }
1744 Err(parse_task) => {
1745 self.reparse = Some(cx.spawn(async move |this, cx| {
1746 let new_syntax_map = cx.background_spawn(parse_task).await;
1747 this.update(cx, move |this, cx| {
1748 let grammar_changed = || {
1749 this.language.as_ref().is_none_or(|current_language| {
1750 !Arc::ptr_eq(&language, current_language)
1751 })
1752 };
1753 let language_registry_changed = || {
1754 new_syntax_map.contains_unknown_injections()
1755 && language_registry.is_some_and(|registry| {
1756 registry.version()
1757 != new_syntax_map.language_registry_version()
1758 })
1759 };
1760 let parse_again = this.version.changed_since(&parsed_version)
1761 || language_registry_changed()
1762 || grammar_changed();
1763 this.did_finish_parsing(new_syntax_map, cx);
1764 this.reparse = None;
1765 if parse_again {
1766 this.reparse(cx, false);
1767 }
1768 })
1769 .ok();
1770 }));
1771 }
1772 }
1773 } else {
1774 self.reparse = Some(cx.spawn(async move |this, cx| {
1775 let new_syntax_map = cx.background_spawn(parse_task).await;
1776 this.update(cx, move |this, cx| {
1777 let grammar_changed = || {
1778 this.language.as_ref().is_none_or(|current_language| {
1779 !Arc::ptr_eq(&language, current_language)
1780 })
1781 };
1782 let language_registry_changed = || {
1783 new_syntax_map.contains_unknown_injections()
1784 && language_registry.is_some_and(|registry| {
1785 registry.version() != new_syntax_map.language_registry_version()
1786 })
1787 };
1788 let parse_again = this.version.changed_since(&parsed_version)
1789 || language_registry_changed()
1790 || grammar_changed();
1791 this.did_finish_parsing(new_syntax_map, cx);
1792 this.reparse = None;
1793 if parse_again {
1794 this.reparse(cx, false);
1795 }
1796 })
1797 .ok();
1798 }));
1799 }
1800 }
1801
1802 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1803 self.was_changed();
1804 self.non_text_state_update_count += 1;
1805 self.syntax_map.lock().did_parse(syntax_snapshot);
1806 self.request_autoindent(cx);
1807 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1808 self.invalidate_tree_sitter_data(self.text.snapshot());
1809 cx.emit(BufferEvent::Reparsed);
1810 cx.notify();
1811 }
1812
1813 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1814 self.parse_status.1.clone()
1815 }
1816
    /// Waits until the buffer is no longer parsing.
1818 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1819 let mut parse_status = self.parse_status();
1820 async move {
1821 while *parse_status.borrow() != ParseStatus::Idle {
1822 if parse_status.changed().await.is_err() {
1823 break;
1824 }
1825 }
1826 }
1827 }
1828
1829 /// Assign to the buffer a set of diagnostics created by a given language server.
1830 pub fn update_diagnostics(
1831 &mut self,
1832 server_id: LanguageServerId,
1833 diagnostics: DiagnosticSet,
1834 cx: &mut Context<Self>,
1835 ) {
1836 let lamport_timestamp = self.text.lamport_clock.tick();
1837 let op = Operation::UpdateDiagnostics {
1838 server_id,
1839 diagnostics: diagnostics.iter().cloned().collect(),
1840 lamport_timestamp,
1841 };
1842
1843 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1844 self.send_operation(op, true, cx);
1845 }
1846
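    /// Returns the diagnostic entries stored for this buffer, either for a single
    /// language server (when `for_server` is `Some`) or for all servers combined.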
1847 pub fn buffer_diagnostics(
1848 &self,
1849 for_server: Option<LanguageServerId>,
1850 ) -> Vec<&DiagnosticEntry<Anchor>> {
1851 match for_server {
1852 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1853 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1854 Err(_) => Vec::new(),
1855 },
1856 None => self
1857 .diagnostics
1858 .iter()
1859 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1860 .collect(),
1861 }
1862 }
1863
1864 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1865 if let Some(indent_sizes) = self.compute_autoindents() {
1866 let indent_sizes = cx.background_spawn(indent_sizes);
1867 match cx
1868 .background_executor()
1869 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1870 {
1871 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1872 Err(indent_sizes) => {
1873 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1874 let indent_sizes = indent_sizes.await;
1875 this.update(cx, |this, cx| {
1876 this.apply_autoindents(indent_sizes, cx);
1877 })
1878 .ok();
1879 }));
1880 }
1881 }
1882 } else {
1883 self.autoindent_requests.clear();
1884 for tx in self.wait_for_autoindent_txs.drain(..) {
1885 tx.send(()).ok();
1886 }
1887 }
1888 }
1889
1890 fn compute_autoindents(
1891 &self,
1892 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1893 let max_rows_between_yields = 100;
1894 let snapshot = self.snapshot();
1895 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1896 return None;
1897 }
1898
1899 let autoindent_requests = self.autoindent_requests.clone();
1900 Some(async move {
1901 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1902 for request in autoindent_requests {
1903 // Resolve each edited range to its row in the current buffer and in the
1904 // buffer before this batch of edits.
1905 let mut row_ranges = Vec::new();
1906 let mut old_to_new_rows = BTreeMap::new();
1907 let mut language_indent_sizes_by_new_row = Vec::new();
1908 for entry in &request.entries {
1909 let position = entry.range.start;
1910 let new_row = position.to_point(&snapshot).row;
1911 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1912 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1913
1914 if !entry.first_line_is_new {
1915 let old_row = position.to_point(&request.before_edit).row;
1916 old_to_new_rows.insert(old_row, new_row);
1917 }
1918 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1919 }
1920
1921 // Build a map containing the suggested indentation for each of the edited lines
1922 // with respect to the state of the buffer before these edits. This map is keyed
1923 // by the rows for these lines in the current state of the buffer.
1924 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1925 let old_edited_ranges =
1926 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1927 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1928 let mut language_indent_size = IndentSize::default();
1929 for old_edited_range in old_edited_ranges {
1930 let suggestions = request
1931 .before_edit
1932 .suggest_autoindents(old_edited_range.clone())
1933 .into_iter()
1934 .flatten();
1935 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1936 if let Some(suggestion) = suggestion {
1937 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1938
1939 // Find the indent size based on the language for this row.
1940 while let Some((row, size)) = language_indent_sizes.peek() {
1941 if *row > new_row {
1942 break;
1943 }
1944 language_indent_size = *size;
1945 language_indent_sizes.next();
1946 }
1947
1948 let suggested_indent = old_to_new_rows
1949 .get(&suggestion.basis_row)
1950 .and_then(|from_row| {
1951 Some(old_suggestions.get(from_row).copied()?.0)
1952 })
1953 .unwrap_or_else(|| {
1954 request
1955 .before_edit
1956 .indent_size_for_line(suggestion.basis_row)
1957 })
1958 .with_delta(suggestion.delta, language_indent_size);
1959 old_suggestions
1960 .insert(new_row, (suggested_indent, suggestion.within_error));
1961 }
1962 }
1963 yield_now().await;
1964 }
1965
1966 // Compute new suggestions for each line, but only include them in the result
1967 // if they differ from the old suggestion for that line.
1968 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1969 let mut language_indent_size = IndentSize::default();
1970 for (row_range, original_indent_column) in row_ranges {
1971 let new_edited_row_range = if request.is_block_mode {
1972 row_range.start..row_range.start + 1
1973 } else {
1974 row_range.clone()
1975 };
1976
1977 let suggestions = snapshot
1978 .suggest_autoindents(new_edited_row_range.clone())
1979 .into_iter()
1980 .flatten();
1981 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1982 if let Some(suggestion) = suggestion {
1983 // Find the indent size based on the language for this row.
1984 while let Some((row, size)) = language_indent_sizes.peek() {
1985 if *row > new_row {
1986 break;
1987 }
1988 language_indent_size = *size;
1989 language_indent_sizes.next();
1990 }
1991
1992 let suggested_indent = indent_sizes
1993 .get(&suggestion.basis_row)
1994 .copied()
1995 .map(|e| e.0)
1996 .unwrap_or_else(|| {
1997 snapshot.indent_size_for_line(suggestion.basis_row)
1998 })
1999 .with_delta(suggestion.delta, language_indent_size);
2000
2001 if old_suggestions.get(&new_row).is_none_or(
2002 |(old_indentation, was_within_error)| {
2003 suggested_indent != *old_indentation
2004 && (!suggestion.within_error || *was_within_error)
2005 },
2006 ) {
2007 indent_sizes.insert(
2008 new_row,
2009 (suggested_indent, request.ignore_empty_lines),
2010 );
2011 }
2012 }
2013 }
2014
2015 if let (true, Some(original_indent_column)) =
2016 (request.is_block_mode, original_indent_column)
2017 {
2018 let new_indent =
2019 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2020 *indent
2021 } else {
2022 snapshot.indent_size_for_line(row_range.start)
2023 };
2024 let delta = new_indent.len as i64 - original_indent_column as i64;
2025 if delta != 0 {
2026 for row in row_range.skip(1) {
2027 indent_sizes.entry(row).or_insert_with(|| {
2028 let mut size = snapshot.indent_size_for_line(row);
2029 if size.kind == new_indent.kind {
2030 match delta.cmp(&0) {
2031 Ordering::Greater => size.len += delta as u32,
2032 Ordering::Less => {
2033 size.len = size.len.saturating_sub(-delta as u32)
2034 }
2035 Ordering::Equal => {}
2036 }
2037 }
2038 (size, request.ignore_empty_lines)
2039 });
2040 }
2041 }
2042 }
2043
2044 yield_now().await;
2045 }
2046 }
2047
2048 indent_sizes
2049 .into_iter()
2050 .filter_map(|(row, (indent, ignore_empty_lines))| {
2051 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2052 None
2053 } else {
2054 Some((row, indent))
2055 }
2056 })
2057 .collect()
2058 })
2059 }
2060
2061 fn apply_autoindents(
2062 &mut self,
2063 indent_sizes: BTreeMap<u32, IndentSize>,
2064 cx: &mut Context<Self>,
2065 ) {
2066 self.autoindent_requests.clear();
2067 for tx in self.wait_for_autoindent_txs.drain(..) {
2068 tx.send(()).ok();
2069 }
2070
2071 let edits: Vec<_> = indent_sizes
2072 .into_iter()
2073 .filter_map(|(row, indent_size)| {
2074 let current_size = indent_size_for_line(self, row);
2075 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2076 })
2077 .collect();
2078
2079 let preserve_preview = self.preserve_preview();
2080 self.edit(edits, None, cx);
2081 if preserve_preview {
2082 self.refresh_preview();
2083 }
2084 }
2085
2086 /// Create a minimal edit that will cause the given row to be indented
2087 /// with the given size. After applying this edit, the length of the line
2088 /// will always be at least `new_size.len`.
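    ///
    /// A minimal sketch (not compiled as a doc-test); the row number is arbitrary:
    ///
    /// ```ignore
    /// // Growing a 2-space indent to a 4-space indent inserts two spaces at
    /// // column 0 of the given row.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```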
2089 pub fn edit_for_indent_size_adjustment(
2090 row: u32,
2091 current_size: IndentSize,
2092 new_size: IndentSize,
2093 ) -> Option<(Range<Point>, String)> {
2094 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
2096 Ordering::Greater => {
2097 let point = Point::new(row, 0);
2098 Some((
2099 point..point,
2100 iter::repeat(new_size.char())
2101 .take((new_size.len - current_size.len) as usize)
2102 .collect::<String>(),
2103 ))
2104 }
2105
2106 Ordering::Less => Some((
2107 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2108 String::new(),
2109 )),
2110
2111 Ordering::Equal => None,
2112 }
2113 } else {
2114 Some((
2115 Point::new(row, 0)..Point::new(row, current_size.len),
2116 iter::repeat(new_size.char())
2117 .take(new_size.len as usize)
2118 .collect::<String>(),
2119 ))
2120 }
2121 }
2122
2123 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2124 /// and the given new text.
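    ///
    /// A minimal sketch (not compiled as a doc-test), assuming an existing
    /// `buffer: Entity<Buffer>` and an async-capable GPUI context:
    ///
    /// ```ignore
    /// let diff = buffer.read(cx).diff("new contents\n".to_string(), cx);
    /// cx.spawn(async move |cx| {
    ///     let diff = diff.await;
    ///     // Apply the diff back on the main thread; edits made in the meantime
    ///     // are accounted for by `apply_diff`.
    ///     buffer
    ///         .update(cx, |buffer, cx| buffer.apply_diff(diff, cx))
    ///         .ok();
    /// })
    /// .detach();
    /// ```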
2125 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2126 let old_text = self.as_rope().clone();
2127 let base_version = self.version();
2128 cx.background_executor()
2129 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2130 let old_text = old_text.to_string();
2131 let line_ending = LineEnding::detect(&new_text);
2132 LineEnding::normalize(&mut new_text);
2133 let edits = text_diff(&old_text, &new_text);
2134 Diff {
2135 base_version,
2136 line_ending,
2137 edits,
2138 }
2139 })
2140 }
2141
2142 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of its lines, and returns a `Diff` that removes that whitespace.
2144 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2145 let old_text = self.as_rope().clone();
2146 let line_ending = self.line_ending();
2147 let base_version = self.version();
2148 cx.background_spawn(async move {
2149 let ranges = trailing_whitespace_ranges(&old_text);
2150 let empty = Arc::<str>::from("");
2151 Diff {
2152 base_version,
2153 line_ending,
2154 edits: ranges
2155 .into_iter()
2156 .map(|range| (range, empty.clone()))
2157 .collect(),
2158 }
2159 })
2160 }
2161
2162 /// Ensures that the buffer ends with a single newline character, and
    /// no other trailing whitespace. Does nothing if the buffer is empty.
2164 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2165 let len = self.len();
2166 if len == 0 {
2167 return;
2168 }
2169 let mut offset = len;
2170 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2171 let non_whitespace_len = chunk
2172 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2173 .len();
2174 offset -= chunk.len();
2175 offset += non_whitespace_len;
2176 if non_whitespace_len != 0 {
2177 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2178 return;
2179 }
2180 break;
2181 }
2182 }
2183 self.edit([(offset..len, "\n")], None, cx);
2184 }
2185
2186 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2187 /// calculated, then adjust the diff to account for those changes, and discard any
2188 /// parts of the diff that conflict with those changes.
2189 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2190 let snapshot = self.snapshot();
2191 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2192 let mut delta = 0;
2193 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2194 while let Some(edit_since) = edits_since.peek() {
2195 // If the edit occurs after a diff hunk, then it does not
2196 // affect that hunk.
2197 if edit_since.old.start > range.end {
2198 break;
2199 }
2200 // If the edit precedes the diff hunk, then adjust the hunk
2201 // to reflect the edit.
2202 else if edit_since.old.end < range.start {
2203 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2204 edits_since.next();
2205 }
2206 // If the edit intersects a diff hunk, then discard that hunk.
2207 else {
2208 return None;
2209 }
2210 }
2211
2212 let start = (range.start as i64 + delta) as usize;
2213 let end = (range.end as i64 + delta) as usize;
2214 Some((start..end, new_text))
2215 });
2216
2217 self.start_transaction();
2218 self.text.set_line_ending(diff.line_ending);
2219 self.edit(adjusted_edits, None, cx);
2220 self.end_transaction(cx)
2221 }
2222
2223 pub fn has_unsaved_edits(&self) -> bool {
2224 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2225
2226 if last_version == self.version {
2227 self.has_unsaved_edits
2228 .set((last_version, has_unsaved_edits));
2229 return has_unsaved_edits;
2230 }
2231
2232 let has_edits = self.has_edits_since(&self.saved_version);
2233 self.has_unsaved_edits
2234 .set((self.version.clone(), has_edits));
2235 has_edits
2236 }
2237
2238 /// Checks if the buffer has unsaved changes.
2239 pub fn is_dirty(&self) -> bool {
2240 if self.capability == Capability::ReadOnly {
2241 return false;
2242 }
2243 if self.has_conflict {
2244 return true;
2245 }
2246 match self.file.as_ref().map(|f| f.disk_state()) {
2247 Some(DiskState::New) | Some(DiskState::Deleted) => {
2248 !self.is_empty() && self.has_unsaved_edits()
2249 }
2250 _ => self.has_unsaved_edits(),
2251 }
2252 }
2253
2254 /// Marks the buffer as having a conflict regardless of current buffer state.
2255 pub fn set_conflict(&mut self) {
2256 self.has_conflict = true;
2257 }
2258
2259 /// Checks if the buffer and its file have both changed since the buffer
2260 /// was last saved or reloaded.
2261 pub fn has_conflict(&self) -> bool {
2262 if self.has_conflict {
2263 return true;
2264 }
2265 let Some(file) = self.file.as_ref() else {
2266 return false;
2267 };
2268 match file.disk_state() {
2269 DiskState::New => false,
2270 DiskState::Present { mtime } => match self.saved_mtime {
2271 Some(saved_mtime) => {
2272 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2273 }
2274 None => true,
2275 },
2276 DiskState::Deleted => false,
2277 }
2278 }
2279
2280 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2281 pub fn subscribe(&mut self) -> Subscription<usize> {
2282 self.text.subscribe()
2283 }
2284
2285 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2286 ///
2287 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
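    ///
    /// A minimal sketch (not compiled as a doc-test); the variable names are
    /// illustrative:
    ///
    /// ```ignore
    /// // Using std::rc::Rc and std::cell::Cell.
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...after some edits have been applied...
    /// if changed.get() {
    ///     // React to the text change without waiting for a `BufferEvent`.
    /// }
    /// ```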
2289 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2290 if let Err(ix) = self
2291 .change_bits
2292 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2293 {
2294 self.change_bits.insert(ix, bit);
2295 }
2296 }
2297
2298 /// Set the change bit for all "listeners".
2299 fn was_changed(&mut self) {
2300 self.change_bits.retain(|change_bit| {
2301 change_bit
2302 .upgrade()
2303 .inspect(|bit| {
2304 _ = bit.replace(true);
2305 })
2306 .is_some()
2307 });
2308 }
2309
2310 /// Starts a transaction, if one is not already in-progress. When undoing or
2311 /// redoing edits, all of the edits performed within a transaction are undone
2312 /// or redone together.
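    ///
    /// A minimal sketch (not compiled as a doc-test), assuming a `Context<Buffer>` named `cx`:
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// // A single undo now reverts both edits together.
    /// ```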
2313 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2314 self.start_transaction_at(Instant::now())
2315 }
2316
2317 /// Starts a transaction, providing the current time. Subsequent transactions
2318 /// that occur within a short period of time will be grouped together. This
2319 /// is controlled by the buffer's undo grouping duration.
2320 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2321 self.transaction_depth += 1;
2322 if self.was_dirty_before_starting_transaction.is_none() {
2323 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2324 }
2325 self.text.start_transaction_at(now)
2326 }
2327
2328 /// Terminates the current transaction, if this is the outermost transaction.
2329 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2330 self.end_transaction_at(Instant::now(), cx)
2331 }
2332
2333 /// Terminates the current transaction, providing the current time. Subsequent transactions
2334 /// that occur within a short period of time will be grouped together. This
2335 /// is controlled by the buffer's undo grouping duration.
2336 pub fn end_transaction_at(
2337 &mut self,
2338 now: Instant,
2339 cx: &mut Context<Self>,
2340 ) -> Option<TransactionId> {
2341 assert!(self.transaction_depth > 0);
2342 self.transaction_depth -= 1;
2343 let was_dirty = if self.transaction_depth == 0 {
2344 self.was_dirty_before_starting_transaction.take().unwrap()
2345 } else {
2346 false
2347 };
2348 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2349 self.did_edit(&start_version, was_dirty, cx);
2350 Some(transaction_id)
2351 } else {
2352 None
2353 }
2354 }
2355
2356 /// Manually add a transaction to the buffer's undo history.
2357 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2358 self.text.push_transaction(transaction, now);
2359 }
2360
2361 /// Differs from `push_transaction` in that it does not clear the redo
2362 /// stack. Intended to be used to create a parent transaction to merge
2363 /// potential child transactions into.
2364 ///
2365 /// The caller is responsible for removing it from the undo history using
2366 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2367 /// are merged into this transaction, the caller is responsible for ensuring
2368 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2369 /// cleared is to create transactions with the usual `start_transaction` and
2370 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
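    ///
    /// A minimal sketch of the intended pattern (not compiled as a doc-test); the
    /// `child_transaction_id` variable is illustrative:
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// // ...create child transactions with `start_transaction` / `end_transaction`...
    /// if let Some(child) = child_transaction_id {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```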
2372 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2373 self.text.push_empty_transaction(now)
2374 }
2375
2376 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2378 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2379 self.text.finalize_last_transaction()
2380 }
2381
2382 /// Manually group all changes since a given transaction.
2383 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2384 self.text.group_until_transaction(transaction_id);
2385 }
2386
2387 /// Manually remove a transaction from the buffer's undo history
2388 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2389 self.text.forget_transaction(transaction_id)
2390 }
2391
2392 /// Retrieve a transaction from the buffer's undo history
2393 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2394 self.text.get_transaction(transaction_id)
2395 }
2396
2397 /// Manually merge two transactions in the buffer's undo history.
2398 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2399 self.text.merge_transactions(transaction, destination);
2400 }
2401
2402 /// Waits for the buffer to receive operations with the given timestamps.
2403 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2404 &mut self,
2405 edit_ids: It,
2406 ) -> impl Future<Output = Result<()>> + use<It> {
2407 self.text.wait_for_edits(edit_ids)
2408 }
2409
2410 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2411 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2412 &mut self,
2413 anchors: It,
2414 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2415 self.text.wait_for_anchors(anchors)
2416 }
2417
2418 /// Waits for the buffer to receive operations up to the given version.
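    ///
    /// A minimal sketch (not compiled as a doc-test); `remote_version` is an
    /// illustrative `clock::Global`:
    ///
    /// ```ignore
    /// let synced = buffer.wait_for_version(remote_version.clone());
    /// // ...later, in an async context...
    /// synced.await?;
    /// // The buffer has now observed all operations up to `remote_version`.
    /// ```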
2419 pub fn wait_for_version(
2420 &mut self,
2421 version: clock::Global,
2422 ) -> impl Future<Output = Result<()>> + use<> {
2423 self.text.wait_for_version(version)
2424 }
2425
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`],
    /// or [`Buffer::wait_for_anchors`] to resolve with an error.
2428 pub fn give_up_waiting(&mut self) {
2429 self.text.give_up_waiting();
2430 }
2431
2432 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2433 let mut rx = None;
2434 if !self.autoindent_requests.is_empty() {
2435 let channel = oneshot::channel();
2436 self.wait_for_autoindent_txs.push(channel.0);
2437 rx = Some(channel.1);
2438 }
2439 rx
2440 }
2441
2442 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2443 pub fn set_active_selections(
2444 &mut self,
2445 selections: Arc<[Selection<Anchor>]>,
2446 line_mode: bool,
2447 cursor_shape: CursorShape,
2448 cx: &mut Context<Self>,
2449 ) {
2450 let lamport_timestamp = self.text.lamport_clock.tick();
2451 self.remote_selections.insert(
2452 self.text.replica_id(),
2453 SelectionSet {
2454 selections: selections.clone(),
2455 lamport_timestamp,
2456 line_mode,
2457 cursor_shape,
2458 },
2459 );
2460 self.send_operation(
2461 Operation::UpdateSelections {
2462 selections,
2463 line_mode,
2464 lamport_timestamp,
2465 cursor_shape,
2466 },
2467 true,
2468 cx,
2469 );
2470 self.non_text_state_update_count += 1;
2471 cx.notify();
2472 }
2473
2474 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2475 /// this replica.
2476 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2477 if self
2478 .remote_selections
2479 .get(&self.text.replica_id())
2480 .is_none_or(|set| !set.selections.is_empty())
2481 {
2482 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2483 }
2484 }
2485
2486 pub fn set_agent_selections(
2487 &mut self,
2488 selections: Arc<[Selection<Anchor>]>,
2489 line_mode: bool,
2490 cursor_shape: CursorShape,
2491 cx: &mut Context<Self>,
2492 ) {
2493 let lamport_timestamp = self.text.lamport_clock.tick();
2494 self.remote_selections.insert(
2495 ReplicaId::AGENT,
2496 SelectionSet {
2497 selections,
2498 lamport_timestamp,
2499 line_mode,
2500 cursor_shape,
2501 },
2502 );
2503 self.non_text_state_update_count += 1;
2504 cx.notify();
2505 }
2506
2507 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2508 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2509 }
2510
2511 /// Replaces the buffer's entire text.
2512 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2513 where
2514 T: Into<Arc<str>>,
2515 {
2516 self.autoindent_requests.clear();
2517 self.edit([(0..self.len(), text)], None, cx)
2518 }
2519
2520 /// Appends the given text to the end of the buffer.
2521 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2522 where
2523 T: Into<Arc<str>>,
2524 {
2525 self.edit([(self.len()..self.len(), text)], None, cx)
2526 }
2527
2528 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2529 /// delete, and a string of text to insert at that location.
2530 ///
2531 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2532 /// request for the edited ranges, which will be processed when the buffer finishes
2533 /// parsing.
2534 ///
    /// Parsing takes place at the end of a transaction, and may run synchronously
    /// or asynchronously, depending on the changes.
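    ///
    /// A minimal sketch (not compiled as a doc-test), assuming a `Context<Buffer>` named `cx`:
    ///
    /// ```ignore
    /// // Replace the first five characters of row 0, and insert a new line at
    /// // row 2 that will be re-indented according to the buffer's language.
    /// buffer.edit(
    ///     [
    ///         (Point::new(0, 0)..Point::new(0, 5), "hello"),
    ///         (Point::new(2, 0)..Point::new(2, 0), "world\n"),
    ///     ],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```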
2537 pub fn edit<I, S, T>(
2538 &mut self,
2539 edits_iter: I,
2540 autoindent_mode: Option<AutoindentMode>,
2541 cx: &mut Context<Self>,
2542 ) -> Option<clock::Lamport>
2543 where
2544 I: IntoIterator<Item = (Range<S>, T)>,
2545 S: ToOffset,
2546 T: Into<Arc<str>>,
2547 {
2548 // Skip invalid edits and coalesce contiguous ones.
2549 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2550
2551 for (range, new_text) in edits_iter {
2552 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2553
2554 if range.start > range.end {
2555 mem::swap(&mut range.start, &mut range.end);
2556 }
2557 let new_text = new_text.into();
2558 if !new_text.is_empty() || !range.is_empty() {
2559 if let Some((prev_range, prev_text)) = edits.last_mut()
2560 && prev_range.end >= range.start
2561 {
2562 prev_range.end = cmp::max(prev_range.end, range.end);
2563 *prev_text = format!("{prev_text}{new_text}").into();
2564 } else {
2565 edits.push((range, new_text));
2566 }
2567 }
2568 }
2569 if edits.is_empty() {
2570 return None;
2571 }
2572
2573 self.start_transaction();
2574 self.pending_autoindent.take();
2575 let autoindent_request = autoindent_mode
2576 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2577
2578 let edit_operation = self.text.edit(edits.iter().cloned());
2579 let edit_id = edit_operation.timestamp();
2580
2581 if let Some((before_edit, mode)) = autoindent_request {
2582 let mut delta = 0isize;
2583 let mut previous_setting = None;
2584 let entries: Vec<_> = edits
2585 .into_iter()
2586 .enumerate()
2587 .zip(&edit_operation.as_edit().unwrap().new_text)
2588 .filter(|((_, (range, _)), _)| {
2589 let language = before_edit.language_at(range.start);
2590 let language_id = language.map(|l| l.id());
2591 if let Some((cached_language_id, auto_indent)) = previous_setting
2592 && cached_language_id == language_id
2593 {
2594 auto_indent
2595 } else {
2596 // The auto-indent setting is not present in editorconfigs, hence
2597 // we can avoid passing the file here.
2598 let auto_indent =
2599 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2600 previous_setting = Some((language_id, auto_indent));
2601 auto_indent
2602 }
2603 })
2604 .map(|((ix, (range, _)), new_text)| {
2605 let new_text_length = new_text.len();
2606 let old_start = range.start.to_point(&before_edit);
2607 let new_start = (delta + range.start as isize) as usize;
2608 let range_len = range.end - range.start;
2609 delta += new_text_length as isize - range_len as isize;
2610
2611 // Decide what range of the insertion to auto-indent, and whether
2612 // the first line of the insertion should be considered a newly-inserted line
2613 // or an edit to an existing line.
2614 let mut range_of_insertion_to_indent = 0..new_text_length;
2615 let mut first_line_is_new = true;
2616
2617 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2618 let old_line_end = before_edit.line_len(old_start.row);
2619
2620 if old_start.column > old_line_start {
2621 first_line_is_new = false;
2622 }
2623
2624 if !new_text.contains('\n')
2625 && (old_start.column + (range_len as u32) < old_line_end
2626 || old_line_end == old_line_start)
2627 {
2628 first_line_is_new = false;
2629 }
2630
2631 // When inserting text starting with a newline, avoid auto-indenting the
2632 // previous line.
2633 if new_text.starts_with('\n') {
2634 range_of_insertion_to_indent.start += 1;
2635 first_line_is_new = true;
2636 }
2637
2638 let mut original_indent_column = None;
2639 if let AutoindentMode::Block {
2640 original_indent_columns,
2641 } = &mode
2642 {
2643 original_indent_column = Some(if new_text.starts_with('\n') {
2644 indent_size_for_text(
2645 new_text[range_of_insertion_to_indent.clone()].chars(),
2646 )
2647 .len
2648 } else {
2649 original_indent_columns
2650 .get(ix)
2651 .copied()
2652 .flatten()
2653 .unwrap_or_else(|| {
2654 indent_size_for_text(
2655 new_text[range_of_insertion_to_indent.clone()].chars(),
2656 )
2657 .len
2658 })
2659 });
2660
2661 // Avoid auto-indenting the line after the edit.
2662 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2663 range_of_insertion_to_indent.end -= 1;
2664 }
2665 }
2666
2667 AutoindentRequestEntry {
2668 first_line_is_new,
2669 original_indent_column,
2670 indent_size: before_edit.language_indent_size_at(range.start, cx),
2671 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2672 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2673 }
2674 })
2675 .collect();
2676
2677 if !entries.is_empty() {
2678 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2679 before_edit,
2680 entries,
2681 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2682 ignore_empty_lines: false,
2683 }));
2684 }
2685 }
2686
2687 self.end_transaction(cx);
2688 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2689 Some(edit_id)
2690 }
2691
2692 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2693 self.was_changed();
2694
2695 if self.edits_since::<usize>(old_version).next().is_none() {
2696 return;
2697 }
2698
2699 self.reparse(cx, true);
2700 cx.emit(BufferEvent::Edited);
2701 if was_dirty != self.is_dirty() {
2702 cx.emit(BufferEvent::DirtyChanged);
2703 }
2704 cx.notify();
2705 }
2706
2707 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2708 where
2709 I: IntoIterator<Item = Range<T>>,
2710 T: ToOffset + Copy,
2711 {
2712 let before_edit = self.snapshot();
2713 let entries = ranges
2714 .into_iter()
2715 .map(|range| AutoindentRequestEntry {
2716 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2717 first_line_is_new: true,
2718 indent_size: before_edit.language_indent_size_at(range.start, cx),
2719 original_indent_column: None,
2720 })
2721 .collect();
2722 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2723 before_edit,
2724 entries,
2725 is_block_mode: false,
2726 ignore_empty_lines: true,
2727 }));
2728 self.request_autoindent(cx);
2729 }
2730
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
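    ///
    /// A minimal sketch (not compiled as a doc-test); `cursor_point` is illustrative:
    ///
    /// ```ignore
    /// // Open an auto-indented empty line at the cursor, padded with blank lines
    /// // above and below when its neighbors are not already blank.
    /// let new_line_start = buffer.insert_empty_line(cursor_point, true, true, cx);
    /// ```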
2733 pub fn insert_empty_line(
2734 &mut self,
2735 position: impl ToPoint,
2736 space_above: bool,
2737 space_below: bool,
2738 cx: &mut Context<Self>,
2739 ) -> Point {
2740 let mut position = position.to_point(self);
2741
2742 self.start_transaction();
2743
2744 self.edit(
2745 [(position..position, "\n")],
2746 Some(AutoindentMode::EachLine),
2747 cx,
2748 );
2749
2750 if position.column > 0 {
2751 position += Point::new(1, 0);
2752 }
2753
2754 if !self.is_line_blank(position.row) {
2755 self.edit(
2756 [(position..position, "\n")],
2757 Some(AutoindentMode::EachLine),
2758 cx,
2759 );
2760 }
2761
2762 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2763 self.edit(
2764 [(position..position, "\n")],
2765 Some(AutoindentMode::EachLine),
2766 cx,
2767 );
2768 position.row += 1;
2769 }
2770
2771 if space_below
2772 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2773 {
2774 self.edit(
2775 [(position..position, "\n")],
2776 Some(AutoindentMode::EachLine),
2777 cx,
2778 );
2779 }
2780
2781 self.end_transaction(cx);
2782
2783 position
2784 }
2785
2786 /// Applies the given remote operations to the buffer.
2787 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2788 self.pending_autoindent.take();
2789 let was_dirty = self.is_dirty();
2790 let old_version = self.version.clone();
2791 let mut deferred_ops = Vec::new();
2792 let buffer_ops = ops
2793 .into_iter()
2794 .filter_map(|op| match op {
2795 Operation::Buffer(op) => Some(op),
2796 _ => {
2797 if self.can_apply_op(&op) {
2798 self.apply_op(op, cx);
2799 } else {
2800 deferred_ops.push(op);
2801 }
2802 None
2803 }
2804 })
2805 .collect::<Vec<_>>();
2806 for operation in buffer_ops.iter() {
2807 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2808 }
2809 self.text.apply_ops(buffer_ops);
2810 self.deferred_ops.insert(deferred_ops);
2811 self.flush_deferred_ops(cx);
2812 self.did_edit(&old_version, was_dirty, cx);
        // Notify regardless of whether the buffer was edited, because the operations
        // could include a selection update.
2815 cx.notify();
2816 }
2817
2818 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2819 let mut deferred_ops = Vec::new();
2820 for op in self.deferred_ops.drain().iter().cloned() {
2821 if self.can_apply_op(&op) {
2822 self.apply_op(op, cx);
2823 } else {
2824 deferred_ops.push(op);
2825 }
2826 }
2827 self.deferred_ops.insert(deferred_ops);
2828 }
2829
2830 pub fn has_deferred_ops(&self) -> bool {
2831 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2832 }
2833
2834 fn can_apply_op(&self, operation: &Operation) -> bool {
2835 match operation {
2836 Operation::Buffer(_) => {
2837 unreachable!("buffer operations should never be applied at this layer")
2838 }
2839 Operation::UpdateDiagnostics {
2840 diagnostics: diagnostic_set,
2841 ..
2842 } => diagnostic_set.iter().all(|diagnostic| {
2843 self.text.can_resolve(&diagnostic.range.start)
2844 && self.text.can_resolve(&diagnostic.range.end)
2845 }),
2846 Operation::UpdateSelections { selections, .. } => selections
2847 .iter()
2848 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2849 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2850 }
2851 }
2852
2853 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2854 match operation {
2855 Operation::Buffer(_) => {
2856 unreachable!("buffer operations should never be applied at this layer")
2857 }
2858 Operation::UpdateDiagnostics {
2859 server_id,
2860 diagnostics: diagnostic_set,
2861 lamport_timestamp,
2862 } => {
2863 let snapshot = self.snapshot();
2864 self.apply_diagnostic_update(
2865 server_id,
2866 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2867 lamport_timestamp,
2868 cx,
2869 );
2870 }
2871 Operation::UpdateSelections {
2872 selections,
2873 lamport_timestamp,
2874 line_mode,
2875 cursor_shape,
2876 } => {
2877 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2878 && set.lamport_timestamp > lamport_timestamp
2879 {
2880 return;
2881 }
2882
2883 self.remote_selections.insert(
2884 lamport_timestamp.replica_id,
2885 SelectionSet {
2886 selections,
2887 lamport_timestamp,
2888 line_mode,
2889 cursor_shape,
2890 },
2891 );
2892 self.text.lamport_clock.observe(lamport_timestamp);
2893 self.non_text_state_update_count += 1;
2894 }
2895 Operation::UpdateCompletionTriggers {
2896 triggers,
2897 lamport_timestamp,
2898 server_id,
2899 } => {
2900 if triggers.is_empty() {
2901 self.completion_triggers_per_language_server
2902 .remove(&server_id);
2903 self.completion_triggers = self
2904 .completion_triggers_per_language_server
2905 .values()
2906 .flat_map(|triggers| triggers.iter().cloned())
2907 .collect();
2908 } else {
2909 self.completion_triggers_per_language_server
2910 .insert(server_id, triggers.iter().cloned().collect());
2911 self.completion_triggers.extend(triggers);
2912 }
2913 self.text.lamport_clock.observe(lamport_timestamp);
2914 }
2915 Operation::UpdateLineEnding {
2916 line_ending,
2917 lamport_timestamp,
2918 } => {
2919 self.text.set_line_ending(line_ending);
2920 self.text.lamport_clock.observe(lamport_timestamp);
2921 }
2922 }
2923 }
2924
2925 fn apply_diagnostic_update(
2926 &mut self,
2927 server_id: LanguageServerId,
2928 diagnostics: DiagnosticSet,
2929 lamport_timestamp: clock::Lamport,
2930 cx: &mut Context<Self>,
2931 ) {
2932 if lamport_timestamp > self.diagnostics_timestamp {
2933 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2934 if diagnostics.is_empty() {
2935 if let Ok(ix) = ix {
2936 self.diagnostics.remove(ix);
2937 }
2938 } else {
2939 match ix {
2940 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2941 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2942 };
2943 }
2944 self.diagnostics_timestamp = lamport_timestamp;
2945 self.non_text_state_update_count += 1;
2946 self.text.lamport_clock.observe(lamport_timestamp);
2947 cx.notify();
2948 cx.emit(BufferEvent::DiagnosticsUpdated);
2949 }
2950 }
2951
2952 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2953 self.was_changed();
2954 cx.emit(BufferEvent::Operation {
2955 operation,
2956 is_local,
2957 });
2958 }
2959
2960 /// Removes the selections for a given peer.
2961 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2962 self.remote_selections.remove(&replica_id);
2963 cx.notify();
2964 }
2965
2966 /// Undoes the most recent transaction.
2967 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2968 let was_dirty = self.is_dirty();
2969 let old_version = self.version.clone();
2970
2971 if let Some((transaction_id, operation)) = self.text.undo() {
2972 self.send_operation(Operation::Buffer(operation), true, cx);
2973 self.did_edit(&old_version, was_dirty, cx);
2974 Some(transaction_id)
2975 } else {
2976 None
2977 }
2978 }
2979
2980 /// Manually undoes a specific transaction in the buffer's undo history.
2981 pub fn undo_transaction(
2982 &mut self,
2983 transaction_id: TransactionId,
2984 cx: &mut Context<Self>,
2985 ) -> bool {
2986 let was_dirty = self.is_dirty();
2987 let old_version = self.version.clone();
2988 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2989 self.send_operation(Operation::Buffer(operation), true, cx);
2990 self.did_edit(&old_version, was_dirty, cx);
2991 true
2992 } else {
2993 false
2994 }
2995 }
2996
2997 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2998 pub fn undo_to_transaction(
2999 &mut self,
3000 transaction_id: TransactionId,
3001 cx: &mut Context<Self>,
3002 ) -> bool {
3003 let was_dirty = self.is_dirty();
3004 let old_version = self.version.clone();
3005
3006 let operations = self.text.undo_to_transaction(transaction_id);
3007 let undone = !operations.is_empty();
3008 for operation in operations {
3009 self.send_operation(Operation::Buffer(operation), true, cx);
3010 }
3011 if undone {
3012 self.did_edit(&old_version, was_dirty, cx)
3013 }
3014 undone
3015 }
3016
3017 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3018 let was_dirty = self.is_dirty();
3019 let operation = self.text.undo_operations(counts);
3020 let old_version = self.version.clone();
3021 self.send_operation(Operation::Buffer(operation), true, cx);
3022 self.did_edit(&old_version, was_dirty, cx);
3023 }
3024
    /// Redoes the most recently undone transaction.
3026 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3027 let was_dirty = self.is_dirty();
3028 let old_version = self.version.clone();
3029
3030 if let Some((transaction_id, operation)) = self.text.redo() {
3031 self.send_operation(Operation::Buffer(operation), true, cx);
3032 self.did_edit(&old_version, was_dirty, cx);
3033 Some(transaction_id)
3034 } else {
3035 None
3036 }
3037 }
3038
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
3040 pub fn redo_to_transaction(
3041 &mut self,
3042 transaction_id: TransactionId,
3043 cx: &mut Context<Self>,
3044 ) -> bool {
3045 let was_dirty = self.is_dirty();
3046 let old_version = self.version.clone();
3047
3048 let operations = self.text.redo_to_transaction(transaction_id);
3049 let redone = !operations.is_empty();
3050 for operation in operations {
3051 self.send_operation(Operation::Buffer(operation), true, cx);
3052 }
3053 if redone {
3054 self.did_edit(&old_version, was_dirty, cx)
3055 }
3056 redone
3057 }
3058
    /// Overrides the completion triggers for the given language server with the provided set.
3060 pub fn set_completion_triggers(
3061 &mut self,
3062 server_id: LanguageServerId,
3063 triggers: BTreeSet<String>,
3064 cx: &mut Context<Self>,
3065 ) {
3066 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3067 if triggers.is_empty() {
3068 self.completion_triggers_per_language_server
3069 .remove(&server_id);
3070 self.completion_triggers = self
3071 .completion_triggers_per_language_server
3072 .values()
3073 .flat_map(|triggers| triggers.iter().cloned())
3074 .collect();
3075 } else {
3076 self.completion_triggers_per_language_server
3077 .insert(server_id, triggers.clone());
3078 self.completion_triggers.extend(triggers.iter().cloned());
3079 }
3080 self.send_operation(
3081 Operation::UpdateCompletionTriggers {
3082 triggers: triggers.into_iter().collect(),
3083 lamport_timestamp: self.completion_triggers_timestamp,
3084 server_id,
3085 },
3086 true,
3087 cx,
3088 );
3089 cx.notify();
3090 }
3091
    /// Returns the set of strings that trigger a completion menu for this buffer's language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
3094 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3095 &self.completion_triggers
3096 }
3097
3098 /// Call this directly after performing edits to prevent the preview tab
3099 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3100 /// to return false until there are additional edits.
3101 pub fn refresh_preview(&mut self) {
3102 self.preview_version = self.version.clone();
3103 }
3104
3105 /// Whether we should preserve the preview status of a tab containing this buffer.
3106 pub fn preserve_preview(&self) -> bool {
3107 !self.has_edits_since(&self.preview_version)
3108 }
3109}
3110
3111#[doc(hidden)]
3112#[cfg(any(test, feature = "test-support"))]
3113impl Buffer {
3114 pub fn edit_via_marked_text(
3115 &mut self,
3116 marked_string: &str,
3117 autoindent_mode: Option<AutoindentMode>,
3118 cx: &mut Context<Self>,
3119 ) {
3120 let edits = self.edits_for_marked_text(marked_string);
3121 self.edit(edits, autoindent_mode, cx);
3122 }
3123
3124 pub fn set_group_interval(&mut self, group_interval: Duration) {
3125 self.text.set_group_interval(group_interval);
3126 }
3127
3128 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3129 where
3130 T: rand::Rng,
3131 {
3132 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3133 let mut last_end = None;
3134 for _ in 0..old_range_count {
3135 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3136 break;
3137 }
3138
3139 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3140 let mut range = self.random_byte_range(new_start, rng);
3141 if rng.random_bool(0.2) {
3142 mem::swap(&mut range.start, &mut range.end);
3143 }
3144 last_end = Some(range.end);
3145
3146 let new_text_len = rng.random_range(0..10);
3147 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3148 new_text = new_text.to_uppercase();
3149
3150 edits.push((range, new_text));
3151 }
3152 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3153 self.edit(edits, None, cx);
3154 }
3155
3156 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3157 let was_dirty = self.is_dirty();
3158 let old_version = self.version.clone();
3159
3160 let ops = self.text.randomly_undo_redo(rng);
3161 if !ops.is_empty() {
3162 for op in ops {
3163 self.send_operation(Operation::Buffer(op), true, cx);
3164 self.did_edit(&old_version, was_dirty, cx);
3165 }
3166 }
3167 }
3168}
3169
3170impl EventEmitter<BufferEvent> for Buffer {}
3171
3172impl Deref for Buffer {
3173 type Target = TextBuffer;
3174
3175 fn deref(&self) -> &Self::Target {
3176 &self.text
3177 }
3178}
3179
3180impl BufferSnapshot {
3181 /// Returns [`IndentSize`] for a given line that respects user settings and
3182 /// language preferences.
3183 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3184 indent_size_for_line(self, row)
3185 }
3186
3187 /// Returns [`IndentSize`] for a given position that respects user settings
3188 /// and language preferences.
3189 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3190 let settings = language_settings(
3191 self.language_at(position).map(|l| l.name()),
3192 self.file(),
3193 cx,
3194 );
3195 if settings.hard_tabs {
3196 IndentSize::tab()
3197 } else {
3198 IndentSize::spaces(settings.tab_size.get())
3199 }
3200 }
3201
3202 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3203 /// is passed in as `single_indent_size`.
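    ///
    /// A minimal sketch (not compiled as a doc-test), assuming a `BufferSnapshot`
    /// named `snapshot`:
    ///
    /// ```ignore
    /// // Suggested indent sizes for rows 2, 3, and 4, using a 4-space indent unit.
    /// let indents: BTreeMap<u32, IndentSize> =
    ///     snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// ```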
3204 pub fn suggested_indents(
3205 &self,
3206 rows: impl Iterator<Item = u32>,
3207 single_indent_size: IndentSize,
3208 ) -> BTreeMap<u32, IndentSize> {
3209 let mut result = BTreeMap::new();
3210
3211 for row_range in contiguous_ranges(rows, 10) {
3212 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3213 Some(suggestions) => suggestions,
3214 _ => break,
3215 };
3216
3217 for (row, suggestion) in row_range.zip(suggestions) {
3218 let indent_size = if let Some(suggestion) = suggestion {
3219 result
3220 .get(&suggestion.basis_row)
3221 .copied()
3222 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3223 .with_delta(suggestion.delta, single_indent_size)
3224 } else {
3225 self.indent_size_for_line(row)
3226 };
3227
3228 result.insert(row, indent_size);
3229 }
3230 }
3231
3232 result
3233 }
3234
3235 fn suggest_autoindents(
3236 &self,
3237 row_range: Range<u32>,
3238 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3239 let config = &self.language.as_ref()?.config;
3240 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3241
3242 #[derive(Debug, Clone)]
3243 struct StartPosition {
3244 start: Point,
3245 suffix: SharedString,
3246 language: Arc<Language>,
3247 }
3248
3249 // Find the suggested indentation ranges based on the syntax tree.
3250 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3251 let end = Point::new(row_range.end, 0);
3252 let range = (start..end).to_offset(&self.text);
3253 let mut matches = self.syntax.matches_with_options(
3254 range.clone(),
3255 &self.text,
3256 TreeSitterOptions {
3257 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3258 max_start_depth: None,
3259 },
3260 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3261 );
3262 let indent_configs = matches
3263 .grammars()
3264 .iter()
3265 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3266 .collect::<Vec<_>>();
3267
3268 let mut indent_ranges = Vec::<Range<Point>>::new();
3269 let mut start_positions = Vec::<StartPosition>::new();
3270 let mut outdent_positions = Vec::<Point>::new();
3271 while let Some(mat) = matches.peek() {
3272 let mut start: Option<Point> = None;
3273 let mut end: Option<Point> = None;
3274
3275 let config = indent_configs[mat.grammar_index];
3276 for capture in mat.captures {
3277 if capture.index == config.indent_capture_ix {
3278 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3279 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3280 } else if Some(capture.index) == config.start_capture_ix {
3281 start = Some(Point::from_ts_point(capture.node.end_position()));
3282 } else if Some(capture.index) == config.end_capture_ix {
3283 end = Some(Point::from_ts_point(capture.node.start_position()));
3284 } else if Some(capture.index) == config.outdent_capture_ix {
3285 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3286 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3287 start_positions.push(StartPosition {
3288 start: Point::from_ts_point(capture.node.start_position()),
3289 suffix: suffix.clone(),
3290 language: mat.language.clone(),
3291 });
3292 }
3293 }
3294
3295 matches.advance();
3296 if let Some((start, end)) = start.zip(end) {
3297 if start.row == end.row {
3298 continue;
3299 }
3300 let range = start..end;
3301 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3302 Err(ix) => indent_ranges.insert(ix, range),
3303 Ok(ix) => {
3304 let prev_range = &mut indent_ranges[ix];
3305 prev_range.end = prev_range.end.max(range.end);
3306 }
3307 }
3308 }
3309 }
3310
3311 let mut error_ranges = Vec::<Range<Point>>::new();
3312 let mut matches = self
3313 .syntax
3314 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3315 while let Some(mat) = matches.peek() {
3316 let node = mat.captures[0].node;
3317 let start = Point::from_ts_point(node.start_position());
3318 let end = Point::from_ts_point(node.end_position());
3319 let range = start..end;
3320 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3321 Ok(ix) | Err(ix) => ix,
3322 };
3323 let mut end_ix = ix;
3324 while let Some(existing_range) = error_ranges.get(end_ix) {
3325 if existing_range.end < end {
3326 end_ix += 1;
3327 } else {
3328 break;
3329 }
3330 }
3331 error_ranges.splice(ix..end_ix, [range]);
3332 matches.advance();
3333 }
3334
3335 outdent_positions.sort();
3336 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and truncate that range's end to the outdent position.
3339 if let Some(range_to_truncate) = indent_ranges
3340 .iter_mut()
3341 .rfind(|indent_range| indent_range.contains(&outdent_position))
3342 {
3343 range_to_truncate.end = outdent_position;
3344 }
3345 }
3346
3347 start_positions.sort_by_key(|b| b.start);
3348
        // Find the suggested indentation increases and decreases based on regexes.
3350 let mut regex_outdent_map = HashMap::default();
3351 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3352 let mut start_positions_iter = start_positions.iter().peekable();
3353
3354 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3355 self.for_each_line(
3356 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3357 ..Point::new(row_range.end, 0),
3358 |row, line| {
3359 let indent_len = self.indent_size_for_line(row).len;
3360 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3361 let row_language_config = row_language
3362 .as_ref()
3363 .map(|lang| lang.config())
3364 .unwrap_or(config);
3365
3366 if row_language_config
3367 .decrease_indent_pattern
3368 .as_ref()
3369 .is_some_and(|regex| regex.is_match(line))
3370 {
3371 indent_change_rows.push((row, Ordering::Less));
3372 }
3373 if row_language_config
3374 .increase_indent_pattern
3375 .as_ref()
3376 .is_some_and(|regex| regex.is_match(line))
3377 {
3378 indent_change_rows.push((row + 1, Ordering::Greater));
3379 }
3380 while let Some(pos) = start_positions_iter.peek() {
3381 if pos.start.row < row {
3382 let pos = start_positions_iter.next().unwrap().clone();
3383 last_seen_suffix
3384 .entry(pos.suffix.to_string())
3385 .or_default()
3386 .push(pos);
3387 } else {
3388 break;
3389 }
3390 }
3391 for rule in &row_language_config.decrease_indent_patterns {
3392 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3393 let row_start_column = self.indent_size_for_line(row).len;
3394 let basis_row = rule
3395 .valid_after
3396 .iter()
3397 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3398 .flatten()
3399 .filter(|pos| {
3400 row_language
3401 .as_ref()
3402 .or(self.language.as_ref())
3403 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3404 })
3405 .filter(|pos| pos.start.column <= row_start_column)
3406 .max_by_key(|pos| pos.start.row);
3407 if let Some(outdent_to) = basis_row {
3408 regex_outdent_map.insert(row, outdent_to.start.row);
3409 }
3410 break;
3411 }
3412 }
3413 },
3414 );
3415
3416 let mut indent_changes = indent_change_rows.into_iter().peekable();
3417 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3418 prev_non_blank_row.unwrap_or(0)
3419 } else {
3420 row_range.start.saturating_sub(1)
3421 };
3422
3423 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3424 Some(row_range.map(move |row| {
3425 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3426
3427 let mut indent_from_prev_row = false;
3428 let mut outdent_from_prev_row = false;
3429 let mut outdent_to_row = u32::MAX;
3430 let mut from_regex = false;
3431
3432 while let Some((indent_row, delta)) = indent_changes.peek() {
3433 match indent_row.cmp(&row) {
3434 Ordering::Equal => match delta {
3435 Ordering::Less => {
3436 from_regex = true;
3437 outdent_from_prev_row = true
3438 }
3439 Ordering::Greater => {
3440 indent_from_prev_row = true;
3441 from_regex = true
3442 }
3443 _ => {}
3444 },
3445
3446 Ordering::Greater => break,
3447 Ordering::Less => {}
3448 }
3449
3450 indent_changes.next();
3451 }
3452
3453 for range in &indent_ranges {
3454 if range.start.row >= row {
3455 break;
3456 }
3457 if range.start.row == prev_row && range.end > row_start {
3458 indent_from_prev_row = true;
3459 }
3460 if range.end > prev_row_start && range.end <= row_start {
3461 outdent_to_row = outdent_to_row.min(range.start.row);
3462 }
3463 }
3464
3465 if let Some(basis_row) = regex_outdent_map.get(&row) {
3466 indent_from_prev_row = false;
3467 outdent_to_row = *basis_row;
3468 from_regex = true;
3469 }
3470
3471 let within_error = error_ranges
3472 .iter()
3473 .any(|e| e.start.row < row && e.end > row_start);
3474
3475 let suggestion = if outdent_to_row == prev_row
3476 || (outdent_from_prev_row && indent_from_prev_row)
3477 {
3478 Some(IndentSuggestion {
3479 basis_row: prev_row,
3480 delta: Ordering::Equal,
3481 within_error: within_error && !from_regex,
3482 })
3483 } else if indent_from_prev_row {
3484 Some(IndentSuggestion {
3485 basis_row: prev_row,
3486 delta: Ordering::Greater,
3487 within_error: within_error && !from_regex,
3488 })
3489 } else if outdent_to_row < prev_row {
3490 Some(IndentSuggestion {
3491 basis_row: outdent_to_row,
3492 delta: Ordering::Equal,
3493 within_error: within_error && !from_regex,
3494 })
3495 } else if outdent_from_prev_row {
3496 Some(IndentSuggestion {
3497 basis_row: prev_row,
3498 delta: Ordering::Less,
3499 within_error: within_error && !from_regex,
3500 })
3501 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3502 {
3503 Some(IndentSuggestion {
3504 basis_row: prev_row,
3505 delta: Ordering::Equal,
3506 within_error: within_error && !from_regex,
3507 })
3508 } else {
3509 None
3510 };
3511
3512 prev_row = row;
3513 prev_row_start = row_start;
3514 suggestion
3515 }))
3516 }
3517
3518 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3519 while row > 0 {
3520 row -= 1;
3521 if !self.is_line_blank(row) {
3522 return Some(row);
3523 }
3524 }
3525 None
3526 }
3527
3528 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3529 let captures = self.syntax.captures(range, &self.text, |grammar| {
3530 grammar
3531 .highlights_config
3532 .as_ref()
3533 .map(|config| &config.query)
3534 });
3535 let highlight_maps = captures
3536 .grammars()
3537 .iter()
3538 .map(|grammar| grammar.highlight_map())
3539 .collect();
3540 (captures, highlight_maps)
3541 }
3542
3543 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3544 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3545 /// returned in chunks where each chunk has a single syntax highlighting style and
3546 /// diagnostic status.
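///
/// A minimal usage sketch (not compiled as a doc test; `snapshot` is assumed to be a
/// `BufferSnapshot`): print the buffer while noting which chunks overlap diagnostics.
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     if let Some(severity) = chunk.diagnostic_severity {
///         // This chunk lies within a diagnostic of `severity`.
///         let _ = severity;
///     }
///     print!("{}", chunk.text);
/// }
/// ```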
3547 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3548 let range = range.start.to_offset(self)..range.end.to_offset(self);
3549
3550 let mut syntax = None;
3551 if language_aware {
3552 syntax = Some(self.get_highlights(range.clone()));
3553 }
3554 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3555 let diagnostics = language_aware;
3556 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3557 }
3558
3559 pub fn highlighted_text_for_range<T: ToOffset>(
3560 &self,
3561 range: Range<T>,
3562 override_style: Option<HighlightStyle>,
3563 syntax_theme: &SyntaxTheme,
3564 ) -> HighlightedText {
3565 HighlightedText::from_buffer_range(
3566 range,
3567 &self.text,
3568 &self.syntax,
3569 override_style,
3570 syntax_theme,
3571 )
3572 }
3573
3574 /// Invokes the given callback for each line of text in the given range of the buffer.
3575 /// Uses a callback to avoid allocating a new string for each line.
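///
/// A hedged sketch of the callback shape (within this impl; not compiled as a doc test;
/// `max_point` is assumed from the underlying text snapshot): count the buffer's
/// non-blank lines.
///
/// ```ignore
/// let mut non_blank_lines = 0;
/// self.for_each_line(Point::new(0, 0)..self.max_point(), |_row, line| {
///     if !line.trim().is_empty() {
///         non_blank_lines += 1;
///     }
/// });
/// ```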
3576 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3577 let mut line = String::new();
3578 let mut row = range.start.row;
3579 for chunk in self
3580 .as_rope()
3581 .chunks_in_range(range.to_offset(self))
3582 .chain(["\n"])
3583 {
3584 for (newline_ix, text) in chunk.split('\n').enumerate() {
3585 if newline_ix > 0 {
3586 callback(row, &line);
3587 row += 1;
3588 line.clear();
3589 }
3590 line.push_str(text);
3591 }
3592 }
3593 }
3594
3595 /// Iterates over every [`SyntaxLayer`] in the buffer.
3596 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3597 self.syntax_layers_for_range(0..self.len(), true)
3598 }
3599
3600 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3601 let offset = position.to_offset(self);
3602 self.syntax_layers_for_range(offset..offset, false)
3603 .filter(|l| {
3604 if let Some(ranges) = l.included_sub_ranges {
3605 ranges.iter().any(|range| {
3606 let start = range.start.to_offset(self);
3607 start <= offset && {
3608 let end = range.end.to_offset(self);
3609 offset < end
3610 }
3611 })
3612 } else {
3613 l.node().start_byte() <= offset && l.node().end_byte() > offset
3614 }
3615 })
3616 .last()
3617 }
3618
3619 pub fn syntax_layers_for_range<D: ToOffset>(
3620 &self,
3621 range: Range<D>,
3622 include_hidden: bool,
3623 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3624 self.syntax
3625 .layers_for_range(range, &self.text, include_hidden)
3626 }
3627
3628 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3629 &self,
3630 range: Range<D>,
3631 ) -> Option<SyntaxLayer<'_>> {
3632 let range = range.to_offset(self);
3633 self.syntax
3634 .layers_for_range(range, &self.text, false)
3635 .max_by(|a, b| {
3636 if a.depth != b.depth {
3637 a.depth.cmp(&b.depth)
3638 } else if a.offset.0 != b.offset.0 {
3639 a.offset.0.cmp(&b.offset.0)
3640 } else {
3641 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3642 }
3643 })
3644 }
3645
3646 /// Returns the main [`Language`].
3647 pub fn language(&self) -> Option<&Arc<Language>> {
3648 self.language.as_ref()
3649 }
3650
3651 /// Returns the [`Language`] at the given location.
3652 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3653 self.syntax_layer_at(position)
3654 .map(|info| info.language)
3655 .or(self.language.as_ref())
3656 }
3657
3658 /// Returns the settings for the language at the given location.
3659 pub fn settings_at<'a, D: ToOffset>(
3660 &'a self,
3661 position: D,
3662 cx: &'a App,
3663 ) -> Cow<'a, LanguageSettings> {
3664 language_settings(
3665 self.language_at(position).map(|l| l.name()),
3666 self.file.as_ref(),
3667 cx,
3668 )
3669 }
3670
3671 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3672 CharClassifier::new(self.language_scope_at(point))
3673 }
3674
3675 /// Returns the [`LanguageScope`] at the given location.
3676 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3677 let offset = position.to_offset(self);
3678 let mut scope = None;
3679 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3680
3681 // Use the layer that has the smallest node intersecting the given point.
3682 for layer in self
3683 .syntax
3684 .layers_for_range(offset..offset, &self.text, false)
3685 {
3686 let mut cursor = layer.node().walk();
3687
3688 let mut range = None;
3689 loop {
3690 let child_range = cursor.node().byte_range();
3691 if !child_range.contains(&offset) {
3692 break;
3693 }
3694
3695 range = Some(child_range);
3696 if cursor.goto_first_child_for_byte(offset).is_none() {
3697 break;
3698 }
3699 }
3700
3701 if let Some(range) = range
3702 && smallest_range_and_depth.as_ref().is_none_or(
3703 |(smallest_range, smallest_range_depth)| {
3704 if layer.depth > *smallest_range_depth {
3705 true
3706 } else if layer.depth == *smallest_range_depth {
3707 range.len() < smallest_range.len()
3708 } else {
3709 false
3710 }
3711 },
3712 )
3713 {
3714 smallest_range_and_depth = Some((range, layer.depth));
3715 scope = Some(LanguageScope {
3716 language: layer.language.clone(),
3717 override_id: layer.override_id(offset, &self.text),
3718 });
3719 }
3720 }
3721
3722 scope.or_else(|| {
3723 self.language.clone().map(|language| LanguageScope {
3724 language,
3725 override_id: None,
3726 })
3727 })
3728 }
3729
3730 /// Returns a tuple of the range and character kind of the word
3731 /// surrounding the given position.
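///
/// A minimal sketch (not compiled as a doc test), assuming `snapshot` is a
/// `BufferSnapshot` containing the text "hello world":
///
/// ```ignore
/// let (range, kind) = snapshot.surrounding_word(2, None);
/// assert_eq!(&snapshot.text()[range], "hello");
/// assert_eq!(kind, Some(CharKind::Word));
/// ```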
3732 pub fn surrounding_word<T: ToOffset>(
3733 &self,
3734 start: T,
3735 scope_context: Option<CharScopeContext>,
3736 ) -> (Range<usize>, Option<CharKind>) {
3737 let mut start = start.to_offset(self);
3738 let mut end = start;
3739 let mut next_chars = self.chars_at(start).take(128).peekable();
3740 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3741
3742 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3743 let word_kind = cmp::max(
3744 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3745 next_chars.peek().copied().map(|c| classifier.kind(c)),
3746 );
3747
3748 for ch in prev_chars {
3749 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3750 start -= ch.len_utf8();
3751 } else {
3752 break;
3753 }
3754 }
3755
3756 for ch in next_chars {
3757 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3758 end += ch.len_utf8();
3759 } else {
3760 break;
3761 }
3762 }
3763
3764 (start..end, word_kind)
3765 }
3766
3767 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3768 /// range. When `require_larger` is true, the node found must be larger than the query range.
3769 ///
3770 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3771 /// be moved to the root of the tree.
3772 fn goto_node_enclosing_range(
3773 cursor: &mut tree_sitter::TreeCursor,
3774 query_range: &Range<usize>,
3775 require_larger: bool,
3776 ) -> bool {
3777 let mut ascending = false;
3778 loop {
3779 let mut range = cursor.node().byte_range();
3780 if query_range.is_empty() {
3781 // When the query range is empty and the current node starts after it, move to the
3782 // previous sibling to find the containing node.
3783 if range.start > query_range.start {
3784 cursor.goto_previous_sibling();
3785 range = cursor.node().byte_range();
3786 }
3787 } else {
3788 // When the query range is non-empty and the current node ends exactly at the start,
3789 // move to the next sibling to find a node that extends beyond the start.
3790 if range.end == query_range.start {
3791 cursor.goto_next_sibling();
3792 range = cursor.node().byte_range();
3793 }
3794 }
3795
3796 let encloses = range.contains_inclusive(query_range)
3797 && (!require_larger || range.len() > query_range.len());
3798 if !encloses {
3799 ascending = true;
3800 if !cursor.goto_parent() {
3801 return false;
3802 }
3803 continue;
3804 } else if ascending {
3805 return true;
3806 }
3807
3808 // Descend into the current node.
3809 if cursor
3810 .goto_first_child_for_byte(query_range.start)
3811 .is_none()
3812 {
3813 return true;
3814 }
3815 }
3816 }
3817
3818 pub fn syntax_ancestor<'a, T: ToOffset>(
3819 &'a self,
3820 range: Range<T>,
3821 ) -> Option<tree_sitter::Node<'a>> {
3822 let range = range.start.to_offset(self)..range.end.to_offset(self);
3823 let mut result: Option<tree_sitter::Node<'a>> = None;
3824 for layer in self
3825 .syntax
3826 .layers_for_range(range.clone(), &self.text, true)
3827 {
3828 let mut cursor = layer.node().walk();
3829
3830 // Find the node that both contains the range and is larger than it.
3831 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3832 continue;
3833 }
3834
3835 let left_node = cursor.node();
3836 let mut layer_result = left_node;
3837
3838 // For an empty range, try to find another node immediately to the right of the range.
3839 if left_node.end_byte() == range.start {
3840 let mut right_node = None;
3841 while !cursor.goto_next_sibling() {
3842 if !cursor.goto_parent() {
3843 break;
3844 }
3845 }
3846
3847 while cursor.node().start_byte() == range.start {
3848 right_node = Some(cursor.node());
3849 if !cursor.goto_first_child() {
3850 break;
3851 }
3852 }
3853
3854 // If there is a candidate node on both sides of the (empty) range, then
3855 // decide between the two by favoring a named node over an anonymous token.
3856 // If both nodes are the same in that regard, favor the right one.
3857 if let Some(right_node) = right_node
3858 && (right_node.is_named() || !left_node.is_named())
3859 {
3860 layer_result = right_node;
3861 }
3862 }
3863
3864 if let Some(previous_result) = &result
3865 && previous_result.byte_range().len() < layer_result.byte_range().len()
3866 {
3867 continue;
3868 }
3869 result = Some(layer_result);
3870 }
3871
3872 result
3873 }
3874
3875 /// Find the previous sibling syntax node at the given range.
3876 ///
3877 /// This function locates the syntax node that precedes the node containing
3878 /// the given range. It searches hierarchically by:
3879 /// 1. Finding the node that contains the given range
3880 /// 2. Looking for the previous sibling at the same tree level
3881 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3882 ///
3883 /// Returns `None` if there is no previous sibling at any ancestor level.
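///
/// A hedged usage sketch (not compiled as a doc test; `snapshot`, `start`, and `end`
/// are assumed to exist):
///
/// ```ignore
/// if let Some(node) = snapshot.syntax_prev_sibling(start..end) {
///     // `node.kind()` is the tree-sitter node kind, e.g. "use_declaration" in Rust.
///     println!("previous sibling: {}", node.kind());
/// }
/// ```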
3884 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3885 &'a self,
3886 range: Range<T>,
3887 ) -> Option<tree_sitter::Node<'a>> {
3888 let range = range.start.to_offset(self)..range.end.to_offset(self);
3889 let mut result: Option<tree_sitter::Node<'a>> = None;
3890
3891 for layer in self
3892 .syntax
3893 .layers_for_range(range.clone(), &self.text, true)
3894 {
3895 let mut cursor = layer.node().walk();
3896
3897 // Find the node that contains the range
3898 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3899 continue;
3900 }
3901
3902 // Look for the previous sibling, moving up ancestor levels if needed
3903 loop {
3904 if cursor.goto_previous_sibling() {
3905 let layer_result = cursor.node();
3906
3907 if let Some(previous_result) = &result {
3908 if previous_result.byte_range().end < layer_result.byte_range().end {
3909 continue;
3910 }
3911 }
3912 result = Some(layer_result);
3913 break;
3914 }
3915
3916 // No sibling found at this level, try moving up to parent
3917 if !cursor.goto_parent() {
3918 break;
3919 }
3920 }
3921 }
3922
3923 result
3924 }
3925
3926 /// Find the next sibling syntax node at the given range.
3927 ///
3928 /// This function locates the syntax node that follows the node containing
3929 /// the given range. It searches hierarchically by:
3930 /// 1. Finding the node that contains the given range
3931 /// 2. Looking for the next sibling at the same tree level
3932 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3933 ///
3934 /// Returns `None` if there is no next sibling at any ancestor level.
3935 pub fn syntax_next_sibling<'a, T: ToOffset>(
3936 &'a self,
3937 range: Range<T>,
3938 ) -> Option<tree_sitter::Node<'a>> {
3939 let range = range.start.to_offset(self)..range.end.to_offset(self);
3940 let mut result: Option<tree_sitter::Node<'a>> = None;
3941
3942 for layer in self
3943 .syntax
3944 .layers_for_range(range.clone(), &self.text, true)
3945 {
3946 let mut cursor = layer.node().walk();
3947
3948 // Find the node that contains the range
3949 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3950 continue;
3951 }
3952
3953 // Look for the next sibling, moving up ancestor levels if needed
3954 loop {
3955 if cursor.goto_next_sibling() {
3956 let layer_result = cursor.node();
3957
3958 if let Some(previous_result) = &result {
3959 if previous_result.byte_range().start > layer_result.byte_range().start {
3960 continue;
3961 }
3962 }
3963 result = Some(layer_result);
3964 break;
3965 }
3966
3967 // No sibling found at this level, try moving up to parent
3968 if !cursor.goto_parent() {
3969 break;
3970 }
3971 }
3972 }
3973
3974 result
3975 }
3976
3977 /// Returns the root syntax node within the given row
3978 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3979 let start_offset = position.to_offset(self);
3980
3981 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3982
3983 let layer = self
3984 .syntax
3985 .layers_for_range(start_offset..start_offset, &self.text, true)
3986 .next()?;
3987
3988 let mut cursor = layer.node().walk();
3989
3990 // Descend to the first leaf that touches the start of the range.
3991 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3992 if cursor.node().end_byte() == start_offset {
3993 cursor.goto_next_sibling();
3994 }
3995 }
3996
3997 // Ascend to the root node within the same row.
3998 while cursor.goto_parent() {
3999 if cursor.node().start_position().row != row {
4000 break;
4001 }
4002 }
4003
4004 Some(cursor.node())
4005 }
4006
4007 /// Returns the outline for the buffer.
4008 ///
4009 /// This method allows passing an optional [`SyntaxTheme`] to
4010 /// syntax-highlight the returned symbols.
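///
/// A minimal sketch (not compiled as a doc test; `snapshot` is assumed to be a
/// `BufferSnapshot`, and the public `items` field is assumed from this crate's
/// [`Outline`] type): print the outline as an indented tree.
///
/// ```ignore
/// let outline = snapshot.outline(None);
/// for item in &outline.items {
///     println!("{}{}", "  ".repeat(item.depth), item.text);
/// }
/// ```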
4011 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4012 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4013 }
4014
4015 /// Returns all the symbols that contain the given position.
4016 ///
4017 /// This method allows passing an optional [`SyntaxTheme`] to
4018 /// syntax-highlight the returned symbols.
4019 pub fn symbols_containing<T: ToOffset>(
4020 &self,
4021 position: T,
4022 theme: Option<&SyntaxTheme>,
4023 ) -> Vec<OutlineItem<Anchor>> {
4024 let position = position.to_offset(self);
4025 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4026 let end = self.clip_offset(position + 1, Bias::Right);
4027 let mut items = self.outline_items_containing(start..end, false, theme);
4028 let mut prev_depth = None;
4029 items.retain(|item| {
4030 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4031 prev_depth = Some(item.depth);
4032 result
4033 });
4034 items
4035 }
4036
4037 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4038 let range = range.to_offset(self);
4039 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4040 grammar.outline_config.as_ref().map(|c| &c.query)
4041 });
4042 let configs = matches
4043 .grammars()
4044 .iter()
4045 .map(|g| g.outline_config.as_ref().unwrap())
4046 .collect::<Vec<_>>();
4047
4048 while let Some(mat) = matches.peek() {
4049 let config = &configs[mat.grammar_index];
4050 let containing_item_node = maybe!({
4051 let item_node = mat.captures.iter().find_map(|cap| {
4052 if cap.index == config.item_capture_ix {
4053 Some(cap.node)
4054 } else {
4055 None
4056 }
4057 })?;
4058
4059 let item_byte_range = item_node.byte_range();
4060 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4061 None
4062 } else {
4063 Some(item_node)
4064 }
4065 });
4066
4067 if let Some(item_node) = containing_item_node {
4068 return Some(
4069 Point::from_ts_point(item_node.start_position())
4070 ..Point::from_ts_point(item_node.end_position()),
4071 );
4072 }
4073
4074 matches.advance();
4075 }
4076 None
4077 }
4078
4079 pub fn outline_items_containing<T: ToOffset>(
4080 &self,
4081 range: Range<T>,
4082 include_extra_context: bool,
4083 theme: Option<&SyntaxTheme>,
4084 ) -> Vec<OutlineItem<Anchor>> {
4085 self.outline_items_containing_internal(
4086 range,
4087 include_extra_context,
4088 theme,
4089 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4090 )
4091 }
4092
4093 pub fn outline_items_as_points_containing<T: ToOffset>(
4094 &self,
4095 range: Range<T>,
4096 include_extra_context: bool,
4097 theme: Option<&SyntaxTheme>,
4098 ) -> Vec<OutlineItem<Point>> {
4099 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4100 range
4101 })
4102 }
4103
4104 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4105 &self,
4106 range: Range<T>,
4107 include_extra_context: bool,
4108 theme: Option<&SyntaxTheme>,
4109 ) -> Vec<OutlineItem<usize>> {
4110 self.outline_items_containing_internal(
4111 range,
4112 include_extra_context,
4113 theme,
4114 |buffer, range| range.to_offset(buffer),
4115 )
4116 }
4117
4118 fn outline_items_containing_internal<T: ToOffset, U>(
4119 &self,
4120 range: Range<T>,
4121 include_extra_context: bool,
4122 theme: Option<&SyntaxTheme>,
4123 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4124 ) -> Vec<OutlineItem<U>> {
4125 let range = range.to_offset(self);
4126 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4127 grammar.outline_config.as_ref().map(|c| &c.query)
4128 });
4129
4130 let mut items = Vec::new();
4131 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4132 while let Some(mat) = matches.peek() {
4133 let config = matches.grammars()[mat.grammar_index]
4134 .outline_config
4135 .as_ref()
4136 .unwrap();
4137 if let Some(item) =
4138 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4139 {
4140 items.push(item);
4141 } else if let Some(capture) = mat
4142 .captures
4143 .iter()
4144 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4145 {
4146 let capture_range = capture.node.start_position()..capture.node.end_position();
4147 let mut capture_row_range =
4148 capture_range.start.row as u32..capture_range.end.row as u32;
4149 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4150 {
4151 capture_row_range.end -= 1;
4152 }
4153 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4154 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4155 last_row_range.end = capture_row_range.end;
4156 } else {
4157 annotation_row_ranges.push(capture_row_range);
4158 }
4159 } else {
4160 annotation_row_ranges.push(capture_row_range);
4161 }
4162 }
4163 matches.advance();
4164 }
4165
4166 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4167
4168 // Assign depths based on containment relationships and convert the ranges via the callback.
4169 let mut item_ends_stack = Vec::<Point>::new();
4170 let mut anchor_items = Vec::new();
4171 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4172 for item in items {
4173 while let Some(last_end) = item_ends_stack.last().copied() {
4174 if last_end < item.range.end {
4175 item_ends_stack.pop();
4176 } else {
4177 break;
4178 }
4179 }
4180
4181 let mut annotation_row_range = None;
4182 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4183 let row_preceding_item = item.range.start.row.saturating_sub(1);
4184 if next_annotation_row_range.end < row_preceding_item {
4185 annotation_row_ranges.next();
4186 } else {
4187 if next_annotation_row_range.end == row_preceding_item {
4188 annotation_row_range = Some(next_annotation_row_range.clone());
4189 annotation_row_ranges.next();
4190 }
4191 break;
4192 }
4193 }
4194
4195 anchor_items.push(OutlineItem {
4196 depth: item_ends_stack.len(),
4197 range: range_callback(self, item.range.clone()),
4198 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4199 text: item.text,
4200 highlight_ranges: item.highlight_ranges,
4201 name_ranges: item.name_ranges,
4202 body_range: item.body_range.map(|r| range_callback(self, r)),
4203 annotation_range: annotation_row_range.map(|annotation_range| {
4204 let point_range = Point::new(annotation_range.start, 0)
4205 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4206 range_callback(self, point_range)
4207 }),
4208 });
4209 item_ends_stack.push(item.range.end);
4210 }
4211
4212 anchor_items
4213 }
4214
4215 fn next_outline_item(
4216 &self,
4217 config: &OutlineConfig,
4218 mat: &SyntaxMapMatch,
4219 range: &Range<usize>,
4220 include_extra_context: bool,
4221 theme: Option<&SyntaxTheme>,
4222 ) -> Option<OutlineItem<Point>> {
4223 let item_node = mat.captures.iter().find_map(|cap| {
4224 if cap.index == config.item_capture_ix {
4225 Some(cap.node)
4226 } else {
4227 None
4228 }
4229 })?;
4230
4231 let item_byte_range = item_node.byte_range();
4232 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4233 return None;
4234 }
4235 let item_point_range = Point::from_ts_point(item_node.start_position())
4236 ..Point::from_ts_point(item_node.end_position());
4237
4238 let mut open_point = None;
4239 let mut close_point = None;
4240
4241 let mut buffer_ranges = Vec::new();
4242 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4243 let mut range = node.start_byte()..node.end_byte();
4244 let start = node.start_position();
4245 if node.end_position().row > start.row {
4246 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4247 }
4248
4249 if !range.is_empty() {
4250 buffer_ranges.push((range, node_is_name));
4251 }
4252 };
4253
4254 for capture in mat.captures {
4255 if capture.index == config.name_capture_ix {
4256 add_to_buffer_ranges(capture.node, true);
4257 } else if Some(capture.index) == config.context_capture_ix
4258 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4259 {
4260 add_to_buffer_ranges(capture.node, false);
4261 } else {
4262 if Some(capture.index) == config.open_capture_ix {
4263 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4264 } else if Some(capture.index) == config.close_capture_ix {
4265 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4266 }
4267 }
4268 }
4269
4270 if buffer_ranges.is_empty() {
4271 return None;
4272 }
4273 let source_range_for_text =
4274 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4275
4276 let mut text = String::new();
4277 let mut highlight_ranges = Vec::new();
4278 let mut name_ranges = Vec::new();
4279 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4280 let mut last_buffer_range_end = 0;
4281 for (buffer_range, is_name) in buffer_ranges {
4282 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4283 if space_added {
4284 text.push(' ');
4285 }
4286 let before_append_len = text.len();
4287 let mut offset = buffer_range.start;
4288 chunks.seek(buffer_range.clone());
4289 for mut chunk in chunks.by_ref() {
4290 if chunk.text.len() > buffer_range.end - offset {
4291 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4292 offset = buffer_range.end;
4293 } else {
4294 offset += chunk.text.len();
4295 }
4296 let style = chunk
4297 .syntax_highlight_id
4298 .zip(theme)
4299 .and_then(|(highlight, theme)| highlight.style(theme));
4300 if let Some(style) = style {
4301 let start = text.len();
4302 let end = start + chunk.text.len();
4303 highlight_ranges.push((start..end, style));
4304 }
4305 text.push_str(chunk.text);
4306 if offset >= buffer_range.end {
4307 break;
4308 }
4309 }
4310 if is_name {
4311 let after_append_len = text.len();
4312 let start = if space_added && !name_ranges.is_empty() {
4313 before_append_len - 1
4314 } else {
4315 before_append_len
4316 };
4317 name_ranges.push(start..after_append_len);
4318 }
4319 last_buffer_range_end = buffer_range.end;
4320 }
4321
4322 Some(OutlineItem {
4323 depth: 0, // We'll calculate the depth later
4324 range: item_point_range,
4325 source_range_for_text: source_range_for_text.to_point(self),
4326 text,
4327 highlight_ranges,
4328 name_ranges,
4329 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4330 annotation_range: None,
4331 })
4332 }
4333
4334 pub fn function_body_fold_ranges<T: ToOffset>(
4335 &self,
4336 within: Range<T>,
4337 ) -> impl Iterator<Item = Range<usize>> + '_ {
4338 self.text_object_ranges(within, TreeSitterOptions::default())
4339 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4340 }
4341
4342 /// For each grammar in the language, runs the provided
4343 /// [`tree_sitter::Query`] against the given range.
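///
/// A hedged sketch (not compiled as a doc test; `snapshot` is assumed to be a
/// `BufferSnapshot`): run each grammar's outline query over the entire buffer.
///
/// ```ignore
/// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
///     grammar.outline_config.as_ref().map(|config| &config.query)
/// });
/// while let Some(mat) = matches.peek() {
///     // Inspect `mat.captures` here.
///     matches.advance();
/// }
/// ```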
4344 pub fn matches(
4345 &self,
4346 range: Range<usize>,
4347 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4348 ) -> SyntaxMapMatches<'_> {
4349 self.syntax.matches(range, self, query)
4350 }
4351
4352 /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
4353 /// Hence, it may return more bracket pairs than the range contains.
4354 ///
4355 /// Chunks listed in `known_chunks` are omitted.
4356 /// The resulting bracket match collections are not ordered.
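///
/// A minimal sketch (not compiled as a doc test; `snapshot` is assumed to be a
/// `BufferSnapshot`): fetch bracket pairs for the whole buffer with no cached chunks.
///
/// ```ignore
/// let brackets_by_chunk = snapshot.fetch_bracket_ranges(0..snapshot.len(), None);
/// for (row_range, brackets) in &brackets_by_chunk {
///     for bracket in brackets {
///         // `bracket.open_range` and `bracket.close_range` are byte ranges in the buffer.
///         let _ = (row_range, &bracket.open_range, &bracket.close_range);
///     }
/// }
/// ```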
4357 pub fn fetch_bracket_ranges(
4358 &self,
4359 range: Range<usize>,
4360 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4361 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4362 let mut all_bracket_matches = HashMap::default();
4363
4364 for chunk in self
4365 .tree_sitter_data
4366 .chunks
4367 .applicable_chunks(&[range.to_point(self)])
4368 {
4369 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4370 continue;
4371 }
4372 let chunk_range = chunk.anchor_range();
4373 let chunk_range = chunk_range.to_offset(&self);
4374
4375 if let Some(cached_brackets) =
4376 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4377 {
4378 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4379 continue;
4380 }
4381
4382 let mut all_brackets = Vec::new();
4383 let mut opens = Vec::new();
4384 let mut color_pairs = Vec::new();
4385
4386 let mut matches = self.syntax.matches_with_options(
4387 chunk_range.clone(),
4388 &self.text,
4389 TreeSitterOptions {
4390 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4391 max_start_depth: None,
4392 },
4393 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4394 );
4395 let configs = matches
4396 .grammars()
4397 .iter()
4398 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4399 .collect::<Vec<_>>();
4400
4401 while let Some(mat) = matches.peek() {
4402 let mut open = None;
4403 let mut close = None;
4404 let syntax_layer_depth = mat.depth;
4405 let config = configs[mat.grammar_index];
4406 let pattern = &config.patterns[mat.pattern_index];
4407 for capture in mat.captures {
4408 if capture.index == config.open_capture_ix {
4409 open = Some(capture.node.byte_range());
4410 } else if capture.index == config.close_capture_ix {
4411 close = Some(capture.node.byte_range());
4412 }
4413 }
4414
4415 matches.advance();
4416
4417 let Some((open_range, close_range)) = open.zip(close) else {
4418 continue;
4419 };
4420
4421 let bracket_range = open_range.start..=close_range.end;
4422 if !bracket_range.overlaps(&chunk_range) {
4423 continue;
4424 }
4425
4426 let index = all_brackets.len();
4427 all_brackets.push(BracketMatch {
4428 open_range: open_range.clone(),
4429 close_range: close_range.clone(),
4430 newline_only: pattern.newline_only,
4431 syntax_layer_depth,
4432 color_index: None,
4433 });
4434
4435 // Certain languages have "brackets" that are not really brackets, e.g. tags, and such
4436 // a bracket will match the entire tag with all of the text inside it.
4437 // For now, avoid highlighting any pair that has more than a single char in each bracket.
4438 // We need to colorize `<Element/>` bracket pairs, so we cannot make this check stricter.
4439 let should_color =
4440 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4441 if should_color {
4442 opens.push(open_range.clone());
4443 color_pairs.push((open_range, close_range, index));
4444 }
4445 }
4446
4447 opens.sort_by_key(|r| (r.start, r.end));
4448 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4449 color_pairs.sort_by_key(|(_, close, _)| close.end);
4450
4451 let mut open_stack = Vec::new();
4452 let mut open_index = 0;
4453 for (open, close, index) in color_pairs {
4454 while open_index < opens.len() && opens[open_index].start < close.start {
4455 open_stack.push(opens[open_index].clone());
4456 open_index += 1;
4457 }
4458
4459 if open_stack.last() == Some(&open) {
4460 let depth_index = open_stack.len() - 1;
4461 all_brackets[index].color_index = Some(depth_index);
4462 open_stack.pop();
4463 }
4464 }
4465
4466 all_brackets.sort_by_key(|bracket_match| {
4467 (bracket_match.open_range.start, bracket_match.open_range.end)
4468 });
4469
4470 if let empty_slot @ None =
4471 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4472 {
4473 *empty_slot = Some(all_brackets.clone());
4474 }
4475 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4476 }
4477
4478 all_bracket_matches
4479 }
4480
4481 pub fn all_bracket_ranges(
4482 &self,
4483 range: Range<usize>,
4484 ) -> impl Iterator<Item = BracketMatch<usize>> {
4485 self.fetch_bracket_ranges(range.clone(), None)
4486 .into_values()
4487 .flatten()
4488 .filter(move |bracket_match| {
4489 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4490 bracket_range.overlaps(&range)
4491 })
4492 }
4493
4494 /// Returns bracket range pairs overlapping or adjacent to `range`
4495 pub fn bracket_ranges<T: ToOffset>(
4496 &self,
4497 range: Range<T>,
4498 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4499 // Find bracket pairs that *inclusively* contain the given range.
4500 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4501 self.all_bracket_ranges(range)
4502 .filter(|pair| !pair.newline_only)
4503 }
4504
4505 pub fn debug_variables_query<T: ToOffset>(
4506 &self,
4507 range: Range<T>,
4508 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4509 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4510
4511 let mut matches = self.syntax.matches_with_options(
4512 range.clone(),
4513 &self.text,
4514 TreeSitterOptions::default(),
4515 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4516 );
4517
4518 let configs = matches
4519 .grammars()
4520 .iter()
4521 .map(|grammar| grammar.debug_variables_config.as_ref())
4522 .collect::<Vec<_>>();
4523
4524 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4525
4526 iter::from_fn(move || {
4527 loop {
4528 while let Some(capture) = captures.pop() {
4529 if capture.0.overlaps(&range) {
4530 return Some(capture);
4531 }
4532 }
4533
4534 let mat = matches.peek()?;
4535
4536 let Some(config) = configs[mat.grammar_index].as_ref() else {
4537 matches.advance();
4538 continue;
4539 };
4540
4541 for capture in mat.captures {
4542 let Some(ix) = config
4543 .objects_by_capture_ix
4544 .binary_search_by_key(&capture.index, |e| e.0)
4545 .ok()
4546 else {
4547 continue;
4548 };
4549 let text_object = config.objects_by_capture_ix[ix].1;
4550 let byte_range = capture.node.byte_range();
4551
4552 let mut found = false;
4553 for (range, existing) in captures.iter_mut() {
4554 if existing == &text_object {
4555 range.start = range.start.min(byte_range.start);
4556 range.end = range.end.max(byte_range.end);
4557 found = true;
4558 break;
4559 }
4560 }
4561
4562 if !found {
4563 captures.push((byte_range, text_object));
4564 }
4565 }
4566
4567 matches.advance();
4568 }
4569 })
4570 }
4571
4572 pub fn text_object_ranges<T: ToOffset>(
4573 &self,
4574 range: Range<T>,
4575 options: TreeSitterOptions,
4576 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4577 let range =
4578 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4579
4580 let mut matches =
4581 self.syntax
4582 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4583 grammar.text_object_config.as_ref().map(|c| &c.query)
4584 });
4585
4586 let configs = matches
4587 .grammars()
4588 .iter()
4589 .map(|grammar| grammar.text_object_config.as_ref())
4590 .collect::<Vec<_>>();
4591
4592 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4593
4594 iter::from_fn(move || {
4595 loop {
4596 while let Some(capture) = captures.pop() {
4597 if capture.0.overlaps(&range) {
4598 return Some(capture);
4599 }
4600 }
4601
4602 let mat = matches.peek()?;
4603
4604 let Some(config) = configs[mat.grammar_index].as_ref() else {
4605 matches.advance();
4606 continue;
4607 };
4608
4609 for capture in mat.captures {
4610 let Some(ix) = config
4611 .text_objects_by_capture_ix
4612 .binary_search_by_key(&capture.index, |e| e.0)
4613 .ok()
4614 else {
4615 continue;
4616 };
4617 let text_object = config.text_objects_by_capture_ix[ix].1;
4618 let byte_range = capture.node.byte_range();
4619
4620 let mut found = false;
4621 for (range, existing) in captures.iter_mut() {
4622 if existing == &text_object {
4623 range.start = range.start.min(byte_range.start);
4624 range.end = range.end.max(byte_range.end);
4625 found = true;
4626 break;
4627 }
4628 }
4629
4630 if !found {
4631 captures.push((byte_range, text_object));
4632 }
4633 }
4634
4635 matches.advance();
4636 }
4637 })
4638 }
4639
4640 /// Returns enclosing bracket ranges containing the given range
4641 pub fn enclosing_bracket_ranges<T: ToOffset>(
4642 &self,
4643 range: Range<T>,
4644 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4645 let range = range.start.to_offset(self)..range.end.to_offset(self);
4646
4647 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4648 let max_depth = result
4649 .iter()
4650 .map(|mat| mat.syntax_layer_depth)
4651 .max()
4652 .unwrap_or(0);
4653 result.into_iter().filter(move |pair| {
4654 pair.open_range.start <= range.start
4655 && pair.close_range.end >= range.end
4656 && pair.syntax_layer_depth == max_depth
4657 })
4658 }
4659
4660 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
4661 ///
4662 /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
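///
/// A hedged sketch (not compiled as a doc test; `snapshot` and `cursor_offset` are
/// assumed to exist): find the innermost pair around a cursor, ignoring pairs whose
/// open and close brackets sit on different rows.
///
/// ```ignore
/// let same_row = |open: Range<usize>, close: Range<usize>| {
///     snapshot.offset_to_point(open.start).row == snapshot.offset_to_point(close.end).row
/// };
/// let pair = snapshot
///     .innermost_enclosing_bracket_ranges(cursor_offset..cursor_offset, Some(&same_row));
/// ```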
4663 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4664 &self,
4665 range: Range<T>,
4666 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4667 ) -> Option<(Range<usize>, Range<usize>)> {
4668 let range = range.start.to_offset(self)..range.end.to_offset(self);
4669
4670 // Get the ranges of the innermost pair of brackets.
4671 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4672
4673 for pair in self.enclosing_bracket_ranges(range) {
4674 if let Some(range_filter) = range_filter
4675 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4676 {
4677 continue;
4678 }
4679
4680 let len = pair.close_range.end - pair.open_range.start;
4681
4682 if let Some((existing_open, existing_close)) = &result {
4683 let existing_len = existing_close.end - existing_open.start;
4684 if len > existing_len {
4685 continue;
4686 }
4687 }
4688
4689 result = Some((pair.open_range, pair.close_range));
4690 }
4691
4692 result
4693 }
4694
4695 /// Returns anchor ranges for any matches of the redaction query.
4696 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4697 /// will be run on the relevant section of the buffer.
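///
/// A minimal sketch (not compiled as a doc test; `snapshot` is assumed to be a
/// `BufferSnapshot`):
///
/// ```ignore
/// for range in snapshot.redacted_ranges(0..snapshot.len()) {
///     // `range` is a byte range matched by the language's redaction query.
///     let _ = range;
/// }
/// ```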
4698 pub fn redacted_ranges<T: ToOffset>(
4699 &self,
4700 range: Range<T>,
4701 ) -> impl Iterator<Item = Range<usize>> + '_ {
4702 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4703 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4704 grammar
4705 .redactions_config
4706 .as_ref()
4707 .map(|config| &config.query)
4708 });
4709
4710 let configs = syntax_matches
4711 .grammars()
4712 .iter()
4713 .map(|grammar| grammar.redactions_config.as_ref())
4714 .collect::<Vec<_>>();
4715
4716 iter::from_fn(move || {
4717 let redacted_range = syntax_matches
4718 .peek()
4719 .and_then(|mat| {
4720 configs[mat.grammar_index].and_then(|config| {
4721 mat.captures
4722 .iter()
4723 .find(|capture| capture.index == config.redaction_capture_ix)
4724 })
4725 })
4726 .map(|mat| mat.node.byte_range());
4727 syntax_matches.advance();
4728 redacted_range
4729 })
4730 }
4731
4732 pub fn injections_intersecting_range<T: ToOffset>(
4733 &self,
4734 range: Range<T>,
4735 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4736 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4737
4738 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4739 grammar
4740 .injection_config
4741 .as_ref()
4742 .map(|config| &config.query)
4743 });
4744
4745 let configs = syntax_matches
4746 .grammars()
4747 .iter()
4748 .map(|grammar| grammar.injection_config.as_ref())
4749 .collect::<Vec<_>>();
4750
4751 iter::from_fn(move || {
4752 let ranges = syntax_matches.peek().and_then(|mat| {
4753 let config = &configs[mat.grammar_index]?;
4754 let content_capture_range = mat.captures.iter().find_map(|capture| {
4755 if capture.index == config.content_capture_ix {
4756 Some(capture.node.byte_range())
4757 } else {
4758 None
4759 }
4760 })?;
4761 let language = self.language_at(content_capture_range.start)?;
4762 Some((content_capture_range, language))
4763 });
4764 syntax_matches.advance();
4765 ranges
4766 })
4767 }
4768
4769 pub fn runnable_ranges(
4770 &self,
4771 offset_range: Range<usize>,
4772 ) -> impl Iterator<Item = RunnableRange> + '_ {
4773 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4774 grammar.runnable_config.as_ref().map(|config| &config.query)
4775 });
4776
4777 let test_configs = syntax_matches
4778 .grammars()
4779 .iter()
4780 .map(|grammar| grammar.runnable_config.as_ref())
4781 .collect::<Vec<_>>();
4782
4783 iter::from_fn(move || {
4784 loop {
4785 let mat = syntax_matches.peek()?;
4786
4787 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4788 let mut run_range = None;
4789 let full_range = mat.captures.iter().fold(
4790 Range {
4791 start: usize::MAX,
4792 end: 0,
4793 },
4794 |mut acc, next| {
4795 let byte_range = next.node.byte_range();
4796 if acc.start > byte_range.start {
4797 acc.start = byte_range.start;
4798 }
4799 if acc.end < byte_range.end {
4800 acc.end = byte_range.end;
4801 }
4802 acc
4803 },
4804 );
4805 if full_range.start > full_range.end {
4806 // We did not find a full spanning range of this match.
4807 return None;
4808 }
4809 let extra_captures: SmallVec<[_; 1]> =
4810 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4811 test_configs
4812 .extra_captures
4813 .get(capture.index as usize)
4814 .cloned()
4815 .and_then(|tag_name| match tag_name {
4816 RunnableCapture::Named(name) => {
4817 Some((capture.node.byte_range(), name))
4818 }
4819 RunnableCapture::Run => {
4820 let _ = run_range.insert(capture.node.byte_range());
4821 None
4822 }
4823 })
4824 }));
4825 let run_range = run_range?;
4826 let tags = test_configs
4827 .query
4828 .property_settings(mat.pattern_index)
4829 .iter()
4830 .filter_map(|property| {
4831 if *property.key == *"tag" {
4832 property
4833 .value
4834 .as_ref()
4835 .map(|value| RunnableTag(value.to_string().into()))
4836 } else {
4837 None
4838 }
4839 })
4840 .collect();
4841 let extra_captures = extra_captures
4842 .into_iter()
4843 .map(|(range, name)| {
4844 (
4845 name.to_string(),
4846 self.text_for_range(range).collect::<String>(),
4847 )
4848 })
4849 .collect();
4850 // All tags should have the same range.
4851 Some(RunnableRange {
4852 run_range,
4853 full_range,
4854 runnable: Runnable {
4855 tags,
4856 language: mat.language,
4857 buffer: self.remote_id(),
4858 },
4859 extra_captures,
4860 buffer_id: self.remote_id(),
4861 })
4862 });
4863
4864 syntax_matches.advance();
4865 if test_range.is_some() {
4866 // It's fine to short-circuit when .peek()? returns None. We don't want to return None from this iterator just
4867 // because a capture did not contain a run marker, hence we loop around for the next capture.
4868 return test_range;
4869 }
4870 }
4871 })
4872 }
4873
4874 /// Returns selections for remote peers intersecting the given range.
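///
/// A hedged sketch (not compiled as a doc test; `snapshot` is assumed to be a
/// `BufferSnapshot`): count each remote peer's selections across the whole buffer.
///
/// ```ignore
/// let everything = snapshot.anchor_before(0)..snapshot.anchor_after(snapshot.len());
/// for (replica_id, _line_mode, _cursor_shape, selections) in
///     snapshot.selections_in_range(everything, false)
/// {
///     println!("{:?}: {} selections", replica_id, selections.count());
/// }
/// ```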
4875 #[allow(clippy::type_complexity)]
4876 pub fn selections_in_range(
4877 &self,
4878 range: Range<Anchor>,
4879 include_local: bool,
4880 ) -> impl Iterator<
4881 Item = (
4882 ReplicaId,
4883 bool,
4884 CursorShape,
4885 impl Iterator<Item = &Selection<Anchor>> + '_,
4886 ),
4887 > + '_ {
4888 self.remote_selections
4889 .iter()
4890 .filter(move |(replica_id, set)| {
4891 (include_local || **replica_id != self.text.replica_id())
4892 && !set.selections.is_empty()
4893 })
4894 .map(move |(replica_id, set)| {
4895 let start_ix = match set.selections.binary_search_by(|probe| {
4896 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4897 }) {
4898 Ok(ix) | Err(ix) => ix,
4899 };
4900 let end_ix = match set.selections.binary_search_by(|probe| {
4901 probe.start.cmp(&range.end, self).then(Ordering::Less)
4902 }) {
4903 Ok(ix) | Err(ix) => ix,
4904 };
4905
4906 (
4907 *replica_id,
4908 set.line_mode,
4909 set.cursor_shape,
4910 set.selections[start_ix..end_ix].iter(),
4911 )
4912 })
4913 }
4914
4915 /// Returns whether the buffer contains any diagnostics.
4916 pub fn has_diagnostics(&self) -> bool {
4917 !self.diagnostics.is_empty()
4918 }
4919
4920 /// Returns all the diagnostics intersecting the given range.
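///
/// A minimal sketch (not compiled as a doc test; `snapshot` is assumed to be a
/// `BufferSnapshot`): collect all error-severity diagnostics as point ranges.
///
/// ```ignore
/// let errors: Vec<_> = snapshot
///     .diagnostics_in_range::<_, Point>(0..snapshot.len(), false)
///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
///     .collect();
/// ```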
4921 pub fn diagnostics_in_range<'a, T, O>(
4922 &'a self,
4923 search_range: Range<T>,
4924 reversed: bool,
4925 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4926 where
4927 T: 'a + Clone + ToOffset,
4928 O: 'a + FromAnchor,
4929 {
4930 let mut iterators: Vec<_> = self
4931 .diagnostics
4932 .iter()
4933 .map(|(_, collection)| {
4934 collection
4935 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4936 .peekable()
4937 })
4938 .collect();
4939
4940 std::iter::from_fn(move || {
4941 let (next_ix, _) = iterators
4942 .iter_mut()
4943 .enumerate()
4944 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4945 .min_by(|(_, a), (_, b)| {
4946 let cmp = a
4947 .range
4948 .start
4949 .cmp(&b.range.start, self)
4950 // when range is equal, sort by diagnostic severity
4951 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4952 // and stabilize order with group_id
4953 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4954 if reversed { cmp.reverse() } else { cmp }
4955 })?;
4956 iterators[next_ix]
4957 .next()
4958 .map(
4959 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4960 diagnostic,
4961 range: FromAnchor::from_anchor(&range.start, self)
4962 ..FromAnchor::from_anchor(&range.end, self),
4963 },
4964 )
4965 })
4966 }
4967
4968 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4969 /// should be used instead.
4970 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4971 &self.diagnostics
4972 }
4973
4974 /// Returns all the diagnostic groups associated with the given
4975 /// language server ID. If no language server ID is provided,
4976 /// all diagnostic groups are returned.
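///
/// A hedged sketch (not compiled as a doc test; `snapshot` is assumed to be a
/// `BufferSnapshot`): walk every group reported by any language server.
///
/// ```ignore
/// for (server_id, group) in snapshot.diagnostic_groups(None) {
///     let primary = &group.entries[group.primary_ix];
///     // `primary.range` spans the group's primary diagnostic.
///     let _ = (server_id, primary);
/// }
/// ```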
4977 pub fn diagnostic_groups(
4978 &self,
4979 language_server_id: Option<LanguageServerId>,
4980 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4981 let mut groups = Vec::new();
4982
4983 if let Some(language_server_id) = language_server_id {
4984 if let Ok(ix) = self
4985 .diagnostics
4986 .binary_search_by_key(&language_server_id, |e| e.0)
4987 {
4988 self.diagnostics[ix]
4989 .1
4990 .groups(language_server_id, &mut groups, self);
4991 }
4992 } else {
4993 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4994 diagnostics.groups(*language_server_id, &mut groups, self);
4995 }
4996 }
4997
4998 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4999 let a_start = &group_a.entries[group_a.primary_ix].range.start;
5000 let b_start = &group_b.entries[group_b.primary_ix].range.start;
5001 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5002 });
5003
5004 groups
5005 }
5006
5007 /// Returns an iterator over the diagnostics for the given group.
5008 pub fn diagnostic_group<O>(
5009 &self,
5010 group_id: usize,
5011 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5012 where
5013 O: FromAnchor + 'static,
5014 {
5015 self.diagnostics
5016 .iter()
5017 .flat_map(move |(_, set)| set.group(group_id, self))
5018 }
5019
5020 /// An integer version number that accounts for all updates besides
5021 /// the buffer's text itself (which is versioned via a version vector).
5022 pub fn non_text_state_update_count(&self) -> usize {
5023 self.non_text_state_update_count
5024 }
5025
5026 /// An integer version that changes when the buffer's syntax changes.
5027 pub fn syntax_update_count(&self) -> usize {
5028 self.syntax.update_count()
5029 }
5030
5031 /// Returns a snapshot of the underlying file.
5032 pub fn file(&self) -> Option<&Arc<dyn File>> {
5033 self.file.as_ref()
5034 }
5035
5036 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5037 if let Some(file) = self.file() {
5038 if file.path().file_name().is_none() || include_root {
5039 Some(file.full_path(cx).to_string_lossy().into_owned())
5040 } else {
5041 Some(file.path().display(file.path_style(cx)).to_string())
5042 }
5043 } else {
5044 None
5045 }
5046 }
5047
5048 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5049 let query_str = query.fuzzy_contents;
5050 if query_str.is_some_and(|query| query.is_empty()) {
5051 return BTreeMap::default();
5052 }
5053
5054 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5055 language,
5056 override_id: None,
5057 }));
5058
5059 let mut query_ix = 0;
5060 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5061 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5062
5063 let mut words = BTreeMap::default();
5064 let mut current_word_start_ix = None;
5065 let mut chunk_ix = query.range.start;
5066 for chunk in self.chunks(query.range, false) {
5067 for (i, c) in chunk.text.char_indices() {
5068 let ix = chunk_ix + i;
5069 if classifier.is_word(c) {
5070 if current_word_start_ix.is_none() {
5071 current_word_start_ix = Some(ix);
5072 }
5073
5074 if let Some(query_chars) = &query_chars
5075 && query_ix < query_len
5076 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5077 {
5078 query_ix += 1;
5079 }
5080 continue;
5081 } else if let Some(word_start) = current_word_start_ix.take()
5082 && query_ix == query_len
5083 {
5084 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5085 let mut word_text = self.text_for_range(word_start..ix).peekable();
5086 let first_char = word_text
5087 .peek()
5088 .and_then(|first_chunk| first_chunk.chars().next());
5089 // Skip empty words and "words" starting with digits, as a heuristic to reduce useless completions.
5090 if !query.skip_digits
5091 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5092 {
5093 words.insert(word_text.collect(), word_range);
5094 }
5095 }
5096 query_ix = 0;
5097 }
5098 chunk_ix += chunk.text.len();
5099 }
5100
5101 words
5102 }
5103}
5104
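/// A query for [`BufferSnapshot::words_in_range`].
///
/// A minimal usage sketch (not compiled as a doc test; `snapshot` is assumed to be a
/// `BufferSnapshot`): collect words fuzzily matching "buf" across the whole buffer,
/// skipping words that start with a digit.
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("buf"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```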
5105pub struct WordsQuery<'a> {
5106 /// Only returns words that contain all of the chars of this fuzzy string, in order (case-insensitively).
5107 pub fuzzy_contents: Option<&'a str>,
5108 /// Skips words that start with a digit.
5109 pub skip_digits: bool,
5110 /// The buffer offset range to look for words in.
5111 pub range: Range<usize>,
5112}
5113
5114fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5115 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5116}
5117
5118fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5119 let mut result = IndentSize::spaces(0);
5120 for c in text {
5121 let kind = match c {
5122 ' ' => IndentKind::Space,
5123 '\t' => IndentKind::Tab,
5124 _ => break,
5125 };
5126 if result.len == 0 {
5127 result.kind = kind;
5128 }
5129 result.len += 1;
5130 }
5131 result
5132}
5133
5134impl Clone for BufferSnapshot {
5135 fn clone(&self) -> Self {
5136 Self {
5137 text: self.text.clone(),
5138 syntax: self.syntax.clone(),
5139 file: self.file.clone(),
5140 remote_selections: self.remote_selections.clone(),
5141 diagnostics: self.diagnostics.clone(),
5142 language: self.language.clone(),
5143 tree_sitter_data: self.tree_sitter_data.clone(),
5144 non_text_state_update_count: self.non_text_state_update_count,
5145 }
5146 }
5147}
5148
5149impl Deref for BufferSnapshot {
5150 type Target = text::BufferSnapshot;
5151
5152 fn deref(&self) -> &Self::Target {
5153 &self.text
5154 }
5155}
5156
5157unsafe impl Send for BufferChunks<'_> {}
5158
5159impl<'a> BufferChunks<'a> {
5160 pub(crate) fn new(
5161 text: &'a Rope,
5162 range: Range<usize>,
5163 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5164 diagnostics: bool,
5165 buffer_snapshot: Option<&'a BufferSnapshot>,
5166 ) -> Self {
5167 let mut highlights = None;
5168 if let Some((captures, highlight_maps)) = syntax {
5169 highlights = Some(BufferChunkHighlights {
5170 captures,
5171 next_capture: None,
5172 stack: Default::default(),
5173 highlight_maps,
5174 })
5175 }
5176
5177 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5178 let chunks = text.chunks_in_range(range.clone());
5179
5180 let mut this = BufferChunks {
5181 range,
5182 buffer_snapshot,
5183 chunks,
5184 diagnostic_endpoints,
5185 error_depth: 0,
5186 warning_depth: 0,
5187 information_depth: 0,
5188 hint_depth: 0,
5189 unnecessary_depth: 0,
5190 underline: true,
5191 highlights,
5192 };
5193 this.initialize_diagnostic_endpoints();
5194 this
5195 }
5196
5197 /// Seeks to the given byte range in the buffer.
5198 pub fn seek(&mut self, range: Range<usize>) {
5199 let old_range = std::mem::replace(&mut self.range, range.clone());
5200 self.chunks.set_range(self.range.clone());
5201 if let Some(highlights) = self.highlights.as_mut() {
5202 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5203 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5204 highlights
5205 .stack
5206 .retain(|(end_offset, _)| *end_offset > range.start);
5207 if let Some(capture) = &highlights.next_capture
5208 && range.start >= capture.node.start_byte()
5209 {
5210 let next_capture_end = capture.node.end_byte();
5211 if range.start < next_capture_end {
5212 highlights.stack.push((
5213 next_capture_end,
5214 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5215 ));
5216 }
5217 highlights.next_capture.take();
5218 }
5219 } else if let Some(snapshot) = self.buffer_snapshot {
5220 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5221 *highlights = BufferChunkHighlights {
5222 captures,
5223 next_capture: None,
5224 stack: Default::default(),
5225 highlight_maps,
5226 };
5227 } else {
5228 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5229 // Seeking such BufferChunks is not supported.
5230 debug_assert!(
5231 false,
5232 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5233 );
5234 }
5235
5236 highlights.captures.set_byte_range(self.range.clone());
5237 self.initialize_diagnostic_endpoints();
5238 }
5239 }
5240
5241 fn initialize_diagnostic_endpoints(&mut self) {
5242 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5243 && let Some(buffer) = self.buffer_snapshot
5244 {
5245 let mut diagnostic_endpoints = Vec::new();
5246 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5247 diagnostic_endpoints.push(DiagnosticEndpoint {
5248 offset: entry.range.start,
5249 is_start: true,
5250 severity: entry.diagnostic.severity,
5251 is_unnecessary: entry.diagnostic.is_unnecessary,
5252 underline: entry.diagnostic.underline,
5253 });
5254 diagnostic_endpoints.push(DiagnosticEndpoint {
5255 offset: entry.range.end,
5256 is_start: false,
5257 severity: entry.diagnostic.severity,
5258 is_unnecessary: entry.diagnostic.is_unnecessary,
5259 underline: entry.diagnostic.underline,
5260 });
5261 }
5262 diagnostic_endpoints
5263 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5264 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5265 self.hint_depth = 0;
5266 self.error_depth = 0;
5267 self.warning_depth = 0;
5268 self.information_depth = 0;
5269 }
5270 }
5271
5272 /// The current byte offset in the buffer.
5273 pub fn offset(&self) -> usize {
5274 self.range.start
5275 }
5276
5277 pub fn range(&self) -> Range<usize> {
5278 self.range.clone()
5279 }
5280
5281 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5282 let depth = match endpoint.severity {
5283 DiagnosticSeverity::ERROR => &mut self.error_depth,
5284 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5285 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5286 DiagnosticSeverity::HINT => &mut self.hint_depth,
5287 _ => return,
5288 };
5289 if endpoint.is_start {
5290 *depth += 1;
5291 } else {
5292 *depth -= 1;
5293 }
5294
5295 if endpoint.is_unnecessary {
5296 if endpoint.is_start {
5297 self.unnecessary_depth += 1;
5298 } else {
5299 self.unnecessary_depth -= 1;
5300 }
5301 }
5302 }
5303
5304 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5305 if self.error_depth > 0 {
5306 Some(DiagnosticSeverity::ERROR)
5307 } else if self.warning_depth > 0 {
5308 Some(DiagnosticSeverity::WARNING)
5309 } else if self.information_depth > 0 {
5310 Some(DiagnosticSeverity::INFORMATION)
5311 } else if self.hint_depth > 0 {
5312 Some(DiagnosticSeverity::HINT)
5313 } else {
5314 None
5315 }
5316 }
5317
5318 fn current_code_is_unnecessary(&self) -> bool {
5319 self.unnecessary_depth > 0
5320 }
5321}
5322
5323impl<'a> Iterator for BufferChunks<'a> {
5324 type Item = Chunk<'a>;
5325
5326 fn next(&mut self) -> Option<Self::Item> {
5327 let mut next_capture_start = usize::MAX;
5328 let mut next_diagnostic_endpoint = usize::MAX;
5329
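        // Drop highlight scopes that have already ended, then advance through
        // captures that start at or before the current offset, pushing each
        // onto the highlight stack.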
5330 if let Some(highlights) = self.highlights.as_mut() {
5331 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5332 if *parent_capture_end <= self.range.start {
5333 highlights.stack.pop();
5334 } else {
5335 break;
5336 }
5337 }
5338
5339 if highlights.next_capture.is_none() {
5340 highlights.next_capture = highlights.captures.next();
5341 }
5342
5343 while let Some(capture) = highlights.next_capture.as_ref() {
5344 if self.range.start < capture.node.start_byte() {
5345 next_capture_start = capture.node.start_byte();
5346 break;
5347 } else {
5348 let highlight_id =
5349 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5350 highlights
5351 .stack
5352 .push((capture.node.end_byte(), highlight_id));
5353 highlights.next_capture = highlights.captures.next();
5354 }
5355 }
5356 }
5357
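        // Consume diagnostic endpoints at or before the current offset,
        // updating the severity depths, and note where the next endpoint lies
        // so the emitted chunk can stop there.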
5358 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5359 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5360 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5361 if endpoint.offset <= self.range.start {
5362 self.update_diagnostic_depths(endpoint);
5363 diagnostic_endpoints.next();
5364 self.underline = endpoint.underline;
5365 } else {
5366 next_diagnostic_endpoint = endpoint.offset;
5367 break;
5368 }
5369 }
5370 }
5371 self.diagnostic_endpoints = diagnostic_endpoints;
5372
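        // Emit the next chunk of text, clipped to the nearest highlight
        // capture boundary or diagnostic endpoint, along with its tab and
        // character bitmaps.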
5373 if let Some(ChunkBitmaps {
5374 text: chunk,
5375 chars: chars_map,
5376 tabs,
5377 }) = self.chunks.peek_with_bitmaps()
5378 {
5379 let chunk_start = self.range.start;
5380 let mut chunk_end = (self.chunks.offset() + chunk.len())
5381 .min(next_capture_start)
5382 .min(next_diagnostic_endpoint);
5383 let mut highlight_id = None;
5384 if let Some(highlights) = self.highlights.as_ref()
5385 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5386 {
5387 chunk_end = chunk_end.min(*parent_capture_end);
5388 highlight_id = Some(*parent_highlight_id);
5389 }
5390 let bit_start = chunk_start - self.chunks.offset();
5391 let bit_end = chunk_end - self.chunks.offset();
5392
5393 let slice = &chunk[bit_start..bit_end];
5394
5395 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5396 let tabs = (tabs >> bit_start) & mask;
5397 let chars = (chars_map >> bit_start) & mask;
5398
5399 self.range.start = chunk_end;
5400 if self.range.start == self.chunks.offset() + chunk.len() {
5401 self.chunks.next().unwrap();
5402 }
5403
5404 Some(Chunk {
5405 text: slice,
5406 syntax_highlight_id: highlight_id,
5407 underline: self.underline,
5408 diagnostic_severity: self.current_diagnostic_severity(),
5409 is_unnecessary: self.current_code_is_unnecessary(),
5410 tabs,
5411 chars,
5412 ..Chunk::default()
5413 })
5414 } else {
5415 None
5416 }
5417 }
5418}
5419
5420impl operation_queue::Operation for Operation {
5421 fn lamport_timestamp(&self) -> clock::Lamport {
5422 match self {
5423 Operation::Buffer(_) => {
5424 unreachable!("buffer operations should never be deferred at this layer")
5425 }
5426 Operation::UpdateDiagnostics {
5427 lamport_timestamp, ..
5428 }
5429 | Operation::UpdateSelections {
5430 lamport_timestamp, ..
5431 }
5432 | Operation::UpdateCompletionTriggers {
5433 lamport_timestamp, ..
5434 }
5435 | Operation::UpdateLineEnding {
5436 lamport_timestamp, ..
5437 } => *lamport_timestamp,
5438 }
5439 }
5440}
5441
5442impl Default for Diagnostic {
5443 fn default() -> Self {
5444 Self {
5445 source: Default::default(),
5446 source_kind: DiagnosticSourceKind::Other,
5447 code: None,
5448 code_description: None,
5449 severity: DiagnosticSeverity::ERROR,
5450 message: Default::default(),
5451 markdown: None,
5452 group_id: 0,
5453 is_primary: false,
5454 is_disk_based: false,
5455 is_unnecessary: false,
5456 underline: true,
5457 data: None,
5458 registration_id: None,
5459 }
5460 }
5461}
5462
5463impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5465 pub fn spaces(len: u32) -> Self {
5466 Self {
5467 len,
5468 kind: IndentKind::Space,
5469 }
5470 }
5471
5472 /// Returns an [`IndentSize`] representing a tab.
5473 pub fn tab() -> Self {
5474 Self {
5475 len: 1,
5476 kind: IndentKind::Tab,
5477 }
5478 }
5479
5480 /// An iterator over the characters represented by this [`IndentSize`].
5481 pub fn chars(&self) -> impl Iterator<Item = char> {
5482 iter::repeat(self.char()).take(self.len as usize)
5483 }
5484
5485 /// The character representation of this [`IndentSize`].
5486 pub fn char(&self) -> char {
5487 match self.kind {
5488 IndentKind::Space => ' ',
5489 IndentKind::Tab => '\t',
5490 }
5491 }
5492
    /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been shrunk (`Ordering::Less`) or enlarged (`Ordering::Greater`) by the
    /// given size. The length only changes when the indent kinds match (and,
    /// when shrinking, this indent is at least `size` long); growing an empty
    /// indent replaces it with `size` regardless of kind.
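    ///
    /// A minimal illustration of the expected behavior (not compiled as a doc
    /// test, since it assumes access to the `len` field):
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    ///
    /// // Growing a 4-space indent by 2 spaces yields 6 spaces.
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(2));
    /// assert_eq!(grown.len, 6);
    ///
    /// // Shrinking does nothing when the indent kinds differ.
    /// let unchanged = IndentSize::tab().with_delta(Ordering::Less, IndentSize::spaces(2));
    /// assert_eq!(unchanged.len, 1);
    /// ```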
5495 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5496 match direction {
5497 Ordering::Less => {
5498 if self.kind == size.kind && self.len >= size.len {
5499 self.len -= size.len;
5500 }
5501 }
5502 Ordering::Equal => {}
5503 Ordering::Greater => {
5504 if self.len == 0 {
5505 self = size;
5506 } else if self.kind == size.kind {
5507 self.len += size.len;
5508 }
5509 }
5510 }
5511 self
5512 }
5513
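    /// The width of this indent in columns, counting each tab as `tab_size`
    /// columns.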
5514 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5515 match self.kind {
5516 IndentKind::Space => self.len as usize,
5517 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5518 }
5519 }
5520}
5521
5522#[cfg(any(test, feature = "test-support"))]
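/// A minimal [`File`] implementation for tests, consisting of a path, a
/// worktree root name, and an optional local root directory.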
5523pub struct TestFile {
5524 pub path: Arc<RelPath>,
5525 pub root_name: String,
5526 pub local_root: Option<PathBuf>,
5527}
5528
5529#[cfg(any(test, feature = "test-support"))]
5530impl File for TestFile {
5531 fn path(&self) -> &Arc<RelPath> {
5532 &self.path
5533 }
5534
5535 fn full_path(&self, _: &gpui::App) -> PathBuf {
5536 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5537 }
5538
5539 fn as_local(&self) -> Option<&dyn LocalFile> {
5540 if self.local_root.is_some() {
5541 Some(self)
5542 } else {
5543 None
5544 }
5545 }
5546
5547 fn disk_state(&self) -> DiskState {
5548 unimplemented!()
5549 }
5550
5551 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5552 self.path().file_name().unwrap_or(self.root_name.as_ref())
5553 }
5554
5555 fn worktree_id(&self, _: &App) -> WorktreeId {
5556 WorktreeId::from_usize(0)
5557 }
5558
5559 fn to_proto(&self, _: &App) -> rpc::proto::File {
5560 unimplemented!()
5561 }
5562
5563 fn is_private(&self) -> bool {
5564 false
5565 }
5566
5567 fn path_style(&self, _cx: &App) -> PathStyle {
5568 PathStyle::local()
5569 }
5570}
5571
5572#[cfg(any(test, feature = "test-support"))]
5573impl LocalFile for TestFile {
5574 fn abs_path(&self, _cx: &App) -> PathBuf {
5575 PathBuf::from(self.local_root.as_ref().unwrap())
5576 .join(&self.root_name)
5577 .join(self.path.as_std_path())
5578 }
5579
5580 fn load(&self, _cx: &App) -> Task<Result<String>> {
5581 unimplemented!()
5582 }
5583
5584 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5585 unimplemented!()
5586 }
5587}
5588
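/// Groups an iterator of `u32` values into contiguous, half-open ranges,
/// starting a new range whenever the next value does not extend the current
/// range or the current range has reached `max_len` items.
///
/// For example, `[1, 2, 3, 5, 6]` with a `max_len` of 2 yields the ranges
/// `1..3`, `3..4`, and `5..7`.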
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut current_range: Option<Range<u32>> = None;
5595 std::iter::from_fn(move || {
5596 loop {
5597 if let Some(value) = values.next() {
5598 if let Some(range) = &mut current_range
5599 && value == range.end
5600 && range.len() < max_len
5601 {
5602 range.end += 1;
5603 continue;
5604 }
5605
5606 let prev_range = current_range.clone();
5607 current_range = Some(value..(value + 1));
5608 if prev_range.is_some() {
5609 return prev_range;
5610 }
5611 } else {
5612 return current_range.take();
5613 }
5614 }
5615 })
5616}
5617
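/// Classifies characters as words, whitespace, or punctuation, optionally
/// consulting a [`LanguageScope`]'s configured character sets.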
5618#[derive(Default, Debug)]
5619pub struct CharClassifier {
5620 scope: Option<LanguageScope>,
5621 scope_context: Option<CharScopeContext>,
5622 ignore_punctuation: bool,
5623}
5624
5625impl CharClassifier {
5626 pub fn new(scope: Option<LanguageScope>) -> Self {
5627 Self {
5628 scope,
5629 scope_context: None,
5630 ignore_punctuation: false,
5631 }
5632 }
5633
5634 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5635 Self {
5636 scope_context,
5637 ..self
5638 }
5639 }
5640
5641 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5642 Self {
5643 ignore_punctuation,
5644 ..self
5645 }
5646 }
5647
5648 pub fn is_whitespace(&self, c: char) -> bool {
5649 self.kind(c) == CharKind::Whitespace
5650 }
5651
5652 pub fn is_word(&self, c: char) -> bool {
5653 self.kind(c) == CharKind::Word
5654 }
5655
5656 pub fn is_punctuation(&self, c: char) -> bool {
5657 self.kind(c) == CharKind::Punctuation
5658 }
5659
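    /// Classifies `c` using an explicit `ignore_punctuation` setting instead
    /// of the classifier's own. Alphanumerics and `_` are always words; any
    /// extra word characters configured for the current scope and context are
    /// checked next, then whitespace, and everything else is punctuation
    /// unless `ignore_punctuation` is set.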
5660 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5661 if c.is_alphanumeric() || c == '_' {
5662 return CharKind::Word;
5663 }
5664
5665 if let Some(scope) = &self.scope {
5666 let characters = match self.scope_context {
5667 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5668 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5669 None => scope.word_characters(),
5670 };
5671 if let Some(characters) = characters
5672 && characters.contains(&c)
5673 {
5674 return CharKind::Word;
5675 }
5676 }
5677
5678 if c.is_whitespace() {
5679 return CharKind::Whitespace;
5680 }
5681
5682 if ignore_punctuation {
5683 CharKind::Word
5684 } else {
5685 CharKind::Punctuation
5686 }
5687 }
5688
5689 pub fn kind(&self, c: char) -> CharKind {
5690 self.kind_with(c, self.ignore_punctuation)
5691 }
5692}
5693
/// Finds all ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
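///
/// For example, for the text `"a  \nb\t\n"` this returns the byte ranges
/// `1..3` (the spaces after `a`) and `5..6` (the tab after `b`).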
5699pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5700 let mut ranges = Vec::new();
5701
5702 let mut offset = 0;
5703 let mut prev_chunk_trailing_whitespace_range = 0..0;
5704 for chunk in rope.chunks() {
5705 let mut prev_line_trailing_whitespace_range = 0..0;
5706 for (i, line) in chunk.split('\n').enumerate() {
5707 let line_end_offset = offset + line.len();
5708 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5709 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5710
5711 if i == 0 && trimmed_line_len == 0 {
5712 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5713 }
5714 if !prev_line_trailing_whitespace_range.is_empty() {
5715 ranges.push(prev_line_trailing_whitespace_range);
5716 }
5717
5718 offset = line_end_offset + 1;
5719 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5720 }
5721
5722 offset -= 1;
5723 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5724 }
5725
5726 if !prev_chunk_trailing_whitespace_range.is_empty() {
5727 ranges.push(prev_chunk_trailing_whitespace_range);
5728 }
5729
5730 ranges
5731}