1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::channel::oneshot;
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TaskLabel, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::{Arc, LazyLock},
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
79/// A label for the background task spawned by the buffer to compute
80/// a diff against the contents of its file.
81pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
82
83 /// Indicates whether a [`Buffer`] can be edited.
84#[derive(PartialEq, Clone, Copy, Debug)]
85pub enum Capability {
86 /// The buffer is a mutable replica.
87 ReadWrite,
88 /// The buffer is a read-only replica.
89 ReadOnly,
90}
91
92pub type BufferRow = u32;
93
94/// An in-memory representation of a source code file, including its text,
95/// syntax trees, git status, and diagnostics.
96pub struct Buffer {
97 text: TextBuffer,
98 branch_state: Option<BufferBranchState>,
99 /// Filesystem state, `None` when there is no path.
100 file: Option<Arc<dyn File>>,
101 /// The mtime of the file when this buffer was last loaded from
102 /// or saved to disk.
103 saved_mtime: Option<MTime>,
104 /// The version vector when this buffer was last loaded from
105 /// or saved to disk.
106 saved_version: clock::Global,
107 preview_version: clock::Global,
108 transaction_depth: usize,
109 was_dirty_before_starting_transaction: Option<bool>,
110 reload_task: Option<Task<Result<()>>>,
111 language: Option<Arc<Language>>,
112 autoindent_requests: Vec<Arc<AutoindentRequest>>,
113 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
114 pending_autoindent: Option<Task<()>>,
115 sync_parse_timeout: Duration,
116 syntax_map: Mutex<SyntaxMap>,
117 reparse: Option<Task<()>>,
118 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
119 non_text_state_update_count: usize,
120 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
121 remote_selections: TreeMap<ReplicaId, SelectionSet>,
122 diagnostics_timestamp: clock::Lamport,
123 completion_triggers: BTreeSet<String>,
124 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
125 completion_triggers_timestamp: clock::Lamport,
126 deferred_ops: OperationQueue<Operation>,
127 capability: Capability,
128 has_conflict: bool,
129 /// Memoizes calls to `has_changes_since(saved_version)`.
130 /// The cell contains `(self.version, has_changes)` as of the most recent call.
131 has_unsaved_edits: Cell<(clock::Global, bool)>,
132 change_bits: Vec<rc::Weak<Cell<bool>>>,
133 _subscriptions: Vec<gpui::Subscription>,
134 tree_sitter_data: Arc<TreeSitterData>,
135 encoding: &'static Encoding,
136 has_bom: bool,
137}
138
139#[derive(Debug)]
140pub struct TreeSitterData {
141 chunks: RowChunks,
142 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
143}
144
145const MAX_ROWS_IN_A_CHUNK: u32 = 50;
146
147impl TreeSitterData {
148 fn clear(&mut self, snapshot: text::BufferSnapshot) {
149 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
150 self.brackets_by_chunks.get_mut().clear();
151 self.brackets_by_chunks
152 .get_mut()
153 .resize(self.chunks.len(), None);
154 }
155
156 fn new(snapshot: text::BufferSnapshot) -> Self {
157 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
158 Self {
159 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
160 chunks,
161 }
162 }
163
164 fn version(&self) -> &clock::Global {
165 self.chunks.version()
166 }
167}
168
169#[derive(Copy, Clone, Debug, PartialEq, Eq)]
170pub enum ParseStatus {
171 Idle,
172 Parsing,
173}
174
175struct BufferBranchState {
176 base_buffer: Entity<Buffer>,
177 merged_operations: Vec<Lamport>,
178}
179
180/// An immutable, cheaply cloneable representation of a fixed
181/// state of a buffer.
182pub struct BufferSnapshot {
183 pub text: text::BufferSnapshot,
184 pub syntax: SyntaxSnapshot,
185 file: Option<Arc<dyn File>>,
186 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
187 remote_selections: TreeMap<ReplicaId, SelectionSet>,
188 language: Option<Arc<Language>>,
189 non_text_state_update_count: usize,
190 tree_sitter_data: Arc<TreeSitterData>,
191}
192
193/// The kind and amount of indentation in a particular line. For now,
194/// assumes that indentation is all the same character.
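///
/// For example (illustrative only), four leading spaces and a single leading
/// tab would be represented as:
///
/// ```ignore
/// let spaces = IndentSize { len: 4, kind: IndentKind::Space };
/// let tab = IndentSize { len: 1, kind: IndentKind::Tab };
/// ```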
195#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
196pub struct IndentSize {
197 /// The number of bytes that comprise the indentation.
198 pub len: u32,
199 /// The kind of whitespace used for indentation.
200 pub kind: IndentKind,
201}
202
203/// A whitespace character that's used for indentation.
204#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
205pub enum IndentKind {
206 /// An ASCII space character.
207 #[default]
208 Space,
209 /// An ASCII tab character.
210 Tab,
211}
212
213/// The shape of a selection cursor.
214#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
215pub enum CursorShape {
216 /// A vertical bar
217 #[default]
218 Bar,
219 /// A block that surrounds the following character
220 Block,
221 /// An underline that runs along the following character
222 Underline,
223 /// A box drawn around the following character
224 Hollow,
225}
226
227impl From<settings::CursorShape> for CursorShape {
228 fn from(shape: settings::CursorShape) -> Self {
229 match shape {
230 settings::CursorShape::Bar => CursorShape::Bar,
231 settings::CursorShape::Block => CursorShape::Block,
232 settings::CursorShape::Underline => CursorShape::Underline,
233 settings::CursorShape::Hollow => CursorShape::Hollow,
234 }
235 }
236}
237
238#[derive(Clone, Debug)]
239struct SelectionSet {
240 line_mode: bool,
241 cursor_shape: CursorShape,
242 selections: Arc<[Selection<Anchor>]>,
243 lamport_timestamp: clock::Lamport,
244}
245
246/// A diagnostic associated with a certain range of a buffer.
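///
/// A minimal, illustrative construction (the field values here are made up;
/// real diagnostics normally come from a language server):
///
/// ```ignore
/// let diagnostic = Diagnostic {
///     source: Some("rustc".to_string()),
///     registration_id: None,
///     code: None,
///     code_description: None,
///     severity: DiagnosticSeverity::ERROR,
///     message: "cannot find value `foo` in this scope".to_string(),
///     markdown: None,
///     group_id: 0,         // shared by the primary diagnostic and its related entries
///     is_primary: true,
///     is_disk_based: true, // e.g. produced by a check of the saved file on disk
///     is_unnecessary: false,
///     source_kind: DiagnosticSourceKind::Pushed,
///     data: None,
///     underline: true,
/// };
/// ```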
247#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
248pub struct Diagnostic {
249 /// The name of the service that produced this diagnostic.
250 pub source: Option<String>,
251 /// The ID provided by the dynamic registration that produced this diagnostic.
252 pub registration_id: Option<SharedString>,
253 /// A machine-readable code that identifies this diagnostic.
254 pub code: Option<NumberOrString>,
255 pub code_description: Option<lsp::Uri>,
256 /// Whether this diagnostic is a hint, warning, or error.
257 pub severity: DiagnosticSeverity,
258 /// The human-readable message associated with this diagnostic.
259 pub message: String,
260 /// The human-readable message (in markdown format)
261 pub markdown: Option<String>,
262 /// An id that identifies the group to which this diagnostic belongs.
263 ///
264 /// When a language server produces a diagnostic with
265 /// one or more associated diagnostics, those diagnostics are all
266 /// assigned a single group ID.
267 pub group_id: usize,
268 /// Whether this diagnostic is the primary diagnostic for its group.
269 ///
270 /// In a given group, the primary diagnostic is the top-level diagnostic
271 /// returned by the language server. The non-primary diagnostics are the
272 /// associated diagnostics.
273 pub is_primary: bool,
274 /// Whether this diagnostic is considered to originate from an analysis of
275 /// files on disk, as opposed to any unsaved buffer contents. This is a
276 /// property of a given diagnostic source, and is configured for a given
277 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
278 /// for the language server.
279 pub is_disk_based: bool,
280 /// Whether this diagnostic marks unnecessary code.
281 pub is_unnecessary: bool,
282 /// Allows quickly separating diagnostic groups based on their source.
283 pub source_kind: DiagnosticSourceKind,
284 /// Data from the language server that produced this diagnostic. Passed back to the server when code actions are requested for this diagnostic.
285 pub data: Option<Value>,
286 /// Whether to underline the corresponding text range in the editor.
287 pub underline: bool,
288}
289
290#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
291pub enum DiagnosticSourceKind {
292 Pulled,
293 Pushed,
294 Other,
295}
296
297/// An operation used to synchronize this buffer with its other replicas.
298#[derive(Clone, Debug, PartialEq)]
299pub enum Operation {
300 /// A text operation.
301 Buffer(text::Operation),
302
303 /// An update to the buffer's diagnostics.
304 UpdateDiagnostics {
305 /// The id of the language server that produced the new diagnostics.
306 server_id: LanguageServerId,
307 /// The diagnostics.
308 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
309 /// The buffer's lamport timestamp.
310 lamport_timestamp: clock::Lamport,
311 },
312
313 /// An update to the most recent selections in this buffer.
314 UpdateSelections {
315 /// The selections.
316 selections: Arc<[Selection<Anchor>]>,
317 /// The buffer's lamport timestamp.
318 lamport_timestamp: clock::Lamport,
319 /// Whether the selections are in 'line mode'.
320 line_mode: bool,
321 /// The [`CursorShape`] associated with these selections.
322 cursor_shape: CursorShape,
323 },
324
325 /// An update to the characters that should trigger autocompletion
326 /// for this buffer.
327 UpdateCompletionTriggers {
328 /// The characters that trigger autocompletion.
329 triggers: Vec<String>,
330 /// The buffer's lamport timestamp.
331 lamport_timestamp: clock::Lamport,
332 /// The language server ID.
333 server_id: LanguageServerId,
334 },
335
336 /// An update to the line ending type of this buffer.
337 UpdateLineEnding {
338 /// The line ending type.
339 line_ending: LineEnding,
340 /// The buffer's lamport timestamp.
341 lamport_timestamp: clock::Lamport,
342 },
343}
344
345/// An event that occurs in a buffer.
346#[derive(Clone, Debug, PartialEq)]
347pub enum BufferEvent {
348 /// The buffer was changed in a way that must be
349 /// propagated to its other replicas.
350 Operation {
351 operation: Operation,
352 is_local: bool,
353 },
354 /// The buffer was edited.
355 Edited,
356 /// The buffer's `dirty` bit changed.
357 DirtyChanged,
358 /// The buffer was saved.
359 Saved,
360 /// The buffer's file was changed on disk.
361 FileHandleChanged,
362 /// The buffer was reloaded.
363 Reloaded,
364 /// The buffer is in need of a reload.
365 ReloadNeeded,
366 /// The buffer's language was changed.
367 /// The boolean indicates whether this buffer did not have a language before, but does now.
368 LanguageChanged(bool),
369 /// The buffer's syntax trees were updated.
370 Reparsed,
371 /// The buffer's diagnostics were updated.
372 DiagnosticsUpdated,
373 /// The buffer gained or lost editing capabilities.
374 CapabilityChanged,
375}
376
377/// The file associated with a buffer.
378pub trait File: Send + Sync + Any {
379 /// Returns the [`LocalFile`] associated with this file, if the
380 /// file is local.
381 fn as_local(&self) -> Option<&dyn LocalFile>;
382
383 /// Returns whether this file is local.
384 fn is_local(&self) -> bool {
385 self.as_local().is_some()
386 }
387
388 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
389 /// only available in some states, such as modification time.
390 fn disk_state(&self) -> DiskState;
391
392 /// Returns the path of this file relative to the worktree's root directory.
393 fn path(&self) -> &Arc<RelPath>;
394
395 /// Returns the path of this file relative to the worktree's parent directory (this means it
396 /// includes the name of the worktree's root folder).
397 fn full_path(&self, cx: &App) -> PathBuf;
398
399 /// Returns the path style of this file.
400 fn path_style(&self, cx: &App) -> PathStyle;
401
402 /// Returns the last component of this handle's absolute path. If this handle refers to the root
403 /// of its worktree, then this method will return the name of the worktree itself.
404 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
405
406 /// Returns the id of the worktree to which this file belongs.
407 ///
408 /// This is needed for looking up project-specific settings.
409 fn worktree_id(&self, cx: &App) -> WorktreeId;
410
411 /// Converts this file into a protobuf message.
412 fn to_proto(&self, cx: &App) -> rpc::proto::File;
413
414 /// Returns whether Zed considers this to be a private file.
415 fn is_private(&self) -> bool;
416}
417
418/// The file's storage status - whether it's stored (`Present`), and if so when it was last
419/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
420/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
421/// indicator for new files.
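///
/// A small sketch of how these states are typically inspected, assuming a
/// `file: &dyn File` is in scope and using only the variants declared below:
///
/// ```ignore
/// match file.disk_state() {
///     DiskState::New => { /* never saved; no mtime to compare against */ }
///     DiskState::Present { mtime } => { /* compare `mtime` with the buffer's saved mtime */ }
///     DiskState::Deleted => { /* previously saved, now gone from disk */ }
/// }
/// ```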
422#[derive(Copy, Clone, Debug, PartialEq)]
423pub enum DiskState {
424 /// File created in Zed that has not been saved.
425 New,
426 /// File present on the filesystem.
427 Present { mtime: MTime },
428 /// Deleted file that was previously present.
429 Deleted,
430}
431
432impl DiskState {
433 /// Returns the file's last known modification time on disk.
434 pub fn mtime(self) -> Option<MTime> {
435 match self {
436 DiskState::New => None,
437 DiskState::Present { mtime } => Some(mtime),
438 DiskState::Deleted => None,
439 }
440 }
441
442 pub fn exists(&self) -> bool {
443 match self {
444 DiskState::New => false,
445 DiskState::Present { .. } => true,
446 DiskState::Deleted => false,
447 }
448 }
449}
450
451/// The file associated with a buffer, in the case where the file is on the local disk.
452pub trait LocalFile: File {
453 /// Returns the absolute path of this file
454 fn abs_path(&self, cx: &App) -> PathBuf;
455
456 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
457 fn load(&self, cx: &App) -> Task<Result<String>>;
458
459 /// Loads the file's contents from disk.
460 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
461}
462
463/// The auto-indent behavior associated with an editing operation.
464/// For some editing operations, each affected line of text has its
465/// indentation recomputed. For other operations, the entire block
466/// of edited text is adjusted uniformly.
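///
/// A sketch of requesting block-mode auto-indent when inserting copied text
/// (assumes `buffer`, `range`, `copied_text`, and `cx` are in scope; `4` is the
/// column at which the copied text originally started):
///
/// ```ignore
/// buffer.edit(
///     [(range, copied_text)],
///     Some(AutoindentMode::Block {
///         original_indent_columns: vec![Some(4)],
///     }),
///     cx,
/// );
/// ```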
467#[derive(Clone, Debug)]
468pub enum AutoindentMode {
469 /// Indent each line of inserted text.
470 EachLine,
471 /// Apply the same indentation adjustment to all of the lines
472 /// in a given insertion.
473 Block {
474 /// The original indentation column of the first line of each
475 /// insertion, if it has been copied.
476 ///
477 /// Knowing this makes it possible to preserve the relative indentation
478 /// of every line in the insertion from when it was copied.
479 ///
480 /// If the original indent column is `a`, and the first line of the insertion
481 /// is auto-indented to column `b`, then every other line of the insertion
482 /// will have its indentation adjusted by the same delta, `b - a`.
483 original_indent_columns: Vec<Option<u32>>,
484 },
485}
486
487#[derive(Clone)]
488struct AutoindentRequest {
489 before_edit: BufferSnapshot,
490 entries: Vec<AutoindentRequestEntry>,
491 is_block_mode: bool,
492 ignore_empty_lines: bool,
493}
494
495#[derive(Debug, Clone)]
496struct AutoindentRequestEntry {
497 /// A range of the buffer whose indentation should be adjusted.
498 range: Range<Anchor>,
499 /// Whether or not these lines should be considered brand new, for the
500 /// purpose of auto-indent. When text is not new, its indentation will
501 /// only be adjusted if the suggested indentation level has *changed*
502 /// since the edit was made.
503 first_line_is_new: bool,
504 indent_size: IndentSize,
505 original_indent_column: Option<u32>,
506}
507
508#[derive(Debug)]
509struct IndentSuggestion {
510 basis_row: u32,
511 delta: Ordering,
512 within_error: bool,
513}
514
515struct BufferChunkHighlights<'a> {
516 captures: SyntaxMapCaptures<'a>,
517 next_capture: Option<SyntaxMapCapture<'a>>,
518 stack: Vec<(usize, HighlightId)>,
519 highlight_maps: Vec<HighlightMap>,
520}
521
522/// An iterator that yields chunks of a buffer's text, along with their
523/// syntax highlights and diagnostic status.
524pub struct BufferChunks<'a> {
525 buffer_snapshot: Option<&'a BufferSnapshot>,
526 range: Range<usize>,
527 chunks: text::Chunks<'a>,
528 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
529 error_depth: usize,
530 warning_depth: usize,
531 information_depth: usize,
532 hint_depth: usize,
533 unnecessary_depth: usize,
534 underline: bool,
535 highlights: Option<BufferChunkHighlights<'a>>,
536}
537
538/// A chunk of a buffer's text, along with its syntax highlight and
539/// diagnostic status.
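///
/// A minimal sketch of consuming chunks (the iterator itself is produced
/// elsewhere, e.g. by [`BufferChunks`]); only fields declared below are used:
///
/// ```ignore
/// fn plain_text_with_errors<'a>(chunks: impl Iterator<Item = Chunk<'a>>) -> (String, bool) {
///     let mut text = String::new();
///     let mut has_error = false;
///     for chunk in chunks {
///         text.push_str(chunk.text);
///         has_error |= chunk.diagnostic_severity == Some(DiagnosticSeverity::ERROR);
///     }
///     (text, has_error)
/// }
/// ```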
540#[derive(Clone, Debug, Default)]
541pub struct Chunk<'a> {
542 /// The text of the chunk.
543 pub text: &'a str,
544 /// The syntax highlighting style of the chunk.
545 pub syntax_highlight_id: Option<HighlightId>,
546 /// The highlight style that has been applied to this chunk in
547 /// the editor.
548 pub highlight_style: Option<HighlightStyle>,
549 /// The severity of diagnostic associated with this chunk, if any.
550 pub diagnostic_severity: Option<DiagnosticSeverity>,
551 /// A bitset of which characters are tabs in this string.
552 pub tabs: u128,
553 /// Bitmap of character indices in this chunk
554 pub chars: u128,
555 /// Whether this chunk of text is marked as unnecessary.
556 pub is_unnecessary: bool,
557 /// Whether this chunk of text was originally a tab character.
558 pub is_tab: bool,
559 /// Whether this chunk of text was originally an inlay.
560 pub is_inlay: bool,
561 /// Whether to underline the corresponding text range in the editor.
562 pub underline: bool,
563}
564
565/// A set of edits to a given version of a buffer, computed asynchronously.
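///
/// A sketch of the typical flow (mirroring [`Buffer::reload`] below): compute
/// the diff in the background, then apply it only if the buffer has not changed
/// in the meantime. Assumes `buffer: Entity<Buffer>`, `new_text: String`, and an
/// `App` context `cx`, awaited from an async context:
///
/// ```ignore
/// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx)).await;
/// buffer.update(cx, |buffer, cx| {
///     if buffer.version() == diff.base_version {
///         buffer.apply_diff(diff, cx);
///     }
/// });
/// ```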
566#[derive(Debug)]
567pub struct Diff {
568 pub base_version: clock::Global,
569 pub line_ending: LineEnding,
570 pub edits: Vec<(Range<usize>, Arc<str>)>,
571}
572
573#[derive(Debug, Clone, Copy)]
574pub(crate) struct DiagnosticEndpoint {
575 offset: usize,
576 is_start: bool,
577 underline: bool,
578 severity: DiagnosticSeverity,
579 is_unnecessary: bool,
580}
581
582/// A class of characters, used for characterizing a run of text.
583#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
584pub enum CharKind {
585 /// Whitespace.
586 Whitespace,
587 /// Punctuation.
588 Punctuation,
589 /// Word.
590 Word,
591}
592
593/// Context for character classification within a specific scope.
594#[derive(Copy, Clone, Eq, PartialEq, Debug)]
595pub enum CharScopeContext {
596 /// Character classification for completion queries.
597 ///
598 /// This context treats certain characters as word constituents that would
599 /// normally be considered punctuation, such as '-' in Tailwind classes
600 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
601 Completion,
602 /// Character classification for linked edits.
603 ///
604 /// This context handles characters that should be treated as part of
605 /// identifiers during linked editing operations, such as '.' in JSX
606 /// component names like `<Animated.View>`.
607 LinkedEdit,
608}
609
610/// A runnable is a set of data about a region that could be resolved into a task
611pub struct Runnable {
612 pub tags: SmallVec<[RunnableTag; 1]>,
613 pub language: Arc<Language>,
614 pub buffer: BufferId,
615}
616
617#[derive(Default, Clone, Debug)]
618pub struct HighlightedText {
619 pub text: SharedString,
620 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
621}
622
623#[derive(Default, Debug)]
624struct HighlightedTextBuilder {
625 pub text: String,
626 highlights: Vec<(Range<usize>, HighlightStyle)>,
627}
628
629impl HighlightedText {
630 pub fn from_buffer_range<T: ToOffset>(
631 range: Range<T>,
632 snapshot: &text::BufferSnapshot,
633 syntax_snapshot: &SyntaxSnapshot,
634 override_style: Option<HighlightStyle>,
635 syntax_theme: &SyntaxTheme,
636 ) -> Self {
637 let mut highlighted_text = HighlightedTextBuilder::default();
638 highlighted_text.add_text_from_buffer_range(
639 range,
640 snapshot,
641 syntax_snapshot,
642 override_style,
643 syntax_theme,
644 );
645 highlighted_text.build()
646 }
647
648 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
649 gpui::StyledText::new(self.text.clone())
650 .with_default_highlights(default_style, self.highlights.iter().cloned())
651 }
652
653 /// Returns the first line of the text, with leading whitespace trimmed unless a
654 /// highlight begins inside it, along with a boolean indicating whether more lines follow.
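///
/// A small illustrative sketch (the input values are made up):
///
/// ```ignore
/// let highlighted = HighlightedText {
///     text: SharedString::new("    let x = 1;\nlet y = 2;"),
///     highlights: Vec::new(),
/// };
/// let (preview, has_more_lines) = highlighted.first_line_preview();
/// // Leading whitespace is trimmed because no highlight starts inside it.
/// assert_eq!(preview.text.as_ref(), "let x = 1;");
/// assert!(has_more_lines);
/// ```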
655 pub fn first_line_preview(self) -> (Self, bool) {
656 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
657 let first_line = &self.text[..newline_ix];
658
659 // Trim leading whitespace, unless an edit starts prior to it.
660 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
661 if let Some((first_highlight_range, _)) = self.highlights.first() {
662 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
663 }
664
665 let preview_text = &first_line[preview_start_ix..];
666 let preview_highlights = self
667 .highlights
668 .into_iter()
669 .skip_while(|(range, _)| range.end <= preview_start_ix)
670 .take_while(|(range, _)| range.start < newline_ix)
671 .filter_map(|(mut range, highlight)| {
672 range.start = range.start.saturating_sub(preview_start_ix);
673 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
674 if range.is_empty() {
675 None
676 } else {
677 Some((range, highlight))
678 }
679 });
680
681 let preview = Self {
682 text: SharedString::new(preview_text),
683 highlights: preview_highlights.collect(),
684 };
685
686 (preview, self.text.len() > newline_ix)
687 }
688}
689
690impl HighlightedTextBuilder {
691 pub fn build(self) -> HighlightedText {
692 HighlightedText {
693 text: self.text.into(),
694 highlights: self.highlights,
695 }
696 }
697
698 pub fn add_text_from_buffer_range<T: ToOffset>(
699 &mut self,
700 range: Range<T>,
701 snapshot: &text::BufferSnapshot,
702 syntax_snapshot: &SyntaxSnapshot,
703 override_style: Option<HighlightStyle>,
704 syntax_theme: &SyntaxTheme,
705 ) {
706 let range = range.to_offset(snapshot);
707 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
708 let start = self.text.len();
709 self.text.push_str(chunk.text);
710 let end = self.text.len();
711
712 if let Some(highlight_style) = chunk
713 .syntax_highlight_id
714 .and_then(|id| id.style(syntax_theme))
715 {
716 let highlight_style = override_style.map_or(highlight_style, |override_style| {
717 highlight_style.highlight(override_style)
718 });
719 self.highlights.push((start..end, highlight_style));
720 } else if let Some(override_style) = override_style {
721 self.highlights.push((start..end, override_style));
722 }
723 }
724 }
725
726 fn highlighted_chunks<'a>(
727 range: Range<usize>,
728 snapshot: &'a text::BufferSnapshot,
729 syntax_snapshot: &'a SyntaxSnapshot,
730 ) -> BufferChunks<'a> {
731 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
732 grammar
733 .highlights_config
734 .as_ref()
735 .map(|config| &config.query)
736 });
737
738 let highlight_maps = captures
739 .grammars()
740 .iter()
741 .map(|grammar| grammar.highlight_map())
742 .collect();
743
744 BufferChunks::new(
745 snapshot.as_rope(),
746 range,
747 Some((captures, highlight_maps)),
748 false,
749 None,
750 )
751 }
752}
753
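/// A pair of buffer snapshots: the text as it was, and the text with a set of
/// proposed edits applied, used to render a preview of those edits without
/// committing them to the buffer.
///
/// A minimal sketch of how it is produced and consumed (assumes `buffer: &Buffer`,
/// `edits: Arc<[(Range<Anchor>, Arc<str>)]>`, a current `snapshot: BufferSnapshot`,
/// and an `App` context `cx`; the returned task must be awaited on the async side):
///
/// ```ignore
/// let preview_task = buffer.preview_edits(edits.clone(), cx); // Task<EditPreview>
/// // ... later, with `preview: EditPreview` resolved from the task:
/// let unified: Option<String> = preview.as_unified_diff(&edits);
/// let highlighted = preview.highlight_edits(&snapshot, &edits, /* include_deletions */ true, cx);
/// ```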
754#[derive(Clone)]
755pub struct EditPreview {
756 old_snapshot: text::BufferSnapshot,
757 applied_edits_snapshot: text::BufferSnapshot,
758 syntax_snapshot: SyntaxSnapshot,
759}
760
761impl EditPreview {
762 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
763 let (first, _) = edits.first()?;
764 let (last, _) = edits.last()?;
765
766 let start = first.start.to_point(&self.old_snapshot);
767 let old_end = last.end.to_point(&self.old_snapshot);
768 let new_end = last
769 .end
770 .bias_right(&self.old_snapshot)
771 .to_point(&self.applied_edits_snapshot);
772
773 let start = Point::new(start.row.saturating_sub(3), 0);
774 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
775 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
776
777 Some(unified_diff(
778 &self
779 .old_snapshot
780 .text_for_range(start..old_end)
781 .collect::<String>(),
782 &self
783 .applied_edits_snapshot
784 .text_for_range(start..new_end)
785 .collect::<String>(),
786 ))
787 }
788
789 pub fn highlight_edits(
790 &self,
791 current_snapshot: &BufferSnapshot,
792 edits: &[(Range<Anchor>, impl AsRef<str>)],
793 include_deletions: bool,
794 cx: &App,
795 ) -> HighlightedText {
796 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
797 return HighlightedText::default();
798 };
799
800 let mut highlighted_text = HighlightedTextBuilder::default();
801
802 let visible_range_in_preview_snapshot =
803 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
804 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
805
806 let insertion_highlight_style = HighlightStyle {
807 background_color: Some(cx.theme().status().created_background),
808 ..Default::default()
809 };
810 let deletion_highlight_style = HighlightStyle {
811 background_color: Some(cx.theme().status().deleted_background),
812 ..Default::default()
813 };
814 let syntax_theme = cx.theme().syntax();
815
816 for (range, edit_text) in edits {
817 let edit_new_end_in_preview_snapshot = range
818 .end
819 .bias_right(&self.old_snapshot)
820 .to_offset(&self.applied_edits_snapshot);
821 let edit_start_in_preview_snapshot =
822 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
823
824 let unchanged_range_in_preview_snapshot =
825 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
826 if !unchanged_range_in_preview_snapshot.is_empty() {
827 highlighted_text.add_text_from_buffer_range(
828 unchanged_range_in_preview_snapshot,
829 &self.applied_edits_snapshot,
830 &self.syntax_snapshot,
831 None,
832 syntax_theme,
833 );
834 }
835
836 let range_in_current_snapshot = range.to_offset(current_snapshot);
837 if include_deletions && !range_in_current_snapshot.is_empty() {
838 highlighted_text.add_text_from_buffer_range(
839 range_in_current_snapshot,
840 &current_snapshot.text,
841 &current_snapshot.syntax,
842 Some(deletion_highlight_style),
843 syntax_theme,
844 );
845 }
846
847 if !edit_text.as_ref().is_empty() {
848 highlighted_text.add_text_from_buffer_range(
849 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
850 &self.applied_edits_snapshot,
851 &self.syntax_snapshot,
852 Some(insertion_highlight_style),
853 syntax_theme,
854 );
855 }
856
857 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
858 }
859
860 highlighted_text.add_text_from_buffer_range(
861 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
862 &self.applied_edits_snapshot,
863 &self.syntax_snapshot,
864 None,
865 syntax_theme,
866 );
867
868 highlighted_text.build()
869 }
870
871 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
872 cx.new(|cx| {
873 let mut buffer = Buffer::local_normalized(
874 self.applied_edits_snapshot.as_rope().clone(),
875 self.applied_edits_snapshot.line_ending(),
876 cx,
877 );
878 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
879 buffer
880 })
881 }
882
883 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
884 let (first, _) = edits.first()?;
885 let (last, _) = edits.last()?;
886
887 let start = first
888 .start
889 .bias_left(&self.old_snapshot)
890 .to_point(&self.applied_edits_snapshot);
891 let end = last
892 .end
893 .bias_right(&self.old_snapshot)
894 .to_point(&self.applied_edits_snapshot);
895
896 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
897 let range = Point::new(start.row, 0)
898 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
899
900 Some(range)
901 }
902}
903
904#[derive(Clone, Debug, PartialEq, Eq)]
905pub struct BracketMatch<T> {
906 pub open_range: Range<T>,
907 pub close_range: Range<T>,
908 pub newline_only: bool,
909 pub syntax_layer_depth: usize,
910 pub color_index: Option<usize>,
911}
912
913impl<T> BracketMatch<T> {
914 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
915 (self.open_range, self.close_range)
916 }
917}
918
919impl Buffer {
920 /// Create a new buffer with the given base text.
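///
/// A minimal sketch of constructing one as a GPUI entity (assumes an `App`
/// context `cx` is available):
///
/// ```ignore
/// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
/// ```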
921 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
922 Self::build(
923 TextBuffer::new(
924 ReplicaId::LOCAL,
925 cx.entity_id().as_non_zero_u64().into(),
926 base_text.into(),
927 ),
928 None,
929 Capability::ReadWrite,
930 )
931 }
932
933 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
934 pub fn local_normalized(
935 base_text_normalized: Rope,
936 line_ending: LineEnding,
937 cx: &Context<Self>,
938 ) -> Self {
939 Self::build(
940 TextBuffer::new_normalized(
941 ReplicaId::LOCAL,
942 cx.entity_id().as_non_zero_u64().into(),
943 line_ending,
944 base_text_normalized,
945 ),
946 None,
947 Capability::ReadWrite,
948 )
949 }
950
951 /// Create a new buffer that is a replica of a remote buffer.
952 pub fn remote(
953 remote_id: BufferId,
954 replica_id: ReplicaId,
955 capability: Capability,
956 base_text: impl Into<String>,
957 ) -> Self {
958 Self::build(
959 TextBuffer::new(replica_id, remote_id, base_text.into()),
960 None,
961 capability,
962 )
963 }
964
965 /// Create a new buffer that is a replica of a remote buffer, populating its
966 /// state from the given protobuf message.
967 pub fn from_proto(
968 replica_id: ReplicaId,
969 capability: Capability,
970 message: proto::BufferState,
971 file: Option<Arc<dyn File>>,
972 ) -> Result<Self> {
973 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
974 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
975 let mut this = Self::build(buffer, file, capability);
976 this.text.set_line_ending(proto::deserialize_line_ending(
977 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
978 ));
979 this.saved_version = proto::deserialize_version(&message.saved_version);
980 this.saved_mtime = message.saved_mtime.map(|time| time.into());
981 Ok(this)
982 }
983
984 /// Serialize the buffer's state to a protobuf message.
985 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
986 proto::BufferState {
987 id: self.remote_id().into(),
988 file: self.file.as_ref().map(|f| f.to_proto(cx)),
989 base_text: self.base_text().to_string(),
990 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
991 saved_version: proto::serialize_version(&self.saved_version),
992 saved_mtime: self.saved_mtime.map(|time| time.into()),
993 }
994 }
995
996 /// Serialize as protobufs all of the changes to the buffer since the given version.
997 pub fn serialize_ops(
998 &self,
999 since: Option<clock::Global>,
1000 cx: &App,
1001 ) -> Task<Vec<proto::Operation>> {
1002 let mut operations = Vec::new();
1003 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1004
1005 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1006 proto::serialize_operation(&Operation::UpdateSelections {
1007 selections: set.selections.clone(),
1008 lamport_timestamp: set.lamport_timestamp,
1009 line_mode: set.line_mode,
1010 cursor_shape: set.cursor_shape,
1011 })
1012 }));
1013
1014 for (server_id, diagnostics) in &self.diagnostics {
1015 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1016 lamport_timestamp: self.diagnostics_timestamp,
1017 server_id: *server_id,
1018 diagnostics: diagnostics.iter().cloned().collect(),
1019 }));
1020 }
1021
1022 for (server_id, completions) in &self.completion_triggers_per_language_server {
1023 operations.push(proto::serialize_operation(
1024 &Operation::UpdateCompletionTriggers {
1025 triggers: completions.iter().cloned().collect(),
1026 lamport_timestamp: self.completion_triggers_timestamp,
1027 server_id: *server_id,
1028 },
1029 ));
1030 }
1031
1032 let text_operations = self.text.operations().clone();
1033 cx.background_spawn(async move {
1034 let since = since.unwrap_or_default();
1035 operations.extend(
1036 text_operations
1037 .iter()
1038 .filter(|(_, op)| !since.observed(op.timestamp()))
1039 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1040 );
1041 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1042 operations
1043 })
1044 }
1045
1046 /// Assign a language to the buffer, returning the buffer.
1047 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1048 self.set_language_async(Some(language), cx);
1049 self
1050 }
1051
1052 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
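///
/// A small sketch, assuming `rust_language: Arc<Language>` has already been loaded:
///
/// ```ignore
/// let buffer = cx.new(|cx| {
///     Buffer::local("fn main() {}\n", cx).with_language(rust_language.clone(), cx)
/// });
/// ```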
1053 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1054 self.set_language(Some(language), cx);
1055 self
1056 }
1057
1058 /// Returns the [`Capability`] of this buffer.
1059 pub fn capability(&self) -> Capability {
1060 self.capability
1061 }
1062
1063 /// Whether this buffer can only be read.
1064 pub fn read_only(&self) -> bool {
1065 self.capability == Capability::ReadOnly
1066 }
1067
1068 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
1069 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1070 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1071 let snapshot = buffer.snapshot();
1072 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1073 let tree_sitter_data = TreeSitterData::new(snapshot);
1074 Self {
1075 saved_mtime,
1076 tree_sitter_data: Arc::new(tree_sitter_data),
1077 saved_version: buffer.version(),
1078 preview_version: buffer.version(),
1079 reload_task: None,
1080 transaction_depth: 0,
1081 was_dirty_before_starting_transaction: None,
1082 has_unsaved_edits: Cell::new((buffer.version(), false)),
1083 text: buffer,
1084 branch_state: None,
1085 file,
1086 capability,
1087 syntax_map,
1088 reparse: None,
1089 non_text_state_update_count: 0,
1090 sync_parse_timeout: Duration::from_millis(1),
1091 parse_status: watch::channel(ParseStatus::Idle),
1092 autoindent_requests: Default::default(),
1093 wait_for_autoindent_txs: Default::default(),
1094 pending_autoindent: Default::default(),
1095 language: None,
1096 remote_selections: Default::default(),
1097 diagnostics: Default::default(),
1098 diagnostics_timestamp: Lamport::MIN,
1099 completion_triggers: Default::default(),
1100 completion_triggers_per_language_server: Default::default(),
1101 completion_triggers_timestamp: Lamport::MIN,
1102 deferred_ops: OperationQueue::new(),
1103 has_conflict: false,
1104 change_bits: Default::default(),
1105 _subscriptions: Vec::new(),
1106 encoding: encoding_rs::UTF_8,
1107 has_bom: false,
1108 }
1109 }
1110
1111 pub fn build_snapshot(
1112 text: Rope,
1113 language: Option<Arc<Language>>,
1114 language_registry: Option<Arc<LanguageRegistry>>,
1115 cx: &mut App,
1116 ) -> impl Future<Output = BufferSnapshot> + use<> {
1117 let entity_id = cx.reserve_entity::<Self>().entity_id();
1118 let buffer_id = entity_id.as_non_zero_u64().into();
1119 async move {
1120 let text =
1121 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1122 .snapshot();
1123 let mut syntax = SyntaxMap::new(&text).snapshot();
1124 if let Some(language) = language.clone() {
1125 let language_registry = language_registry.clone();
1126 syntax.reparse(&text, language_registry, language);
1127 }
1128 let tree_sitter_data = TreeSitterData::new(text.clone());
1129 BufferSnapshot {
1130 text,
1131 syntax,
1132 file: None,
1133 diagnostics: Default::default(),
1134 remote_selections: Default::default(),
1135 tree_sitter_data: Arc::new(tree_sitter_data),
1136 language,
1137 non_text_state_update_count: 0,
1138 }
1139 }
1140 }
1141
1142 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1143 let entity_id = cx.reserve_entity::<Self>().entity_id();
1144 let buffer_id = entity_id.as_non_zero_u64().into();
1145 let text = TextBuffer::new_normalized(
1146 ReplicaId::LOCAL,
1147 buffer_id,
1148 Default::default(),
1149 Rope::new(),
1150 )
1151 .snapshot();
1152 let syntax = SyntaxMap::new(&text).snapshot();
1153 let tree_sitter_data = TreeSitterData::new(text.clone());
1154 BufferSnapshot {
1155 text,
1156 syntax,
1157 tree_sitter_data: Arc::new(tree_sitter_data),
1158 file: None,
1159 diagnostics: Default::default(),
1160 remote_selections: Default::default(),
1161 language: None,
1162 non_text_state_update_count: 0,
1163 }
1164 }
1165
1166 #[cfg(any(test, feature = "test-support"))]
1167 pub fn build_snapshot_sync(
1168 text: Rope,
1169 language: Option<Arc<Language>>,
1170 language_registry: Option<Arc<LanguageRegistry>>,
1171 cx: &mut App,
1172 ) -> BufferSnapshot {
1173 let entity_id = cx.reserve_entity::<Self>().entity_id();
1174 let buffer_id = entity_id.as_non_zero_u64().into();
1175 let text =
1176 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1177 .snapshot();
1178 let mut syntax = SyntaxMap::new(&text).snapshot();
1179 if let Some(language) = language.clone() {
1180 syntax.reparse(&text, language_registry, language);
1181 }
1182 let tree_sitter_data = TreeSitterData::new(text.clone());
1183 BufferSnapshot {
1184 text,
1185 syntax,
1186 tree_sitter_data: Arc::new(tree_sitter_data),
1187 file: None,
1188 diagnostics: Default::default(),
1189 remote_selections: Default::default(),
1190 language,
1191 non_text_state_update_count: 0,
1192 }
1193 }
1194
1195 /// Retrieve a snapshot of the buffer's current state. This is computationally
1196 /// cheap, and allows reading from the buffer on a background thread.
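///
/// A sketch of the intended pattern: capture the snapshot on the main thread,
/// then do the expensive reading off the main thread (assumes `buffer` and an
/// `App` context `cx` are in scope):
///
/// ```ignore
/// let snapshot = buffer.read(cx).snapshot();
/// cx.background_spawn(async move {
///     // The snapshot is immutable, so this can run concurrently with
///     // further edits to the buffer.
///     let line_count = snapshot.max_point().row + 1;
///     // ...
/// })
/// .detach();
/// ```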
1197 pub fn snapshot(&self) -> BufferSnapshot {
1198 let text = self.text.snapshot();
1199 let mut syntax_map = self.syntax_map.lock();
1200 syntax_map.interpolate(&text);
1201 let syntax = syntax_map.snapshot();
1202
1203 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1204 Arc::new(TreeSitterData::new(text.clone()))
1205 } else {
1206 self.tree_sitter_data.clone()
1207 };
1208
1209 BufferSnapshot {
1210 text,
1211 syntax,
1212 tree_sitter_data,
1213 file: self.file.clone(),
1214 remote_selections: self.remote_selections.clone(),
1215 diagnostics: self.diagnostics.clone(),
1216 language: self.language.clone(),
1217 non_text_state_update_count: self.non_text_state_update_count,
1218 }
1219 }
1220
1221 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1222 let this = cx.entity();
1223 cx.new(|cx| {
1224 let mut branch = Self {
1225 branch_state: Some(BufferBranchState {
1226 base_buffer: this.clone(),
1227 merged_operations: Default::default(),
1228 }),
1229 language: self.language.clone(),
1230 has_conflict: self.has_conflict,
1231 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1232 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1233 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1234 };
1235 if let Some(language_registry) = self.language_registry() {
1236 branch.set_language_registry(language_registry);
1237 }
1238
1239 // Reparse the branch buffer so that we get syntax highlighting immediately.
1240 branch.reparse(cx, true);
1241
1242 branch
1243 })
1244 }
1245
1246 pub fn preview_edits(
1247 &self,
1248 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1249 cx: &App,
1250 ) -> Task<EditPreview> {
1251 let registry = self.language_registry();
1252 let language = self.language().cloned();
1253 let old_snapshot = self.text.snapshot();
1254 let mut branch_buffer = self.text.branch();
1255 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1256 cx.background_spawn(async move {
1257 if !edits.is_empty() {
1258 if let Some(language) = language.clone() {
1259 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1260 }
1261
1262 branch_buffer.edit(edits.iter().cloned());
1263 let snapshot = branch_buffer.snapshot();
1264 syntax_snapshot.interpolate(&snapshot);
1265
1266 if let Some(language) = language {
1267 syntax_snapshot.reparse(&snapshot, registry, language);
1268 }
1269 }
1270 EditPreview {
1271 old_snapshot,
1272 applied_edits_snapshot: branch_buffer.snapshot(),
1273 syntax_snapshot,
1274 }
1275 })
1276 }
1277
1278 /// Applies all of the changes in this buffer that intersect any of the
1279 /// given `ranges` to its base buffer.
1280 ///
1281 /// If `ranges` is empty, then all changes will be applied. This buffer must
1282 /// be a branch buffer to call this method.
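///
/// A small sketch of the branch/merge flow (assumes `buffer: Entity<Buffer>`
/// and an `App` context `cx`):
///
/// ```ignore
/// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
/// branch.update(cx, |branch, cx| {
///     branch.edit([(0..0, "// preview this change\n")], None, cx);
///     // An empty `ranges` vec merges every change back into the base buffer.
///     branch.merge_into_base(Vec::new(), cx);
/// });
/// ```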
1283 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1284 let Some(base_buffer) = self.base_buffer() else {
1285 debug_panic!("not a branch buffer");
1286 return;
1287 };
1288
1289 let mut ranges = if ranges.is_empty() {
1290 &[0..usize::MAX]
1291 } else {
1292 ranges.as_slice()
1293 }
1294 .iter()
1295 .peekable();
1296
1297 let mut edits = Vec::new();
1298 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1299 let mut is_included = false;
1300 while let Some(range) = ranges.peek() {
1301 if range.end < edit.new.start {
1302 ranges.next().unwrap();
1303 } else {
1304 if range.start <= edit.new.end {
1305 is_included = true;
1306 }
1307 break;
1308 }
1309 }
1310
1311 if is_included {
1312 edits.push((
1313 edit.old.clone(),
1314 self.text_for_range(edit.new.clone()).collect::<String>(),
1315 ));
1316 }
1317 }
1318
1319 let operation = base_buffer.update(cx, |base_buffer, cx| {
1320 // cx.emit(BufferEvent::DiffBaseChanged);
1321 base_buffer.edit(edits, None, cx)
1322 });
1323
1324 if let Some(operation) = operation
1325 && let Some(BufferBranchState {
1326 merged_operations, ..
1327 }) = &mut self.branch_state
1328 {
1329 merged_operations.push(operation);
1330 }
1331 }
1332
1333 fn on_base_buffer_event(
1334 &mut self,
1335 _: Entity<Buffer>,
1336 event: &BufferEvent,
1337 cx: &mut Context<Self>,
1338 ) {
1339 let BufferEvent::Operation { operation, .. } = event else {
1340 return;
1341 };
1342 let Some(BufferBranchState {
1343 merged_operations, ..
1344 }) = &mut self.branch_state
1345 else {
1346 return;
1347 };
1348
1349 let mut operation_to_undo = None;
1350 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1351 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1352 {
1353 merged_operations.remove(ix);
1354 operation_to_undo = Some(operation.timestamp);
1355 }
1356
1357 self.apply_ops([operation.clone()], cx);
1358
1359 if let Some(timestamp) = operation_to_undo {
1360 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1361 self.undo_operations(counts, cx);
1362 }
1363 }
1364
1365 #[cfg(test)]
1366 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1367 &self.text
1368 }
1369
1370 /// Retrieve a snapshot of the buffer's raw text, without any
1371 /// language-related state like the syntax tree or diagnostics.
1372 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1373 self.text.snapshot()
1374 }
1375
1376 /// The file associated with the buffer, if any.
1377 pub fn file(&self) -> Option<&Arc<dyn File>> {
1378 self.file.as_ref()
1379 }
1380
1381 /// The version of the buffer that was last saved or reloaded from disk.
1382 pub fn saved_version(&self) -> &clock::Global {
1383 &self.saved_version
1384 }
1385
1386 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1387 pub fn saved_mtime(&self) -> Option<MTime> {
1388 self.saved_mtime
1389 }
1390
1391 /// Returns the character encoding of the buffer's file.
1392 pub fn encoding(&self) -> &'static Encoding {
1393 self.encoding
1394 }
1395
1396 /// Sets the character encoding of the buffer.
1397 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1398 self.encoding = encoding;
1399 }
1400
1401 /// Returns whether the buffer has a Byte Order Mark.
1402 pub fn has_bom(&self) -> bool {
1403 self.has_bom
1404 }
1405
1406 /// Sets whether the buffer has a Byte Order Mark.
1407 pub fn set_has_bom(&mut self, has_bom: bool) {
1408 self.has_bom = has_bom;
1409 }
1410
1411 /// Assign a language to the buffer.
1412 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1413 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1414 }
1415
1416 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1417 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1418 self.set_language_(language, true, cx);
1419 }
1420
1421 fn set_language_(
1422 &mut self,
1423 language: Option<Arc<Language>>,
1424 may_block: bool,
1425 cx: &mut Context<Self>,
1426 ) {
1427 self.non_text_state_update_count += 1;
1428 self.syntax_map.lock().clear(&self.text);
1429 let old_language = std::mem::replace(&mut self.language, language);
1430 self.was_changed();
1431 self.reparse(cx, may_block);
1432 let has_fresh_language =
1433 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1434 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1435 }
1436
1437 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1438 /// other languages if parts of the buffer are written in different languages.
1439 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1440 self.syntax_map
1441 .lock()
1442 .set_language_registry(language_registry);
1443 }
1444
1445 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1446 self.syntax_map.lock().language_registry()
1447 }
1448
1449 /// Assign the line ending type to the buffer.
1450 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1451 self.text.set_line_ending(line_ending);
1452
1453 let lamport_timestamp = self.text.lamport_clock.tick();
1454 self.send_operation(
1455 Operation::UpdateLineEnding {
1456 line_ending,
1457 lamport_timestamp,
1458 },
1459 true,
1460 cx,
1461 );
1462 }
1463
1464 /// Assign the buffer a new [`Capability`].
1465 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1466 if self.capability != capability {
1467 self.capability = capability;
1468 cx.emit(BufferEvent::CapabilityChanged)
1469 }
1470 }
1471
1472 /// This method is called to signal that the buffer has been saved.
1473 pub fn did_save(
1474 &mut self,
1475 version: clock::Global,
1476 mtime: Option<MTime>,
1477 cx: &mut Context<Self>,
1478 ) {
1479 self.saved_version = version.clone();
1480 self.has_unsaved_edits.set((version, false));
1481 self.has_conflict = false;
1482 self.saved_mtime = mtime;
1483 self.was_changed();
1484 cx.emit(BufferEvent::Saved);
1485 cx.notify();
1486 }
1487
1488 /// Reloads the contents of the buffer from disk.
1489 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1490 let (tx, rx) = futures::channel::oneshot::channel();
1491 let prev_version = self.text.version();
1492 self.reload_task = Some(cx.spawn(async move |this, cx| {
1493 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1494 let file = this.file.as_ref()?.as_local()?;
1495
1496 Some((file.disk_state().mtime(), file.load(cx)))
1497 })?
1498 else {
1499 return Ok(());
1500 };
1501
1502 let new_text = new_text.await?;
1503 let diff = this
1504 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1505 .await;
1506 this.update(cx, |this, cx| {
1507 if this.version() == diff.base_version {
1508 this.finalize_last_transaction();
1509 this.apply_diff(diff, cx);
1510 tx.send(this.finalize_last_transaction().cloned()).ok();
1511 this.has_conflict = false;
1512 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1513 } else {
1514 if !diff.edits.is_empty()
1515 || this
1516 .edits_since::<usize>(&diff.base_version)
1517 .next()
1518 .is_some()
1519 {
1520 this.has_conflict = true;
1521 }
1522
1523 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1524 }
1525
1526 this.reload_task.take();
1527 })
1528 }));
1529 rx
1530 }
1531
1532 /// This method is called to signal that the buffer has been reloaded.
1533 pub fn did_reload(
1534 &mut self,
1535 version: clock::Global,
1536 line_ending: LineEnding,
1537 mtime: Option<MTime>,
1538 cx: &mut Context<Self>,
1539 ) {
1540 self.saved_version = version;
1541 self.has_unsaved_edits
1542 .set((self.saved_version.clone(), false));
1543 self.text.set_line_ending(line_ending);
1544 self.saved_mtime = mtime;
1545 cx.emit(BufferEvent::Reloaded);
1546 cx.notify();
1547 }
1548
1549 /// Updates the [`File`] backing this buffer. This should be called when
1550 /// the file has changed or has been deleted.
1551 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1552 let was_dirty = self.is_dirty();
1553 let mut file_changed = false;
1554
1555 if let Some(old_file) = self.file.as_ref() {
1556 if new_file.path() != old_file.path() {
1557 file_changed = true;
1558 }
1559
1560 let old_state = old_file.disk_state();
1561 let new_state = new_file.disk_state();
1562 if old_state != new_state {
1563 file_changed = true;
1564 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1565 cx.emit(BufferEvent::ReloadNeeded)
1566 }
1567 }
1568 } else {
1569 file_changed = true;
1570 };
1571
1572 self.file = Some(new_file);
1573 if file_changed {
1574 self.was_changed();
1575 self.non_text_state_update_count += 1;
1576 if was_dirty != self.is_dirty() {
1577 cx.emit(BufferEvent::DirtyChanged);
1578 }
1579 cx.emit(BufferEvent::FileHandleChanged);
1580 cx.notify();
1581 }
1582 }
1583
1584 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1585 Some(self.branch_state.as_ref()?.base_buffer.clone())
1586 }
1587
1588 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1589 pub fn language(&self) -> Option<&Arc<Language>> {
1590 self.language.as_ref()
1591 }
1592
1593 /// Returns the [`Language`] at the given location.
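///
/// For buffers with injected languages (e.g. a fenced code block inside a
/// Markdown document), this returns the innermost language at that position,
/// falling back to the buffer's primary language. A small sketch, assuming the
/// offsets are known:
///
/// ```ignore
/// // With a Rust block injected into a Markdown buffer:
/// let at_prose = buffer.language_at(offset_in_prose);      // -> Markdown
/// let at_code = buffer.language_at(offset_in_code_block);  // -> Rust
/// ```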
1594 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1595 let offset = position.to_offset(self);
1596 let mut is_first = true;
1597 let start_anchor = self.anchor_before(offset);
1598 let end_anchor = self.anchor_after(offset);
1599 self.syntax_map
1600 .lock()
1601 .layers_for_range(offset..offset, &self.text, false)
1602 .filter(|layer| {
1603 if is_first {
1604 is_first = false;
1605 return true;
1606 }
1607
1608 layer
1609 .included_sub_ranges
1610 .map(|sub_ranges| {
1611 sub_ranges.iter().any(|sub_range| {
1612 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1613 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1614 !is_before_start && !is_after_end
1615 })
1616 })
1617 .unwrap_or(true)
1618 })
1619 .last()
1620 .map(|info| info.language.clone())
1621 .or_else(|| self.language.clone())
1622 }
1623
1624 /// Returns each [`Language`] for the active syntax layers at the given location.
1625 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1626 let offset = position.to_offset(self);
1627 let mut languages: Vec<Arc<Language>> = self
1628 .syntax_map
1629 .lock()
1630 .layers_for_range(offset..offset, &self.text, false)
1631 .map(|info| info.language.clone())
1632 .collect();
1633
1634 if languages.is_empty()
1635 && let Some(buffer_language) = self.language()
1636 {
1637 languages.push(buffer_language.clone());
1638 }
1639
1640 languages
1641 }
1642
1643 /// An integer version number that accounts for all updates besides
1644 /// the buffer's text itself (which is versioned via a version vector).
1645 pub fn non_text_state_update_count(&self) -> usize {
1646 self.non_text_state_update_count
1647 }
1648
1649 /// Whether the buffer is being parsed in the background.
1650 #[cfg(any(test, feature = "test-support"))]
1651 pub fn is_parsing(&self) -> bool {
1652 self.reparse.is_some()
1653 }
1654
1655 /// Indicates whether the buffer contains any regions that may be
1656 /// written in a language that hasn't been loaded yet.
1657 pub fn contains_unknown_injections(&self) -> bool {
1658 self.syntax_map.lock().contains_unknown_injections()
1659 }
1660
1661 #[cfg(any(test, feature = "test-support"))]
1662 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1663 self.sync_parse_timeout = timeout;
1664 }
1665
1666 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1667 match Arc::get_mut(&mut self.tree_sitter_data) {
1668 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1669 None => {
1670 let tree_sitter_data = TreeSitterData::new(snapshot);
1671 self.tree_sitter_data = Arc::new(tree_sitter_data)
1672 }
1673 }
1674 }
1675
1676 /// Called after an edit to synchronize the buffer's main parse tree with
1677 /// the buffer's new underlying state.
1678 ///
1679 /// Locks the syntax map and interpolates the edits since the last reparse
1680 /// into the foreground syntax tree.
1681 ///
1682 /// Then takes a stable snapshot of the syntax map before unlocking it.
1683 /// The snapshot with the interpolated edits is sent to a background thread,
1684 /// where we ask Tree-sitter to perform an incremental parse.
1685 ///
1686 /// Meanwhile, in the foreground, if `may_block` is true, we block the main
1687 /// thread for up to 1ms waiting for the parse to complete, and proceed
1688 /// synchronously if it finishes within that window.
1689 ///
1690 /// If we time out waiting for the parse, we spawn a second task that waits
1691 /// for the parse to complete, and return with the interpolated tree still in
1692 /// the foreground. When the background parse completes, we call back into
1693 /// the main thread and assign it as the foreground parse state.
1694 ///
1695 /// If the buffer or grammar changed since the start of the background parse,
1696 /// initiate an additional reparse recursively. To avoid concurrent parses
1697 /// for the same buffer, we only initiate a new parse if we are not already
1698 /// parsing in the background.
1699 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1700 if self.text.version() != *self.tree_sitter_data.version() {
1701 self.invalidate_tree_sitter_data(self.text.snapshot());
1702 }
1703 if self.reparse.is_some() {
1704 return;
1705 }
1706 let language = if let Some(language) = self.language.clone() {
1707 language
1708 } else {
1709 return;
1710 };
1711
1712 let text = self.text_snapshot();
1713 let parsed_version = self.version();
1714
1715 let mut syntax_map = self.syntax_map.lock();
1716 syntax_map.interpolate(&text);
1717 let language_registry = syntax_map.language_registry();
1718 let mut syntax_snapshot = syntax_map.snapshot();
1719 drop(syntax_map);
1720
1721 let parse_task = cx.background_spawn({
1722 let language = language.clone();
1723 let language_registry = language_registry.clone();
1724 async move {
1725 syntax_snapshot.reparse(&text, language_registry, language);
1726 syntax_snapshot
1727 }
1728 });
1729
1730 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1731 if may_block {
1732 match cx
1733 .background_executor()
1734 .block_with_timeout(self.sync_parse_timeout, parse_task)
1735 {
1736 Ok(new_syntax_snapshot) => {
1737 self.did_finish_parsing(new_syntax_snapshot, cx);
1738 self.reparse = None;
1739 }
1740 Err(parse_task) => {
1741 self.reparse = Some(cx.spawn(async move |this, cx| {
1742 let new_syntax_map = cx.background_spawn(parse_task).await;
1743 this.update(cx, move |this, cx| {
1744 let grammar_changed = || {
1745 this.language.as_ref().is_none_or(|current_language| {
1746 !Arc::ptr_eq(&language, current_language)
1747 })
1748 };
1749 let language_registry_changed = || {
1750 new_syntax_map.contains_unknown_injections()
1751 && language_registry.is_some_and(|registry| {
1752 registry.version()
1753 != new_syntax_map.language_registry_version()
1754 })
1755 };
1756 let parse_again = this.version.changed_since(&parsed_version)
1757 || language_registry_changed()
1758 || grammar_changed();
1759 this.did_finish_parsing(new_syntax_map, cx);
1760 this.reparse = None;
1761 if parse_again {
1762 this.reparse(cx, false);
1763 }
1764 })
1765 .ok();
1766 }));
1767 }
1768 }
1769 } else {
1770 self.reparse = Some(cx.spawn(async move |this, cx| {
1771 let new_syntax_map = cx.background_spawn(parse_task).await;
1772 this.update(cx, move |this, cx| {
1773 let grammar_changed = || {
1774 this.language.as_ref().is_none_or(|current_language| {
1775 !Arc::ptr_eq(&language, current_language)
1776 })
1777 };
1778 let language_registry_changed = || {
1779 new_syntax_map.contains_unknown_injections()
1780 && language_registry.is_some_and(|registry| {
1781 registry.version() != new_syntax_map.language_registry_version()
1782 })
1783 };
1784 let parse_again = this.version.changed_since(&parsed_version)
1785 || language_registry_changed()
1786 || grammar_changed();
1787 this.did_finish_parsing(new_syntax_map, cx);
1788 this.reparse = None;
1789 if parse_again {
1790 this.reparse(cx, false);
1791 }
1792 })
1793 .ok();
1794 }));
1795 }
1796 }
1797
1798 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1799 self.was_changed();
1800 self.non_text_state_update_count += 1;
1801 self.syntax_map.lock().did_parse(syntax_snapshot);
1802 self.request_autoindent(cx);
1803 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1804 if self.text.version() != *self.tree_sitter_data.version() {
1805 self.invalidate_tree_sitter_data(self.text.snapshot());
1806 }
1807 cx.emit(BufferEvent::Reparsed);
1808 cx.notify();
1809 }
1810
1811 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1812 self.parse_status.1.clone()
1813 }
1814
1815 /// Waits until the buffer is no longer parsing.
1816 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1817 let mut parse_status = self.parse_status();
1818 async move {
1819 while *parse_status.borrow() != ParseStatus::Idle {
1820 if parse_status.changed().await.is_err() {
1821 break;
1822 }
1823 }
1824 }
1825 }
1826
1827 /// Assign to the buffer a set of diagnostics created by a given language server.
1828 pub fn update_diagnostics(
1829 &mut self,
1830 server_id: LanguageServerId,
1831 diagnostics: DiagnosticSet,
1832 cx: &mut Context<Self>,
1833 ) {
1834 let lamport_timestamp = self.text.lamport_clock.tick();
1835 let op = Operation::UpdateDiagnostics {
1836 server_id,
1837 diagnostics: diagnostics.iter().cloned().collect(),
1838 lamport_timestamp,
1839 };
1840
1841 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1842 self.send_operation(op, true, cx);
1843 }
1844
1845 pub fn buffer_diagnostics(
1846 &self,
1847 for_server: Option<LanguageServerId>,
1848 ) -> Vec<&DiagnosticEntry<Anchor>> {
1849 match for_server {
1850 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1851 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1852 Err(_) => Vec::new(),
1853 },
1854 None => self
1855 .diagnostics
1856 .iter()
1857 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1858 .collect(),
1859 }
1860 }
1861
1862 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1863 if let Some(indent_sizes) = self.compute_autoindents() {
1864 let indent_sizes = cx.background_spawn(indent_sizes);
1865 match cx
1866 .background_executor()
1867 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1868 {
1869 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1870 Err(indent_sizes) => {
1871 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1872 let indent_sizes = indent_sizes.await;
1873 this.update(cx, |this, cx| {
1874 this.apply_autoindents(indent_sizes, cx);
1875 })
1876 .ok();
1877 }));
1878 }
1879 }
1880 } else {
1881 self.autoindent_requests.clear();
1882 for tx in self.wait_for_autoindent_txs.drain(..) {
1883 tx.send(()).ok();
1884 }
1885 }
1886 }
1887
1888 fn compute_autoindents(
1889 &self,
1890 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1891 let max_rows_between_yields = 100;
1892 let snapshot = self.snapshot();
1893 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1894 return None;
1895 }
1896
1897 let autoindent_requests = self.autoindent_requests.clone();
1898 Some(async move {
1899 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1900 for request in autoindent_requests {
1901 // Resolve each edited range to its row in the current buffer and in the
1902 // buffer before this batch of edits.
1903 let mut row_ranges = Vec::new();
1904 let mut old_to_new_rows = BTreeMap::new();
1905 let mut language_indent_sizes_by_new_row = Vec::new();
1906 for entry in &request.entries {
1907 let position = entry.range.start;
1908 let new_row = position.to_point(&snapshot).row;
1909 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1910 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1911
1912 if !entry.first_line_is_new {
1913 let old_row = position.to_point(&request.before_edit).row;
1914 old_to_new_rows.insert(old_row, new_row);
1915 }
1916 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1917 }
1918
1919 // Build a map containing the suggested indentation for each of the edited lines
1920 // with respect to the state of the buffer before these edits. This map is keyed
1921 // by the rows for these lines in the current state of the buffer.
1922 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1923 let old_edited_ranges =
1924 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1925 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1926 let mut language_indent_size = IndentSize::default();
1927 for old_edited_range in old_edited_ranges {
1928 let suggestions = request
1929 .before_edit
1930 .suggest_autoindents(old_edited_range.clone())
1931 .into_iter()
1932 .flatten();
1933 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1934 if let Some(suggestion) = suggestion {
1935 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1936
1937 // Find the indent size based on the language for this row.
1938 while let Some((row, size)) = language_indent_sizes.peek() {
1939 if *row > new_row {
1940 break;
1941 }
1942 language_indent_size = *size;
1943 language_indent_sizes.next();
1944 }
1945
1946 let suggested_indent = old_to_new_rows
1947 .get(&suggestion.basis_row)
1948 .and_then(|from_row| {
1949 Some(old_suggestions.get(from_row).copied()?.0)
1950 })
1951 .unwrap_or_else(|| {
1952 request
1953 .before_edit
1954 .indent_size_for_line(suggestion.basis_row)
1955 })
1956 .with_delta(suggestion.delta, language_indent_size);
1957 old_suggestions
1958 .insert(new_row, (suggested_indent, suggestion.within_error));
1959 }
1960 }
1961 yield_now().await;
1962 }
1963
1964 // Compute new suggestions for each line, but only include them in the result
1965 // if they differ from the old suggestion for that line.
1966 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1967 let mut language_indent_size = IndentSize::default();
1968 for (row_range, original_indent_column) in row_ranges {
1969 let new_edited_row_range = if request.is_block_mode {
1970 row_range.start..row_range.start + 1
1971 } else {
1972 row_range.clone()
1973 };
1974
1975 let suggestions = snapshot
1976 .suggest_autoindents(new_edited_row_range.clone())
1977 .into_iter()
1978 .flatten();
1979 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1980 if let Some(suggestion) = suggestion {
1981 // Find the indent size based on the language for this row.
1982 while let Some((row, size)) = language_indent_sizes.peek() {
1983 if *row > new_row {
1984 break;
1985 }
1986 language_indent_size = *size;
1987 language_indent_sizes.next();
1988 }
1989
1990 let suggested_indent = indent_sizes
1991 .get(&suggestion.basis_row)
1992 .copied()
1993 .map(|e| e.0)
1994 .unwrap_or_else(|| {
1995 snapshot.indent_size_for_line(suggestion.basis_row)
1996 })
1997 .with_delta(suggestion.delta, language_indent_size);
1998
1999 if old_suggestions.get(&new_row).is_none_or(
2000 |(old_indentation, was_within_error)| {
2001 suggested_indent != *old_indentation
2002 && (!suggestion.within_error || *was_within_error)
2003 },
2004 ) {
2005 indent_sizes.insert(
2006 new_row,
2007 (suggested_indent, request.ignore_empty_lines),
2008 );
2009 }
2010 }
2011 }
2012
2013 if let (true, Some(original_indent_column)) =
2014 (request.is_block_mode, original_indent_column)
2015 {
2016 let new_indent =
2017 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2018 *indent
2019 } else {
2020 snapshot.indent_size_for_line(row_range.start)
2021 };
2022 let delta = new_indent.len as i64 - original_indent_column as i64;
2023 if delta != 0 {
2024 for row in row_range.skip(1) {
2025 indent_sizes.entry(row).or_insert_with(|| {
2026 let mut size = snapshot.indent_size_for_line(row);
2027 if size.kind == new_indent.kind {
2028 match delta.cmp(&0) {
2029 Ordering::Greater => size.len += delta as u32,
2030 Ordering::Less => {
2031 size.len = size.len.saturating_sub(-delta as u32)
2032 }
2033 Ordering::Equal => {}
2034 }
2035 }
2036 (size, request.ignore_empty_lines)
2037 });
2038 }
2039 }
2040 }
2041
2042 yield_now().await;
2043 }
2044 }
2045
2046 indent_sizes
2047 .into_iter()
2048 .filter_map(|(row, (indent, ignore_empty_lines))| {
2049 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2050 None
2051 } else {
2052 Some((row, indent))
2053 }
2054 })
2055 .collect()
2056 })
2057 }
2058
2059 fn apply_autoindents(
2060 &mut self,
2061 indent_sizes: BTreeMap<u32, IndentSize>,
2062 cx: &mut Context<Self>,
2063 ) {
2064 self.autoindent_requests.clear();
2065 for tx in self.wait_for_autoindent_txs.drain(..) {
2066 tx.send(()).ok();
2067 }
2068
2069 let edits: Vec<_> = indent_sizes
2070 .into_iter()
2071 .filter_map(|(row, indent_size)| {
2072 let current_size = indent_size_for_line(self, row);
2073 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2074 })
2075 .collect();
2076
2077 let preserve_preview = self.preserve_preview();
2078 self.edit(edits, None, cx);
2079 if preserve_preview {
2080 self.refresh_preview();
2081 }
2082 }
2083
2084 /// Create a minimal edit that will cause the given row to be indented
2085 /// with the given size. After applying this edit, the length of the line
2086 /// will always be at least `new_size.len`.
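/// For example (a sketch, not a compiled doctest), growing a two-space
/// indent on row 3 to four spaces yields an insertion at the start of that row:
///
/// ```ignore
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// // => Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
/// ```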
2087 pub fn edit_for_indent_size_adjustment(
2088 row: u32,
2089 current_size: IndentSize,
2090 new_size: IndentSize,
2091 ) -> Option<(Range<Point>, String)> {
2092 if new_size.kind == current_size.kind {
2093 match new_size.len.cmp(&current_size.len) {
2094 Ordering::Greater => {
2095 let point = Point::new(row, 0);
2096 Some((
2097 point..point,
2098 iter::repeat(new_size.char())
2099 .take((new_size.len - current_size.len) as usize)
2100 .collect::<String>(),
2101 ))
2102 }
2103
2104 Ordering::Less => Some((
2105 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2106 String::new(),
2107 )),
2108
2109 Ordering::Equal => None,
2110 }
2111 } else {
2112 Some((
2113 Point::new(row, 0)..Point::new(row, current_size.len),
2114 iter::repeat(new_size.char())
2115 .take(new_size.len as usize)
2116 .collect::<String>(),
2117 ))
2118 }
2119 }
2120
2121 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2122 /// and the given new text.
2123 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2124 let old_text = self.as_rope().clone();
2125 let base_version = self.version();
2126 cx.background_executor()
2127 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2128 let old_text = old_text.to_string();
2129 let line_ending = LineEnding::detect(&new_text);
2130 LineEnding::normalize(&mut new_text);
2131 let edits = text_diff(&old_text, &new_text);
2132 Diff {
2133 base_version,
2134 line_ending,
2135 edits,
2136 }
2137 })
2138 }
2139
2140 /// Spawns a background task that searches the buffer for any whitespace
2141 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2142 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2143 let old_text = self.as_rope().clone();
2144 let line_ending = self.line_ending();
2145 let base_version = self.version();
2146 cx.background_spawn(async move {
2147 let ranges = trailing_whitespace_ranges(&old_text);
2148 let empty = Arc::<str>::from("");
2149 Diff {
2150 base_version,
2151 line_ending,
2152 edits: ranges
2153 .into_iter()
2154 .map(|range| (range, empty.clone()))
2155 .collect(),
2156 }
2157 })
2158 }
2159
2160 /// Ensures that the buffer ends with a single newline character, and
2161 /// no other whitespace. Does nothing if the buffer is empty.
2162 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2163 let len = self.len();
2164 if len == 0 {
2165 return;
2166 }
2167 let mut offset = len;
2168 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2169 let non_whitespace_len = chunk
2170 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2171 .len();
2172 offset -= chunk.len();
2173 offset += non_whitespace_len;
2174 if non_whitespace_len != 0 {
2175 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2176 return;
2177 }
2178 break;
2179 }
2180 }
2181 self.edit([(offset..len, "\n")], None, cx);
2182 }
2183
2184 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2185 /// calculated, then adjust the diff to account for those changes, and discard any
2186 /// parts of the diff that conflict with those changes.
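/// A minimal usage sketch (illustrative only, not a compiled doctest; assumes a
/// `cx: &mut Context<Buffer>` and a replacement `String` named `new_text`):
///
/// ```ignore
/// // Compute the diff on a background thread...
/// let diff_task = buffer.diff(new_text, cx);
/// // ...and once the task resolves to `diff`, rebase and apply it. Hunks
/// // that conflict with edits made in the meantime are discarded.
/// let transaction_id = buffer.apply_diff(diff, cx);
/// ```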
2187 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2188 let snapshot = self.snapshot();
2189 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2190 let mut delta = 0;
2191 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2192 while let Some(edit_since) = edits_since.peek() {
2193 // If the edit occurs after a diff hunk, then it does not
2194 // affect that hunk.
2195 if edit_since.old.start > range.end {
2196 break;
2197 }
2198 // If the edit precedes the diff hunk, then adjust the hunk
2199 // to reflect the edit.
2200 else if edit_since.old.end < range.start {
2201 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2202 edits_since.next();
2203 }
2204 // If the edit intersects a diff hunk, then discard that hunk.
2205 else {
2206 return None;
2207 }
2208 }
2209
2210 let start = (range.start as i64 + delta) as usize;
2211 let end = (range.end as i64 + delta) as usize;
2212 Some((start..end, new_text))
2213 });
2214
2215 self.start_transaction();
2216 self.text.set_line_ending(diff.line_ending);
2217 self.edit(adjusted_edits, None, cx);
2218 self.end_transaction(cx)
2219 }
2220
2221 pub fn has_unsaved_edits(&self) -> bool {
2222 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2223
2224 if last_version == self.version {
2225 self.has_unsaved_edits
2226 .set((last_version, has_unsaved_edits));
2227 return has_unsaved_edits;
2228 }
2229
2230 let has_edits = self.has_edits_since(&self.saved_version);
2231 self.has_unsaved_edits
2232 .set((self.version.clone(), has_edits));
2233 has_edits
2234 }
2235
2236 /// Checks if the buffer has unsaved changes.
2237 pub fn is_dirty(&self) -> bool {
2238 if self.capability == Capability::ReadOnly {
2239 return false;
2240 }
2241 if self.has_conflict {
2242 return true;
2243 }
2244 match self.file.as_ref().map(|f| f.disk_state()) {
2245 Some(DiskState::New) | Some(DiskState::Deleted) => {
2246 !self.is_empty() && self.has_unsaved_edits()
2247 }
2248 _ => self.has_unsaved_edits(),
2249 }
2250 }
2251
2252 /// Marks the buffer as having a conflict regardless of current buffer state.
2253 pub fn set_conflict(&mut self) {
2254 self.has_conflict = true;
2255 }
2256
2257 /// Checks if the buffer and its file have both changed since the buffer
2258 /// was last saved or reloaded.
2259 pub fn has_conflict(&self) -> bool {
2260 if self.has_conflict {
2261 return true;
2262 }
2263 let Some(file) = self.file.as_ref() else {
2264 return false;
2265 };
2266 match file.disk_state() {
2267 DiskState::New => false,
2268 DiskState::Present { mtime } => match self.saved_mtime {
2269 Some(saved_mtime) => {
2270 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2271 }
2272 None => true,
2273 },
2274 DiskState::Deleted => false,
2275 }
2276 }
2277
2278 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2279 pub fn subscribe(&mut self) -> Subscription<usize> {
2280 self.text.subscribe()
2281 }
2282
2283 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2284 ///
2285 /// This allows downstream code to check if the buffer's text has changed without
2286 /// waiting for an effect cycle, which would be required if using events.
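/// A sketch of the intended usage (illustrative only, not a compiled doctest):
///
/// ```ignore
/// use std::{cell::Cell, rc::Rc};
///
/// let changed = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&changed));
/// // ...later, poll the bit without waiting for an effect cycle:
/// if changed.take() {
///     // The buffer's text changed since the last check.
/// }
/// ```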
2287 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2288 if let Err(ix) = self
2289 .change_bits
2290 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2291 {
2292 self.change_bits.insert(ix, bit);
2293 }
2294 }
2295
2296 /// Set the change bit for all "listeners".
2297 fn was_changed(&mut self) {
2298 self.change_bits.retain(|change_bit| {
2299 change_bit
2300 .upgrade()
2301 .inspect(|bit| {
2302 _ = bit.replace(true);
2303 })
2304 .is_some()
2305 });
2306 }
2307
2308 /// Starts a transaction, if one is not already in-progress. When undoing or
2309 /// redoing edits, all of the edits performed within a transaction are undone
2310 /// or redone together.
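/// A typical pattern (sketch only, assuming a `cx: &mut Context<Buffer>`):
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "// header\n")], None, cx);
/// buffer.edit([(buffer.len()..buffer.len(), "\n")], None, cx);
/// // Both edits are undone or redone together.
/// let transaction_id = buffer.end_transaction(cx);
/// ```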
2311 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2312 self.start_transaction_at(Instant::now())
2313 }
2314
2315 /// Starts a transaction, providing the current time. Subsequent transactions
2316 /// that occur within a short period of time will be grouped together. This
2317 /// is controlled by the buffer's undo grouping duration.
2318 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2319 self.transaction_depth += 1;
2320 if self.was_dirty_before_starting_transaction.is_none() {
2321 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2322 }
2323 self.text.start_transaction_at(now)
2324 }
2325
2326 /// Terminates the current transaction, if this is the outermost transaction.
2327 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2328 self.end_transaction_at(Instant::now(), cx)
2329 }
2330
2331 /// Terminates the current transaction, providing the current time. Subsequent transactions
2332 /// that occur within a short period of time will be grouped together. This
2333 /// is controlled by the buffer's undo grouping duration.
2334 pub fn end_transaction_at(
2335 &mut self,
2336 now: Instant,
2337 cx: &mut Context<Self>,
2338 ) -> Option<TransactionId> {
2339 assert!(self.transaction_depth > 0);
2340 self.transaction_depth -= 1;
2341 let was_dirty = if self.transaction_depth == 0 {
2342 self.was_dirty_before_starting_transaction.take().unwrap()
2343 } else {
2344 false
2345 };
2346 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2347 self.did_edit(&start_version, was_dirty, cx);
2348 Some(transaction_id)
2349 } else {
2350 None
2351 }
2352 }
2353
2354 /// Manually add a transaction to the buffer's undo history.
2355 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2356 self.text.push_transaction(transaction, now);
2357 }
2358
2359 /// Differs from `push_transaction` in that it does not clear the redo
2360 /// stack. Intended to be used to create a parent transaction to merge
2361 /// potential child transactions into.
2362 ///
2363 /// The caller is responsible for removing it from the undo history using
2364 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2365 /// are merged into this transaction, the caller is responsible for ensuring
2366 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2367 /// cleared is to create transactions with the usual `start_transaction` and
2368 /// `end_transaction` methods and merge the resulting transactions into
2369 /// the transaction created by this method.
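/// A sketch of that pattern (illustrative only; `child_transaction_id` stands
/// for the result of an earlier `end_transaction` call):
///
/// ```ignore
/// let parent = buffer.push_empty_transaction(Instant::now());
/// // ...perform edits inside the usual start_transaction/end_transaction...
/// match child_transaction_id {
///     Some(child) => buffer.merge_transactions(child, parent),
///     // Nothing was merged in, so remove the empty parent again.
///     None => {
///         buffer.forget_transaction(parent);
///     }
/// }
/// ```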
2370 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2371 self.text.push_empty_transaction(now)
2372 }
2373
2374 /// Prevent the last transaction from being grouped with any subsequent transactions,
2375 /// even if they occur within the buffer's undo grouping duration.
2376 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2377 self.text.finalize_last_transaction()
2378 }
2379
2380 /// Manually group all changes since a given transaction.
2381 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2382 self.text.group_until_transaction(transaction_id);
2383 }
2384
2385 /// Manually remove a transaction from the buffer's undo history
2386 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2387 self.text.forget_transaction(transaction_id)
2388 }
2389
2390 /// Retrieve a transaction from the buffer's undo history
2391 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2392 self.text.get_transaction(transaction_id)
2393 }
2394
2395 /// Manually merge two transactions in the buffer's undo history.
2396 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2397 self.text.merge_transactions(transaction, destination);
2398 }
2399
2400 /// Waits for the buffer to receive operations with the given timestamps.
2401 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2402 &mut self,
2403 edit_ids: It,
2404 ) -> impl Future<Output = Result<()>> + use<It> {
2405 self.text.wait_for_edits(edit_ids)
2406 }
2407
2408 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2409 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2410 &mut self,
2411 anchors: It,
2412 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2413 self.text.wait_for_anchors(anchors)
2414 }
2415
2416 /// Waits for the buffer to receive operations up to the given version.
2417 pub fn wait_for_version(
2418 &mut self,
2419 version: clock::Global,
2420 ) -> impl Future<Output = Result<()>> + use<> {
2421 self.text.wait_for_version(version)
2422 }
2423
2424 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2425 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2426 pub fn give_up_waiting(&mut self) {
2427 self.text.give_up_waiting();
2428 }
2429
2430 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2431 let mut rx = None;
2432 if !self.autoindent_requests.is_empty() {
2433 let channel = oneshot::channel();
2434 self.wait_for_autoindent_txs.push(channel.0);
2435 rx = Some(channel.1);
2436 }
2437 rx
2438 }
2439
2440 /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2441 pub fn set_active_selections(
2442 &mut self,
2443 selections: Arc<[Selection<Anchor>]>,
2444 line_mode: bool,
2445 cursor_shape: CursorShape,
2446 cx: &mut Context<Self>,
2447 ) {
2448 let lamport_timestamp = self.text.lamport_clock.tick();
2449 self.remote_selections.insert(
2450 self.text.replica_id(),
2451 SelectionSet {
2452 selections: selections.clone(),
2453 lamport_timestamp,
2454 line_mode,
2455 cursor_shape,
2456 },
2457 );
2458 self.send_operation(
2459 Operation::UpdateSelections {
2460 selections,
2461 line_mode,
2462 lamport_timestamp,
2463 cursor_shape,
2464 },
2465 true,
2466 cx,
2467 );
2468 self.non_text_state_update_count += 1;
2469 cx.notify();
2470 }
2471
2472 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2473 /// this replica.
2474 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2475 if self
2476 .remote_selections
2477 .get(&self.text.replica_id())
2478 .is_none_or(|set| !set.selections.is_empty())
2479 {
2480 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2481 }
2482 }
2483
2484 pub fn set_agent_selections(
2485 &mut self,
2486 selections: Arc<[Selection<Anchor>]>,
2487 line_mode: bool,
2488 cursor_shape: CursorShape,
2489 cx: &mut Context<Self>,
2490 ) {
2491 let lamport_timestamp = self.text.lamport_clock.tick();
2492 self.remote_selections.insert(
2493 ReplicaId::AGENT,
2494 SelectionSet {
2495 selections,
2496 lamport_timestamp,
2497 line_mode,
2498 cursor_shape,
2499 },
2500 );
2501 self.non_text_state_update_count += 1;
2502 cx.notify();
2503 }
2504
2505 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2506 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2507 }
2508
2509 /// Replaces the buffer's entire text.
2510 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2511 where
2512 T: Into<Arc<str>>,
2513 {
2514 self.autoindent_requests.clear();
2515 self.edit([(0..self.len(), text)], None, cx)
2516 }
2517
2518 /// Appends the given text to the end of the buffer.
2519 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2520 where
2521 T: Into<Arc<str>>,
2522 {
2523 self.edit([(self.len()..self.len(), text)], None, cx)
2524 }
2525
2526 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2527 /// delete, and a string of text to insert at that location.
2528 ///
2529 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2530 /// request for the edited ranges, which will be processed when the buffer finishes
2531 /// parsing.
2532 ///
2533 /// Parsing takes place at the end of a transaction, and may compute synchronously
2534 /// or asynchronously, depending on the changes.
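/// A couple of illustrative calls (a sketch, not a compiled doctest; assumes a
/// `cx: &mut Context<Buffer>`, and the offsets are purely illustrative):
///
/// ```ignore
/// // Replace the first five bytes and append to the end in a single call.
/// buffer.edit([(0..5, "hello"), (buffer.len()..buffer.len(), "!\n")], None, cx);
/// // Insert a line and let the language's indent rules indent it.
/// buffer.edit([(10..10, "\nlet x = 1;")], Some(AutoindentMode::EachLine), cx);
/// ```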
2535 pub fn edit<I, S, T>(
2536 &mut self,
2537 edits_iter: I,
2538 autoindent_mode: Option<AutoindentMode>,
2539 cx: &mut Context<Self>,
2540 ) -> Option<clock::Lamport>
2541 where
2542 I: IntoIterator<Item = (Range<S>, T)>,
2543 S: ToOffset,
2544 T: Into<Arc<str>>,
2545 {
2546 // Skip invalid edits and coalesce contiguous ones.
2547 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2548
2549 for (range, new_text) in edits_iter {
2550 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2551
2552 if range.start > range.end {
2553 mem::swap(&mut range.start, &mut range.end);
2554 }
2555 let new_text = new_text.into();
2556 if !new_text.is_empty() || !range.is_empty() {
2557 if let Some((prev_range, prev_text)) = edits.last_mut()
2558 && prev_range.end >= range.start
2559 {
2560 prev_range.end = cmp::max(prev_range.end, range.end);
2561 *prev_text = format!("{prev_text}{new_text}").into();
2562 } else {
2563 edits.push((range, new_text));
2564 }
2565 }
2566 }
2567 if edits.is_empty() {
2568 return None;
2569 }
2570
2571 self.start_transaction();
2572 self.pending_autoindent.take();
2573 let autoindent_request = autoindent_mode
2574 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2575
2576 let edit_operation = self.text.edit(edits.iter().cloned());
2577 let edit_id = edit_operation.timestamp();
2578
2579 if let Some((before_edit, mode)) = autoindent_request {
2580 let mut delta = 0isize;
2581 let mut previous_setting = None;
2582 let entries: Vec<_> = edits
2583 .into_iter()
2584 .enumerate()
2585 .zip(&edit_operation.as_edit().unwrap().new_text)
2586 .filter(|((_, (range, _)), _)| {
2587 let language = before_edit.language_at(range.start);
2588 let language_id = language.map(|l| l.id());
2589 if let Some((cached_language_id, auto_indent)) = previous_setting
2590 && cached_language_id == language_id
2591 {
2592 auto_indent
2593 } else {
2594 // The auto-indent setting is not present in editorconfigs, hence
2595 // we can avoid passing the file here.
2596 let auto_indent =
2597 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2598 previous_setting = Some((language_id, auto_indent));
2599 auto_indent
2600 }
2601 })
2602 .map(|((ix, (range, _)), new_text)| {
2603 let new_text_length = new_text.len();
2604 let old_start = range.start.to_point(&before_edit);
2605 let new_start = (delta + range.start as isize) as usize;
2606 let range_len = range.end - range.start;
2607 delta += new_text_length as isize - range_len as isize;
2608
2609 // Decide what range of the insertion to auto-indent, and whether
2610 // the first line of the insertion should be considered a newly-inserted line
2611 // or an edit to an existing line.
2612 let mut range_of_insertion_to_indent = 0..new_text_length;
2613 let mut first_line_is_new = true;
2614
2615 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2616 let old_line_end = before_edit.line_len(old_start.row);
2617
2618 if old_start.column > old_line_start {
2619 first_line_is_new = false;
2620 }
2621
2622 if !new_text.contains('\n')
2623 && (old_start.column + (range_len as u32) < old_line_end
2624 || old_line_end == old_line_start)
2625 {
2626 first_line_is_new = false;
2627 }
2628
2629 // When inserting text starting with a newline, avoid auto-indenting the
2630 // previous line.
2631 if new_text.starts_with('\n') {
2632 range_of_insertion_to_indent.start += 1;
2633 first_line_is_new = true;
2634 }
2635
2636 let mut original_indent_column = None;
2637 if let AutoindentMode::Block {
2638 original_indent_columns,
2639 } = &mode
2640 {
2641 original_indent_column = Some(if new_text.starts_with('\n') {
2642 indent_size_for_text(
2643 new_text[range_of_insertion_to_indent.clone()].chars(),
2644 )
2645 .len
2646 } else {
2647 original_indent_columns
2648 .get(ix)
2649 .copied()
2650 .flatten()
2651 .unwrap_or_else(|| {
2652 indent_size_for_text(
2653 new_text[range_of_insertion_to_indent.clone()].chars(),
2654 )
2655 .len
2656 })
2657 });
2658
2659 // Avoid auto-indenting the line after the edit.
2660 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2661 range_of_insertion_to_indent.end -= 1;
2662 }
2663 }
2664
2665 AutoindentRequestEntry {
2666 first_line_is_new,
2667 original_indent_column,
2668 indent_size: before_edit.language_indent_size_at(range.start, cx),
2669 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2670 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2671 }
2672 })
2673 .collect();
2674
2675 if !entries.is_empty() {
2676 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2677 before_edit,
2678 entries,
2679 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2680 ignore_empty_lines: false,
2681 }));
2682 }
2683 }
2684
2685 self.end_transaction(cx);
2686 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2687 Some(edit_id)
2688 }
2689
2690 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2691 self.was_changed();
2692
2693 if self.edits_since::<usize>(old_version).next().is_none() {
2694 return;
2695 }
2696
2697 self.reparse(cx, true);
2698 cx.emit(BufferEvent::Edited);
2699 if was_dirty != self.is_dirty() {
2700 cx.emit(BufferEvent::DirtyChanged);
2701 }
2702 cx.notify();
2703 }
2704
2705 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2706 where
2707 I: IntoIterator<Item = Range<T>>,
2708 T: ToOffset + Copy,
2709 {
2710 let before_edit = self.snapshot();
2711 let entries = ranges
2712 .into_iter()
2713 .map(|range| AutoindentRequestEntry {
2714 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2715 first_line_is_new: true,
2716 indent_size: before_edit.language_indent_size_at(range.start, cx),
2717 original_indent_column: None,
2718 })
2719 .collect();
2720 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2721 before_edit,
2722 entries,
2723 is_block_mode: false,
2724 ignore_empty_lines: true,
2725 }));
2726 self.request_autoindent(cx);
2727 }
2728
2729 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2730 // You can also request the insertion of empty lines above and below the line starting at the returned point.
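// For instance (a sketch, assuming a `cx: &mut Context<Buffer>`): breaking the
// line at row 2, column 5 and padding it with blank lines on both sides:
//
//     let new_line_start = buffer.insert_empty_line(Point::new(2, 5), true, true, cx);
//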
2731 pub fn insert_empty_line(
2732 &mut self,
2733 position: impl ToPoint,
2734 space_above: bool,
2735 space_below: bool,
2736 cx: &mut Context<Self>,
2737 ) -> Point {
2738 let mut position = position.to_point(self);
2739
2740 self.start_transaction();
2741
2742 self.edit(
2743 [(position..position, "\n")],
2744 Some(AutoindentMode::EachLine),
2745 cx,
2746 );
2747
2748 if position.column > 0 {
2749 position += Point::new(1, 0);
2750 }
2751
2752 if !self.is_line_blank(position.row) {
2753 self.edit(
2754 [(position..position, "\n")],
2755 Some(AutoindentMode::EachLine),
2756 cx,
2757 );
2758 }
2759
2760 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2761 self.edit(
2762 [(position..position, "\n")],
2763 Some(AutoindentMode::EachLine),
2764 cx,
2765 );
2766 position.row += 1;
2767 }
2768
2769 if space_below
2770 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2771 {
2772 self.edit(
2773 [(position..position, "\n")],
2774 Some(AutoindentMode::EachLine),
2775 cx,
2776 );
2777 }
2778
2779 self.end_transaction(cx);
2780
2781 position
2782 }
2783
2784 /// Applies the given remote operations to the buffer.
2785 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2786 self.pending_autoindent.take();
2787 let was_dirty = self.is_dirty();
2788 let old_version = self.version.clone();
2789 let mut deferred_ops = Vec::new();
2790 let buffer_ops = ops
2791 .into_iter()
2792 .filter_map(|op| match op {
2793 Operation::Buffer(op) => Some(op),
2794 _ => {
2795 if self.can_apply_op(&op) {
2796 self.apply_op(op, cx);
2797 } else {
2798 deferred_ops.push(op);
2799 }
2800 None
2801 }
2802 })
2803 .collect::<Vec<_>>();
2804 for operation in buffer_ops.iter() {
2805 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2806 }
2807 self.text.apply_ops(buffer_ops);
2808 self.deferred_ops.insert(deferred_ops);
2809 self.flush_deferred_ops(cx);
2810 self.did_edit(&old_version, was_dirty, cx);
2811 // Notify independently of whether the buffer was edited as the operations could include a
2812 // selection update.
2813 cx.notify();
2814 }
2815
2816 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2817 let mut deferred_ops = Vec::new();
2818 for op in self.deferred_ops.drain().iter().cloned() {
2819 if self.can_apply_op(&op) {
2820 self.apply_op(op, cx);
2821 } else {
2822 deferred_ops.push(op);
2823 }
2824 }
2825 self.deferred_ops.insert(deferred_ops);
2826 }
2827
2828 pub fn has_deferred_ops(&self) -> bool {
2829 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2830 }
2831
2832 fn can_apply_op(&self, operation: &Operation) -> bool {
2833 match operation {
2834 Operation::Buffer(_) => {
2835 unreachable!("buffer operations should never be applied at this layer")
2836 }
2837 Operation::UpdateDiagnostics {
2838 diagnostics: diagnostic_set,
2839 ..
2840 } => diagnostic_set.iter().all(|diagnostic| {
2841 self.text.can_resolve(&diagnostic.range.start)
2842 && self.text.can_resolve(&diagnostic.range.end)
2843 }),
2844 Operation::UpdateSelections { selections, .. } => selections
2845 .iter()
2846 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2847 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2848 }
2849 }
2850
2851 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2852 match operation {
2853 Operation::Buffer(_) => {
2854 unreachable!("buffer operations should never be applied at this layer")
2855 }
2856 Operation::UpdateDiagnostics {
2857 server_id,
2858 diagnostics: diagnostic_set,
2859 lamport_timestamp,
2860 } => {
2861 let snapshot = self.snapshot();
2862 self.apply_diagnostic_update(
2863 server_id,
2864 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2865 lamport_timestamp,
2866 cx,
2867 );
2868 }
2869 Operation::UpdateSelections {
2870 selections,
2871 lamport_timestamp,
2872 line_mode,
2873 cursor_shape,
2874 } => {
2875 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2876 && set.lamport_timestamp > lamport_timestamp
2877 {
2878 return;
2879 }
2880
2881 self.remote_selections.insert(
2882 lamport_timestamp.replica_id,
2883 SelectionSet {
2884 selections,
2885 lamport_timestamp,
2886 line_mode,
2887 cursor_shape,
2888 },
2889 );
2890 self.text.lamport_clock.observe(lamport_timestamp);
2891 self.non_text_state_update_count += 1;
2892 }
2893 Operation::UpdateCompletionTriggers {
2894 triggers,
2895 lamport_timestamp,
2896 server_id,
2897 } => {
2898 if triggers.is_empty() {
2899 self.completion_triggers_per_language_server
2900 .remove(&server_id);
2901 self.completion_triggers = self
2902 .completion_triggers_per_language_server
2903 .values()
2904 .flat_map(|triggers| triggers.iter().cloned())
2905 .collect();
2906 } else {
2907 self.completion_triggers_per_language_server
2908 .insert(server_id, triggers.iter().cloned().collect());
2909 self.completion_triggers.extend(triggers);
2910 }
2911 self.text.lamport_clock.observe(lamport_timestamp);
2912 }
2913 Operation::UpdateLineEnding {
2914 line_ending,
2915 lamport_timestamp,
2916 } => {
2917 self.text.set_line_ending(line_ending);
2918 self.text.lamport_clock.observe(lamport_timestamp);
2919 }
2920 }
2921 }
2922
2923 fn apply_diagnostic_update(
2924 &mut self,
2925 server_id: LanguageServerId,
2926 diagnostics: DiagnosticSet,
2927 lamport_timestamp: clock::Lamport,
2928 cx: &mut Context<Self>,
2929 ) {
2930 if lamport_timestamp > self.diagnostics_timestamp {
2931 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2932 if diagnostics.is_empty() {
2933 if let Ok(ix) = ix {
2934 self.diagnostics.remove(ix);
2935 }
2936 } else {
2937 match ix {
2938 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2939 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2940 };
2941 }
2942 self.diagnostics_timestamp = lamport_timestamp;
2943 self.non_text_state_update_count += 1;
2944 self.text.lamport_clock.observe(lamport_timestamp);
2945 cx.notify();
2946 cx.emit(BufferEvent::DiagnosticsUpdated);
2947 }
2948 }
2949
2950 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2951 self.was_changed();
2952 cx.emit(BufferEvent::Operation {
2953 operation,
2954 is_local,
2955 });
2956 }
2957
2958 /// Removes the selections for a given peer.
2959 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2960 self.remote_selections.remove(&replica_id);
2961 cx.notify();
2962 }
2963
2964 /// Undoes the most recent transaction.
2965 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2966 let was_dirty = self.is_dirty();
2967 let old_version = self.version.clone();
2968
2969 if let Some((transaction_id, operation)) = self.text.undo() {
2970 self.send_operation(Operation::Buffer(operation), true, cx);
2971 self.did_edit(&old_version, was_dirty, cx);
2972 Some(transaction_id)
2973 } else {
2974 None
2975 }
2976 }
2977
2978 /// Manually undoes a specific transaction in the buffer's undo history.
2979 pub fn undo_transaction(
2980 &mut self,
2981 transaction_id: TransactionId,
2982 cx: &mut Context<Self>,
2983 ) -> bool {
2984 let was_dirty = self.is_dirty();
2985 let old_version = self.version.clone();
2986 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2987 self.send_operation(Operation::Buffer(operation), true, cx);
2988 self.did_edit(&old_version, was_dirty, cx);
2989 true
2990 } else {
2991 false
2992 }
2993 }
2994
2995 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2996 pub fn undo_to_transaction(
2997 &mut self,
2998 transaction_id: TransactionId,
2999 cx: &mut Context<Self>,
3000 ) -> bool {
3001 let was_dirty = self.is_dirty();
3002 let old_version = self.version.clone();
3003
3004 let operations = self.text.undo_to_transaction(transaction_id);
3005 let undone = !operations.is_empty();
3006 for operation in operations {
3007 self.send_operation(Operation::Buffer(operation), true, cx);
3008 }
3009 if undone {
3010 self.did_edit(&old_version, was_dirty, cx)
3011 }
3012 undone
3013 }
3014
3015 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3016 let was_dirty = self.is_dirty();
3017 let operation = self.text.undo_operations(counts);
3018 let old_version = self.version.clone();
3019 self.send_operation(Operation::Buffer(operation), true, cx);
3020 self.did_edit(&old_version, was_dirty, cx);
3021 }
3022
3023 /// Redoes the most recently undone transaction.
3024 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3025 let was_dirty = self.is_dirty();
3026 let old_version = self.version.clone();
3027
3028 if let Some((transaction_id, operation)) = self.text.redo() {
3029 self.send_operation(Operation::Buffer(operation), true, cx);
3030 self.did_edit(&old_version, was_dirty, cx);
3031 Some(transaction_id)
3032 } else {
3033 None
3034 }
3035 }
3036
3037 /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3038 pub fn redo_to_transaction(
3039 &mut self,
3040 transaction_id: TransactionId,
3041 cx: &mut Context<Self>,
3042 ) -> bool {
3043 let was_dirty = self.is_dirty();
3044 let old_version = self.version.clone();
3045
3046 let operations = self.text.redo_to_transaction(transaction_id);
3047 let redone = !operations.is_empty();
3048 for operation in operations {
3049 self.send_operation(Operation::Buffer(operation), true, cx);
3050 }
3051 if redone {
3052 self.did_edit(&old_version, was_dirty, cx)
3053 }
3054 redone
3055 }
3056
3057 /// Overrides the completion triggers stored for the given language server with the provided set.
3058 pub fn set_completion_triggers(
3059 &mut self,
3060 server_id: LanguageServerId,
3061 triggers: BTreeSet<String>,
3062 cx: &mut Context<Self>,
3063 ) {
3064 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3065 if triggers.is_empty() {
3066 self.completion_triggers_per_language_server
3067 .remove(&server_id);
3068 self.completion_triggers = self
3069 .completion_triggers_per_language_server
3070 .values()
3071 .flat_map(|triggers| triggers.iter().cloned())
3072 .collect();
3073 } else {
3074 self.completion_triggers_per_language_server
3075 .insert(server_id, triggers.clone());
3076 self.completion_triggers.extend(triggers.iter().cloned());
3077 }
3078 self.send_operation(
3079 Operation::UpdateCompletionTriggers {
3080 triggers: triggers.into_iter().collect(),
3081 lamport_timestamp: self.completion_triggers_timestamp,
3082 server_id,
3083 },
3084 true,
3085 cx,
3086 );
3087 cx.notify();
3088 }
3089
3090 /// Returns a list of strings which trigger a completion menu for this language.
3091 /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
3092 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3093 &self.completion_triggers
3094 }
3095
3096 /// Call this directly after performing edits to prevent the preview tab
3097 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3098 /// to return false until there are additional edits.
3099 pub fn refresh_preview(&mut self) {
3100 self.preview_version = self.version.clone();
3101 }
3102
3103 /// Whether we should preserve the preview status of a tab containing this buffer.
3104 pub fn preserve_preview(&self) -> bool {
3105 !self.has_edits_since(&self.preview_version)
3106 }
3107}
3108
3109#[doc(hidden)]
3110#[cfg(any(test, feature = "test-support"))]
3111impl Buffer {
3112 pub fn edit_via_marked_text(
3113 &mut self,
3114 marked_string: &str,
3115 autoindent_mode: Option<AutoindentMode>,
3116 cx: &mut Context<Self>,
3117 ) {
3118 let edits = self.edits_for_marked_text(marked_string);
3119 self.edit(edits, autoindent_mode, cx);
3120 }
3121
3122 pub fn set_group_interval(&mut self, group_interval: Duration) {
3123 self.text.set_group_interval(group_interval);
3124 }
3125
3126 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3127 where
3128 T: rand::Rng,
3129 {
3130 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3131 let mut last_end = None;
3132 for _ in 0..old_range_count {
3133 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3134 break;
3135 }
3136
3137 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3138 let mut range = self.random_byte_range(new_start, rng);
3139 if rng.random_bool(0.2) {
3140 mem::swap(&mut range.start, &mut range.end);
3141 }
3142 last_end = Some(range.end);
3143
3144 let new_text_len = rng.random_range(0..10);
3145 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3146 new_text = new_text.to_uppercase();
3147
3148 edits.push((range, new_text));
3149 }
3150 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3151 self.edit(edits, None, cx);
3152 }
3153
3154 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3155 let was_dirty = self.is_dirty();
3156 let old_version = self.version.clone();
3157
3158 let ops = self.text.randomly_undo_redo(rng);
3159 if !ops.is_empty() {
3160 for op in ops {
3161 self.send_operation(Operation::Buffer(op), true, cx);
3162 self.did_edit(&old_version, was_dirty, cx);
3163 }
3164 }
3165 }
3166}
3167
3168impl EventEmitter<BufferEvent> for Buffer {}
3169
3170impl Deref for Buffer {
3171 type Target = TextBuffer;
3172
3173 fn deref(&self) -> &Self::Target {
3174 &self.text
3175 }
3176}
3177
3178impl BufferSnapshot {
3179 /// Returns [`IndentSize`] for a given line that respects user settings and
3180 /// language preferences.
3181 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3182 indent_size_for_line(self, row)
3183 }
3184
3185 /// Returns [`IndentSize`] for a given position that respects user settings
3186 /// and language preferences.
3187 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3188 let settings = language_settings(
3189 self.language_at(position).map(|l| l.name()),
3190 self.file(),
3191 cx,
3192 );
3193 if settings.hard_tabs {
3194 IndentSize::tab()
3195 } else {
3196 IndentSize::spaces(settings.tab_size.get())
3197 }
3198 }
3199
3200 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3201 /// is passed in as `single_indent_size`.
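/// A sketch of a call (illustrative only, not a compiled doctest; `snapshot`
/// is a [`BufferSnapshot`]):
///
/// ```ignore
/// // Suggest indents for rows 0..5, using four spaces as one indent level.
/// let suggestions = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
/// for (row, indent) in suggestions {
///     println!("row {row}: {} columns", indent.len);
/// }
/// ```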
3202 pub fn suggested_indents(
3203 &self,
3204 rows: impl Iterator<Item = u32>,
3205 single_indent_size: IndentSize,
3206 ) -> BTreeMap<u32, IndentSize> {
3207 let mut result = BTreeMap::new();
3208
3209 for row_range in contiguous_ranges(rows, 10) {
3210 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3211 Some(suggestions) => suggestions,
3212 _ => break,
3213 };
3214
3215 for (row, suggestion) in row_range.zip(suggestions) {
3216 let indent_size = if let Some(suggestion) = suggestion {
3217 result
3218 .get(&suggestion.basis_row)
3219 .copied()
3220 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3221 .with_delta(suggestion.delta, single_indent_size)
3222 } else {
3223 self.indent_size_for_line(row)
3224 };
3225
3226 result.insert(row, indent_size);
3227 }
3228 }
3229
3230 result
3231 }
3232
3233 fn suggest_autoindents(
3234 &self,
3235 row_range: Range<u32>,
3236 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3237 let config = &self.language.as_ref()?.config;
3238 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3239
3240 #[derive(Debug, Clone)]
3241 struct StartPosition {
3242 start: Point,
3243 suffix: SharedString,
3244 language: Arc<Language>,
3245 }
3246
3247 // Find the suggested indentation ranges based on the syntax tree.
3248 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3249 let end = Point::new(row_range.end, 0);
3250 let range = (start..end).to_offset(&self.text);
3251 let mut matches = self.syntax.matches_with_options(
3252 range.clone(),
3253 &self.text,
3254 TreeSitterOptions {
3255 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3256 max_start_depth: None,
3257 },
3258 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3259 );
3260 let indent_configs = matches
3261 .grammars()
3262 .iter()
3263 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3264 .collect::<Vec<_>>();
3265
3266 let mut indent_ranges = Vec::<Range<Point>>::new();
3267 let mut start_positions = Vec::<StartPosition>::new();
3268 let mut outdent_positions = Vec::<Point>::new();
3269 while let Some(mat) = matches.peek() {
3270 let mut start: Option<Point> = None;
3271 let mut end: Option<Point> = None;
3272
3273 let config = indent_configs[mat.grammar_index];
3274 for capture in mat.captures {
3275 if capture.index == config.indent_capture_ix {
3276 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3277 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3278 } else if Some(capture.index) == config.start_capture_ix {
3279 start = Some(Point::from_ts_point(capture.node.end_position()));
3280 } else if Some(capture.index) == config.end_capture_ix {
3281 end = Some(Point::from_ts_point(capture.node.start_position()));
3282 } else if Some(capture.index) == config.outdent_capture_ix {
3283 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3284 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3285 start_positions.push(StartPosition {
3286 start: Point::from_ts_point(capture.node.start_position()),
3287 suffix: suffix.clone(),
3288 language: mat.language.clone(),
3289 });
3290 }
3291 }
3292
3293 matches.advance();
3294 if let Some((start, end)) = start.zip(end) {
3295 if start.row == end.row {
3296 continue;
3297 }
3298 let range = start..end;
3299 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3300 Err(ix) => indent_ranges.insert(ix, range),
3301 Ok(ix) => {
3302 let prev_range = &mut indent_ranges[ix];
3303 prev_range.end = prev_range.end.max(range.end);
3304 }
3305 }
3306 }
3307 }
3308
3309 let mut error_ranges = Vec::<Range<Point>>::new();
3310 let mut matches = self
3311 .syntax
3312 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3313 while let Some(mat) = matches.peek() {
3314 let node = mat.captures[0].node;
3315 let start = Point::from_ts_point(node.start_position());
3316 let end = Point::from_ts_point(node.end_position());
3317 let range = start..end;
3318 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3319 Ok(ix) | Err(ix) => ix,
3320 };
3321 let mut end_ix = ix;
3322 while let Some(existing_range) = error_ranges.get(end_ix) {
3323 if existing_range.end < end {
3324 end_ix += 1;
3325 } else {
3326 break;
3327 }
3328 }
3329 error_ranges.splice(ix..end_ix, [range]);
3330 matches.advance();
3331 }
3332
3333 outdent_positions.sort();
3334 for outdent_position in outdent_positions {
3335 // Find the innermost indent range containing this outdent position,
3336 // and set its end to the outdent position.
3337 if let Some(range_to_truncate) = indent_ranges
3338 .iter_mut()
3339 .rfind(|indent_range| indent_range.contains(&outdent_position))
3340 {
3341 range_to_truncate.end = outdent_position;
3342 }
3343 }
3344
3345 start_positions.sort_by_key(|b| b.start);
3346
3347 // Find the suggested indentation increases and decreases based on regexes.
3348 let mut regex_outdent_map = HashMap::default();
3349 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3350 let mut start_positions_iter = start_positions.iter().peekable();
3351
3352 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3353 self.for_each_line(
3354 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3355 ..Point::new(row_range.end, 0),
3356 |row, line| {
3357 let indent_len = self.indent_size_for_line(row).len;
3358 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3359 let row_language_config = row_language
3360 .as_ref()
3361 .map(|lang| lang.config())
3362 .unwrap_or(config);
3363
3364 if row_language_config
3365 .decrease_indent_pattern
3366 .as_ref()
3367 .is_some_and(|regex| regex.is_match(line))
3368 {
3369 indent_change_rows.push((row, Ordering::Less));
3370 }
3371 if row_language_config
3372 .increase_indent_pattern
3373 .as_ref()
3374 .is_some_and(|regex| regex.is_match(line))
3375 {
3376 indent_change_rows.push((row + 1, Ordering::Greater));
3377 }
3378 while let Some(pos) = start_positions_iter.peek() {
3379 if pos.start.row < row {
3380 let pos = start_positions_iter.next().unwrap().clone();
3381 last_seen_suffix
3382 .entry(pos.suffix.to_string())
3383 .or_default()
3384 .push(pos);
3385 } else {
3386 break;
3387 }
3388 }
3389 for rule in &row_language_config.decrease_indent_patterns {
3390 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3391 let row_start_column = self.indent_size_for_line(row).len;
3392 let basis_row = rule
3393 .valid_after
3394 .iter()
3395 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3396 .flatten()
3397 .filter(|pos| {
3398 row_language
3399 .as_ref()
3400 .or(self.language.as_ref())
3401 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3402 })
3403 .filter(|pos| pos.start.column <= row_start_column)
3404 .max_by_key(|pos| pos.start.row);
3405 if let Some(outdent_to) = basis_row {
3406 regex_outdent_map.insert(row, outdent_to.start.row);
3407 }
3408 break;
3409 }
3410 }
3411 },
3412 );
3413
3414 let mut indent_changes = indent_change_rows.into_iter().peekable();
3415 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3416 prev_non_blank_row.unwrap_or(0)
3417 } else {
3418 row_range.start.saturating_sub(1)
3419 };
3420
3421 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3422 Some(row_range.map(move |row| {
3423 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3424
3425 let mut indent_from_prev_row = false;
3426 let mut outdent_from_prev_row = false;
3427 let mut outdent_to_row = u32::MAX;
3428 let mut from_regex = false;
3429
3430 while let Some((indent_row, delta)) = indent_changes.peek() {
3431 match indent_row.cmp(&row) {
3432 Ordering::Equal => match delta {
3433 Ordering::Less => {
3434 from_regex = true;
3435 outdent_from_prev_row = true
3436 }
3437 Ordering::Greater => {
3438 indent_from_prev_row = true;
3439 from_regex = true
3440 }
3441 _ => {}
3442 },
3443
3444 Ordering::Greater => break,
3445 Ordering::Less => {}
3446 }
3447
3448 indent_changes.next();
3449 }
3450
3451 for range in &indent_ranges {
3452 if range.start.row >= row {
3453 break;
3454 }
3455 if range.start.row == prev_row && range.end > row_start {
3456 indent_from_prev_row = true;
3457 }
3458 if range.end > prev_row_start && range.end <= row_start {
3459 outdent_to_row = outdent_to_row.min(range.start.row);
3460 }
3461 }
3462
3463 if let Some(basis_row) = regex_outdent_map.get(&row) {
3464 indent_from_prev_row = false;
3465 outdent_to_row = *basis_row;
3466 from_regex = true;
3467 }
3468
3469 let within_error = error_ranges
3470 .iter()
3471 .any(|e| e.start.row < row && e.end > row_start);
3472
3473 let suggestion = if outdent_to_row == prev_row
3474 || (outdent_from_prev_row && indent_from_prev_row)
3475 {
3476 Some(IndentSuggestion {
3477 basis_row: prev_row,
3478 delta: Ordering::Equal,
3479 within_error: within_error && !from_regex,
3480 })
3481 } else if indent_from_prev_row {
3482 Some(IndentSuggestion {
3483 basis_row: prev_row,
3484 delta: Ordering::Greater,
3485 within_error: within_error && !from_regex,
3486 })
3487 } else if outdent_to_row < prev_row {
3488 Some(IndentSuggestion {
3489 basis_row: outdent_to_row,
3490 delta: Ordering::Equal,
3491 within_error: within_error && !from_regex,
3492 })
3493 } else if outdent_from_prev_row {
3494 Some(IndentSuggestion {
3495 basis_row: prev_row,
3496 delta: Ordering::Less,
3497 within_error: within_error && !from_regex,
3498 })
3499 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3500 {
3501 Some(IndentSuggestion {
3502 basis_row: prev_row,
3503 delta: Ordering::Equal,
3504 within_error: within_error && !from_regex,
3505 })
3506 } else {
3507 None
3508 };
3509
3510 prev_row = row;
3511 prev_row_start = row_start;
3512 suggestion
3513 }))
3514 }
3515
3516 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3517 while row > 0 {
3518 row -= 1;
3519 if !self.is_line_blank(row) {
3520 return Some(row);
3521 }
3522 }
3523 None
3524 }
3525
3526 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3527 let captures = self.syntax.captures(range, &self.text, |grammar| {
3528 grammar
3529 .highlights_config
3530 .as_ref()
3531 .map(|config| &config.query)
3532 });
3533 let highlight_maps = captures
3534 .grammars()
3535 .iter()
3536 .map(|grammar| grammar.highlight_map())
3537 .collect();
3538 (captures, highlight_maps)
3539 }
3540
    /// Iterates over chunks of text in the given range of the buffer. Chunk boundaries are
    /// arbitrary, because the text is stored in a [`Rope`](text::Rope), but each returned chunk
    /// has a single syntax highlighting style and diagnostic status.
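    ///
    /// # Examples
    ///
    /// An illustrative sketch (not a doctest), assuming a `snapshot: &BufferSnapshot` is in scope;
    /// it reassembles the buffer's text while observing per-chunk highlight information:
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries at most one syntax highlight id and one diagnostic severity.
    ///     let _highlight = chunk.syntax_highlight_id;
    ///     let _severity = chunk.diagnostic_severity;
    ///     text.push_str(chunk.text);
    /// }
    /// // `text` now contains the full contents of the requested range.
    /// ```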
3545 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3546 let range = range.start.to_offset(self)..range.end.to_offset(self);
3547
3548 let mut syntax = None;
3549 if language_aware {
3550 syntax = Some(self.get_highlights(range.clone()));
3551 }
3552 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3553 let diagnostics = language_aware;
3554 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3555 }
3556
3557 pub fn highlighted_text_for_range<T: ToOffset>(
3558 &self,
3559 range: Range<T>,
3560 override_style: Option<HighlightStyle>,
3561 syntax_theme: &SyntaxTheme,
3562 ) -> HighlightedText {
3563 HighlightedText::from_buffer_range(
3564 range,
3565 &self.text,
3566 &self.syntax,
3567 override_style,
3568 syntax_theme,
3569 )
3570 }
3571
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// A callback is used so that a new string does not have to be allocated for each line.
3574 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3575 let mut line = String::new();
3576 let mut row = range.start.row;
3577 for chunk in self
3578 .as_rope()
3579 .chunks_in_range(range.to_offset(self))
3580 .chain(["\n"])
3581 {
3582 for (newline_ix, text) in chunk.split('\n').enumerate() {
3583 if newline_ix > 0 {
3584 callback(row, &line);
3585 row += 1;
3586 line.clear();
3587 }
3588 line.push_str(text);
3589 }
3590 }
3591 }
3592
3593 /// Iterates over every [`SyntaxLayer`] in the buffer.
3594 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3595 self.syntax_layers_for_range(0..self.len(), true)
3596 }
3597
3598 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3599 let offset = position.to_offset(self);
3600 self.syntax_layers_for_range(offset..offset, false)
3601 .filter(|l| {
3602 if let Some(ranges) = l.included_sub_ranges {
3603 ranges.iter().any(|range| {
3604 let start = range.start.to_offset(self);
3605 start <= offset && {
3606 let end = range.end.to_offset(self);
3607 offset < end
3608 }
3609 })
3610 } else {
3611 l.node().start_byte() <= offset && l.node().end_byte() > offset
3612 }
3613 })
3614 .last()
3615 }
3616
3617 pub fn syntax_layers_for_range<D: ToOffset>(
3618 &self,
3619 range: Range<D>,
3620 include_hidden: bool,
3621 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3622 self.syntax
3623 .layers_for_range(range, &self.text, include_hidden)
3624 }
3625
3626 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3627 &self,
3628 range: Range<D>,
3629 ) -> Option<SyntaxLayer<'_>> {
3630 let range = range.to_offset(self);
3631 self.syntax
3632 .layers_for_range(range, &self.text, false)
3633 .max_by(|a, b| {
3634 if a.depth != b.depth {
3635 a.depth.cmp(&b.depth)
3636 } else if a.offset.0 != b.offset.0 {
3637 a.offset.0.cmp(&b.offset.0)
3638 } else {
3639 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3640 }
3641 })
3642 }
3643
3644 /// Returns the main [`Language`].
3645 pub fn language(&self) -> Option<&Arc<Language>> {
3646 self.language.as_ref()
3647 }
3648
3649 /// Returns the [`Language`] at the given location.
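    ///
    /// For injected languages (e.g. a fenced code block inside Markdown), this may differ from
    /// [`Self::language`]. An illustrative sketch (not a doctest), assuming `snapshot` and a byte
    /// `offset` are in scope:
    ///
    /// ```ignore
    /// let language_name = snapshot.language_at(offset).map(|language| language.name());
    /// ```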
3650 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3651 self.syntax_layer_at(position)
3652 .map(|info| info.language)
3653 .or(self.language.as_ref())
3654 }
3655
3656 /// Returns the settings for the language at the given location.
3657 pub fn settings_at<'a, D: ToOffset>(
3658 &'a self,
3659 position: D,
3660 cx: &'a App,
3661 ) -> Cow<'a, LanguageSettings> {
3662 language_settings(
3663 self.language_at(position).map(|l| l.name()),
3664 self.file.as_ref(),
3665 cx,
3666 )
3667 }
3668
3669 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3670 CharClassifier::new(self.language_scope_at(point))
3671 }
3672
3673 /// Returns the [`LanguageScope`] at the given location.
3674 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3675 let offset = position.to_offset(self);
3676 let mut scope = None;
3677 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3678
3679 // Use the layer that has the smallest node intersecting the given point.
3680 for layer in self
3681 .syntax
3682 .layers_for_range(offset..offset, &self.text, false)
3683 {
3684 let mut cursor = layer.node().walk();
3685
3686 let mut range = None;
3687 loop {
3688 let child_range = cursor.node().byte_range();
3689 if !child_range.contains(&offset) {
3690 break;
3691 }
3692
3693 range = Some(child_range);
3694 if cursor.goto_first_child_for_byte(offset).is_none() {
3695 break;
3696 }
3697 }
3698
3699 if let Some(range) = range
3700 && smallest_range_and_depth.as_ref().is_none_or(
3701 |(smallest_range, smallest_range_depth)| {
3702 if layer.depth > *smallest_range_depth {
3703 true
3704 } else if layer.depth == *smallest_range_depth {
3705 range.len() < smallest_range.len()
3706 } else {
3707 false
3708 }
3709 },
3710 )
3711 {
3712 smallest_range_and_depth = Some((range, layer.depth));
3713 scope = Some(LanguageScope {
3714 language: layer.language.clone(),
3715 override_id: layer.override_id(offset, &self.text),
3716 });
3717 }
3718 }
3719
3720 scope.or_else(|| {
3721 self.language.clone().map(|language| LanguageScope {
3722 language,
3723 override_id: None,
3724 })
3725 })
3726 }
3727
3728 /// Returns a tuple of the range and character kind of the word
3729 /// surrounding the given position.
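    ///
    /// # Examples
    ///
    /// An illustrative sketch (not a doctest), assuming `snapshot: &BufferSnapshot` and a byte
    /// `offset` are in scope:
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(offset, None);
    /// // `kind` is `None` only when there is no character on either side of `offset`.
    /// let word: String = snapshot.text_for_range(range).collect();
    /// ```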
3730 pub fn surrounding_word<T: ToOffset>(
3731 &self,
3732 start: T,
3733 scope_context: Option<CharScopeContext>,
3734 ) -> (Range<usize>, Option<CharKind>) {
3735 let mut start = start.to_offset(self);
3736 let mut end = start;
3737 let mut next_chars = self.chars_at(start).take(128).peekable();
3738 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3739
3740 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3741 let word_kind = cmp::max(
3742 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3743 next_chars.peek().copied().map(|c| classifier.kind(c)),
3744 );
3745
3746 for ch in prev_chars {
3747 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3748 start -= ch.len_utf8();
3749 } else {
3750 break;
3751 }
3752 }
3753
3754 for ch in next_chars {
3755 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3756 end += ch.len_utf8();
3757 } else {
3758 break;
3759 }
3760 }
3761
3762 (start..end, word_kind)
3763 }
3764
3765 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3766 /// range. When `require_larger` is true, the node found must be larger than the query range.
3767 ///
3768 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3769 /// be moved to the root of the tree.
3770 fn goto_node_enclosing_range(
3771 cursor: &mut tree_sitter::TreeCursor,
3772 query_range: &Range<usize>,
3773 require_larger: bool,
3774 ) -> bool {
3775 let mut ascending = false;
3776 loop {
3777 let mut range = cursor.node().byte_range();
3778 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3781 if range.start > query_range.start {
3782 cursor.goto_previous_sibling();
3783 range = cursor.node().byte_range();
3784 }
3785 } else {
3786 // When the query range is non-empty and the current node ends exactly at the start,
3787 // move to the next sibling to find a node that extends beyond the start.
3788 if range.end == query_range.start {
3789 cursor.goto_next_sibling();
3790 range = cursor.node().byte_range();
3791 }
3792 }
3793
3794 let encloses = range.contains_inclusive(query_range)
3795 && (!require_larger || range.len() > query_range.len());
3796 if !encloses {
3797 ascending = true;
3798 if !cursor.goto_parent() {
3799 return false;
3800 }
3801 continue;
3802 } else if ascending {
3803 return true;
3804 }
3805
3806 // Descend into the current node.
3807 if cursor
3808 .goto_first_child_for_byte(query_range.start)
3809 .is_none()
3810 {
3811 return true;
3812 }
3813 }
3814 }
3815
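    /// Returns the smallest syntax node, across all syntax layers, that encloses the given range
    /// and is strictly larger than it. For an empty range, a node immediately to its right may be
    /// preferred over one that ends at the range.
    ///
    /// # Examples
    ///
    /// An illustrative sketch (not a doctest), assuming `snapshot` and a cursor `offset`; this is
    /// the basic building block for "expand selection to the enclosing syntax node":
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_ancestor(offset..offset) {
    ///     let expanded_selection = node.byte_range();
    /// }
    /// ```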
3816 pub fn syntax_ancestor<'a, T: ToOffset>(
3817 &'a self,
3818 range: Range<T>,
3819 ) -> Option<tree_sitter::Node<'a>> {
3820 let range = range.start.to_offset(self)..range.end.to_offset(self);
3821 let mut result: Option<tree_sitter::Node<'a>> = None;
3822 for layer in self
3823 .syntax
3824 .layers_for_range(range.clone(), &self.text, true)
3825 {
3826 let mut cursor = layer.node().walk();
3827
3828 // Find the node that both contains the range and is larger than it.
3829 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3830 continue;
3831 }
3832
3833 let left_node = cursor.node();
3834 let mut layer_result = left_node;
3835
3836 // For an empty range, try to find another node immediately to the right of the range.
3837 if left_node.end_byte() == range.start {
3838 let mut right_node = None;
3839 while !cursor.goto_next_sibling() {
3840 if !cursor.goto_parent() {
3841 break;
3842 }
3843 }
3844
3845 while cursor.node().start_byte() == range.start {
3846 right_node = Some(cursor.node());
3847 if !cursor.goto_first_child() {
3848 break;
3849 }
3850 }
3851
3852 // If there is a candidate node on both sides of the (empty) range, then
3853 // decide between the two by favoring a named node over an anonymous token.
3854 // If both nodes are the same in that regard, favor the right one.
3855 if let Some(right_node) = right_node
3856 && (right_node.is_named() || !left_node.is_named())
3857 {
3858 layer_result = right_node;
3859 }
3860 }
3861
3862 if let Some(previous_result) = &result
3863 && previous_result.byte_range().len() < layer_result.byte_range().len()
3864 {
3865 continue;
3866 }
3867 result = Some(layer_result);
3868 }
3869
3870 result
3871 }
3872
3873 /// Find the previous sibling syntax node at the given range.
3874 ///
3875 /// This function locates the syntax node that precedes the node containing
3876 /// the given range. It searches hierarchically by:
3877 /// 1. Finding the node that contains the given range
3878 /// 2. Looking for the previous sibling at the same tree level
3879 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3880 ///
3881 /// Returns `None` if there is no previous sibling at any ancestor level.
3882 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3883 &'a self,
3884 range: Range<T>,
3885 ) -> Option<tree_sitter::Node<'a>> {
3886 let range = range.start.to_offset(self)..range.end.to_offset(self);
3887 let mut result: Option<tree_sitter::Node<'a>> = None;
3888
3889 for layer in self
3890 .syntax
3891 .layers_for_range(range.clone(), &self.text, true)
3892 {
3893 let mut cursor = layer.node().walk();
3894
3895 // Find the node that contains the range
3896 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3897 continue;
3898 }
3899
3900 // Look for the previous sibling, moving up ancestor levels if needed
3901 loop {
3902 if cursor.goto_previous_sibling() {
3903 let layer_result = cursor.node();
3904
3905 if let Some(previous_result) = &result {
3906 if previous_result.byte_range().end < layer_result.byte_range().end {
3907 continue;
3908 }
3909 }
3910 result = Some(layer_result);
3911 break;
3912 }
3913
3914 // No sibling found at this level, try moving up to parent
3915 if !cursor.goto_parent() {
3916 break;
3917 }
3918 }
3919 }
3920
3921 result
3922 }
3923
3924 /// Find the next sibling syntax node at the given range.
3925 ///
3926 /// This function locates the syntax node that follows the node containing
3927 /// the given range. It searches hierarchically by:
3928 /// 1. Finding the node that contains the given range
3929 /// 2. Looking for the next sibling at the same tree level
3930 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3931 ///
3932 /// Returns `None` if there is no next sibling at any ancestor level.
3933 pub fn syntax_next_sibling<'a, T: ToOffset>(
3934 &'a self,
3935 range: Range<T>,
3936 ) -> Option<tree_sitter::Node<'a>> {
3937 let range = range.start.to_offset(self)..range.end.to_offset(self);
3938 let mut result: Option<tree_sitter::Node<'a>> = None;
3939
3940 for layer in self
3941 .syntax
3942 .layers_for_range(range.clone(), &self.text, true)
3943 {
3944 let mut cursor = layer.node().walk();
3945
3946 // Find the node that contains the range
3947 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3948 continue;
3949 }
3950
3951 // Look for the next sibling, moving up ancestor levels if needed
3952 loop {
3953 if cursor.goto_next_sibling() {
3954 let layer_result = cursor.node();
3955
3956 if let Some(previous_result) = &result {
3957 if previous_result.byte_range().start > layer_result.byte_range().start {
3958 continue;
3959 }
3960 }
3961 result = Some(layer_result);
3962 break;
3963 }
3964
3965 // No sibling found at this level, try moving up to parent
3966 if !cursor.goto_parent() {
3967 break;
3968 }
3969 }
3970 }
3971
3972 result
3973 }
3974
    /// Returns the root syntax node within the given position's row.
3976 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3977 let start_offset = position.to_offset(self);
3978
3979 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3980
3981 let layer = self
3982 .syntax
3983 .layers_for_range(start_offset..start_offset, &self.text, true)
3984 .next()?;
3985
3986 let mut cursor = layer.node().walk();
3987
3988 // Descend to the first leaf that touches the start of the range.
3989 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3990 if cursor.node().end_byte() == start_offset {
3991 cursor.goto_next_sibling();
3992 }
3993 }
3994
3995 // Ascend to the root node within the same row.
3996 while cursor.goto_parent() {
3997 if cursor.node().start_position().row != row {
3998 break;
3999 }
4000 }
4001
4002 Some(cursor.node())
4003 }
4004
4005 /// Returns the outline for the buffer.
4006 ///
4007 /// This method allows passing an optional [`SyntaxTheme`] to
4008 /// syntax-highlight the returned symbols.
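    ///
    /// # Examples
    ///
    /// An illustrative sketch (not a doctest), assuming `snapshot` is in scope; it prints an
    /// indented outline using the underlying items that this method wraps:
    ///
    /// ```ignore
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```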
4009 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4010 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4011 }
4012
4013 /// Returns all the symbols that contain the given position.
4014 ///
4015 /// This method allows passing an optional [`SyntaxTheme`] to
4016 /// syntax-highlight the returned symbols.
4017 pub fn symbols_containing<T: ToOffset>(
4018 &self,
4019 position: T,
4020 theme: Option<&SyntaxTheme>,
4021 ) -> Vec<OutlineItem<Anchor>> {
4022 let position = position.to_offset(self);
4023 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4024 let end = self.clip_offset(position + 1, Bias::Right);
4025 let mut items = self.outline_items_containing(start..end, false, theme);
4026 let mut prev_depth = None;
4027 items.retain(|item| {
4028 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4029 prev_depth = Some(item.depth);
4030 result
4031 });
4032 items
4033 }
4034
4035 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4036 let range = range.to_offset(self);
4037 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4038 grammar.outline_config.as_ref().map(|c| &c.query)
4039 });
4040 let configs = matches
4041 .grammars()
4042 .iter()
4043 .map(|g| g.outline_config.as_ref().unwrap())
4044 .collect::<Vec<_>>();
4045
4046 while let Some(mat) = matches.peek() {
4047 let config = &configs[mat.grammar_index];
4048 let containing_item_node = maybe!({
4049 let item_node = mat.captures.iter().find_map(|cap| {
4050 if cap.index == config.item_capture_ix {
4051 Some(cap.node)
4052 } else {
4053 None
4054 }
4055 })?;
4056
4057 let item_byte_range = item_node.byte_range();
4058 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4059 None
4060 } else {
4061 Some(item_node)
4062 }
4063 });
4064
4065 if let Some(item_node) = containing_item_node {
4066 return Some(
4067 Point::from_ts_point(item_node.start_position())
4068 ..Point::from_ts_point(item_node.end_position()),
4069 );
4070 }
4071
4072 matches.advance();
4073 }
4074 None
4075 }
4076
4077 pub fn outline_items_containing<T: ToOffset>(
4078 &self,
4079 range: Range<T>,
4080 include_extra_context: bool,
4081 theme: Option<&SyntaxTheme>,
4082 ) -> Vec<OutlineItem<Anchor>> {
4083 self.outline_items_containing_internal(
4084 range,
4085 include_extra_context,
4086 theme,
4087 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4088 )
4089 }
4090
4091 pub fn outline_items_as_points_containing<T: ToOffset>(
4092 &self,
4093 range: Range<T>,
4094 include_extra_context: bool,
4095 theme: Option<&SyntaxTheme>,
4096 ) -> Vec<OutlineItem<Point>> {
4097 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4098 range
4099 })
4100 }
4101
4102 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4103 &self,
4104 range: Range<T>,
4105 include_extra_context: bool,
4106 theme: Option<&SyntaxTheme>,
4107 ) -> Vec<OutlineItem<usize>> {
4108 self.outline_items_containing_internal(
4109 range,
4110 include_extra_context,
4111 theme,
4112 |buffer, range| range.to_offset(buffer),
4113 )
4114 }
4115
4116 fn outline_items_containing_internal<T: ToOffset, U>(
4117 &self,
4118 range: Range<T>,
4119 include_extra_context: bool,
4120 theme: Option<&SyntaxTheme>,
4121 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4122 ) -> Vec<OutlineItem<U>> {
4123 let range = range.to_offset(self);
4124 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4125 grammar.outline_config.as_ref().map(|c| &c.query)
4126 });
4127
4128 let mut items = Vec::new();
4129 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4130 while let Some(mat) = matches.peek() {
4131 let config = matches.grammars()[mat.grammar_index]
4132 .outline_config
4133 .as_ref()
4134 .unwrap();
4135 if let Some(item) =
4136 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4137 {
4138 items.push(item);
4139 } else if let Some(capture) = mat
4140 .captures
4141 .iter()
4142 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4143 {
4144 let capture_range = capture.node.start_position()..capture.node.end_position();
4145 let mut capture_row_range =
4146 capture_range.start.row as u32..capture_range.end.row as u32;
4147 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4148 {
4149 capture_row_range.end -= 1;
4150 }
4151 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4152 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4153 last_row_range.end = capture_row_range.end;
4154 } else {
4155 annotation_row_ranges.push(capture_row_range);
4156 }
4157 } else {
4158 annotation_row_ranges.push(capture_row_range);
4159 }
4160 }
4161 matches.advance();
4162 }
4163
4164 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4165
        // Assign depths based on containment relationships and convert ranges via the callback.
4167 let mut item_ends_stack = Vec::<Point>::new();
4168 let mut anchor_items = Vec::new();
4169 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4170 for item in items {
4171 while let Some(last_end) = item_ends_stack.last().copied() {
4172 if last_end < item.range.end {
4173 item_ends_stack.pop();
4174 } else {
4175 break;
4176 }
4177 }
4178
4179 let mut annotation_row_range = None;
4180 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4181 let row_preceding_item = item.range.start.row.saturating_sub(1);
4182 if next_annotation_row_range.end < row_preceding_item {
4183 annotation_row_ranges.next();
4184 } else {
4185 if next_annotation_row_range.end == row_preceding_item {
4186 annotation_row_range = Some(next_annotation_row_range.clone());
4187 annotation_row_ranges.next();
4188 }
4189 break;
4190 }
4191 }
4192
4193 anchor_items.push(OutlineItem {
4194 depth: item_ends_stack.len(),
4195 range: range_callback(self, item.range.clone()),
4196 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4197 text: item.text,
4198 highlight_ranges: item.highlight_ranges,
4199 name_ranges: item.name_ranges,
4200 body_range: item.body_range.map(|r| range_callback(self, r)),
4201 annotation_range: annotation_row_range.map(|annotation_range| {
4202 let point_range = Point::new(annotation_range.start, 0)
4203 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4204 range_callback(self, point_range)
4205 }),
4206 });
4207 item_ends_stack.push(item.range.end);
4208 }
4209
4210 anchor_items
4211 }
4212
4213 fn next_outline_item(
4214 &self,
4215 config: &OutlineConfig,
4216 mat: &SyntaxMapMatch,
4217 range: &Range<usize>,
4218 include_extra_context: bool,
4219 theme: Option<&SyntaxTheme>,
4220 ) -> Option<OutlineItem<Point>> {
4221 let item_node = mat.captures.iter().find_map(|cap| {
4222 if cap.index == config.item_capture_ix {
4223 Some(cap.node)
4224 } else {
4225 None
4226 }
4227 })?;
4228
4229 let item_byte_range = item_node.byte_range();
4230 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4231 return None;
4232 }
4233 let item_point_range = Point::from_ts_point(item_node.start_position())
4234 ..Point::from_ts_point(item_node.end_position());
4235
4236 let mut open_point = None;
4237 let mut close_point = None;
4238
4239 let mut buffer_ranges = Vec::new();
4240 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4241 let mut range = node.start_byte()..node.end_byte();
4242 let start = node.start_position();
4243 if node.end_position().row > start.row {
4244 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4245 }
4246
4247 if !range.is_empty() {
4248 buffer_ranges.push((range, node_is_name));
4249 }
4250 };
4251
4252 for capture in mat.captures {
4253 if capture.index == config.name_capture_ix {
4254 add_to_buffer_ranges(capture.node, true);
4255 } else if Some(capture.index) == config.context_capture_ix
4256 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4257 {
4258 add_to_buffer_ranges(capture.node, false);
4259 } else {
4260 if Some(capture.index) == config.open_capture_ix {
4261 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4262 } else if Some(capture.index) == config.close_capture_ix {
4263 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4264 }
4265 }
4266 }
4267
4268 if buffer_ranges.is_empty() {
4269 return None;
4270 }
4271 let source_range_for_text =
4272 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4273
4274 let mut text = String::new();
4275 let mut highlight_ranges = Vec::new();
4276 let mut name_ranges = Vec::new();
4277 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4278 let mut last_buffer_range_end = 0;
4279 for (buffer_range, is_name) in buffer_ranges {
4280 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4281 if space_added {
4282 text.push(' ');
4283 }
4284 let before_append_len = text.len();
4285 let mut offset = buffer_range.start;
4286 chunks.seek(buffer_range.clone());
4287 for mut chunk in chunks.by_ref() {
4288 if chunk.text.len() > buffer_range.end - offset {
4289 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4290 offset = buffer_range.end;
4291 } else {
4292 offset += chunk.text.len();
4293 }
4294 let style = chunk
4295 .syntax_highlight_id
4296 .zip(theme)
4297 .and_then(|(highlight, theme)| highlight.style(theme));
4298 if let Some(style) = style {
4299 let start = text.len();
4300 let end = start + chunk.text.len();
4301 highlight_ranges.push((start..end, style));
4302 }
4303 text.push_str(chunk.text);
4304 if offset >= buffer_range.end {
4305 break;
4306 }
4307 }
4308 if is_name {
4309 let after_append_len = text.len();
4310 let start = if space_added && !name_ranges.is_empty() {
4311 before_append_len - 1
4312 } else {
4313 before_append_len
4314 };
4315 name_ranges.push(start..after_append_len);
4316 }
4317 last_buffer_range_end = buffer_range.end;
4318 }
4319
4320 Some(OutlineItem {
4321 depth: 0, // We'll calculate the depth later
4322 range: item_point_range,
4323 source_range_for_text: source_range_for_text.to_point(self),
4324 text,
4325 highlight_ranges,
4326 name_ranges,
4327 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4328 annotation_range: None,
4329 })
4330 }
4331
4332 pub fn function_body_fold_ranges<T: ToOffset>(
4333 &self,
4334 within: Range<T>,
4335 ) -> impl Iterator<Item = Range<usize>> + '_ {
4336 self.text_object_ranges(within, TreeSitterOptions::default())
4337 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4338 }
4339
4340 /// For each grammar in the language, runs the provided
4341 /// [`tree_sitter::Query`] against the given range.
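    ///
    /// # Examples
    ///
    /// An illustrative sketch (not a doctest), assuming `snapshot` is in scope; it runs each
    /// grammar's outline query over the whole buffer and walks the captures:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     for capture in mat.captures {
    ///         let _byte_range = capture.node.byte_range();
    ///     }
    ///     matches.advance();
    /// }
    /// ```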
4342 pub fn matches(
4343 &self,
4344 range: Range<usize>,
4345 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4346 ) -> SyntaxMapMatches<'_> {
4347 self.syntax.matches(range, self, query)
4348 }
4349
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that
    /// intersect with those chunks. As a result, it may return more bracket pairs than the range
    /// itself contains.
    ///
    /// Chunks present in `known_chunks` are omitted.
    /// The resulting bracket match collections are not ordered.
4355 pub fn fetch_bracket_ranges(
4356 &self,
4357 range: Range<usize>,
4358 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4359 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4360 let mut all_bracket_matches = HashMap::default();
4361
4362 for chunk in self
4363 .tree_sitter_data
4364 .chunks
4365 .applicable_chunks(&[range.to_point(self)])
4366 {
4367 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4368 continue;
4369 }
4370 let chunk_range = chunk.anchor_range();
4371 let chunk_range = chunk_range.to_offset(&self);
4372
4373 if let Some(cached_brackets) =
4374 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4375 {
4376 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4377 continue;
4378 }
4379
4380 let mut all_brackets = Vec::new();
4381 let mut opens = Vec::new();
4382 let mut color_pairs = Vec::new();
4383
4384 let mut matches = self.syntax.matches_with_options(
4385 chunk_range.clone(),
4386 &self.text,
4387 TreeSitterOptions {
4388 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4389 max_start_depth: None,
4390 },
4391 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4392 );
4393 let configs = matches
4394 .grammars()
4395 .iter()
4396 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4397 .collect::<Vec<_>>();
4398
4399 while let Some(mat) = matches.peek() {
4400 let mut open = None;
4401 let mut close = None;
4402 let syntax_layer_depth = mat.depth;
4403 let config = configs[mat.grammar_index];
4404 let pattern = &config.patterns[mat.pattern_index];
4405 for capture in mat.captures {
4406 if capture.index == config.open_capture_ix {
4407 open = Some(capture.node.byte_range());
4408 } else if capture.index == config.close_capture_ix {
4409 close = Some(capture.node.byte_range());
4410 }
4411 }
4412
4413 matches.advance();
4414
4415 let Some((open_range, close_range)) = open.zip(close) else {
4416 continue;
4417 };
4418
4419 let bracket_range = open_range.start..=close_range.end;
4420 if !bracket_range.overlaps(&chunk_range) {
4421 continue;
4422 }
4423
4424 let index = all_brackets.len();
4425 all_brackets.push(BracketMatch {
4426 open_range: open_range.clone(),
4427 close_range: close_range.clone(),
4428 newline_only: pattern.newline_only,
4429 syntax_layer_depth,
4430 color_index: None,
4431 });
4432
                // Certain languages have "brackets" that are not really brackets, e.g. tags, and
                // such a bracket will match the entire tag along with all of the text inside it.
                // For now, avoid highlighting any pair where both sides are longer than a single
                // character. We still need to colorize `<Element/>` bracket pairs, so this check
                // cannot be made stricter.
4437 let should_color =
4438 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4439 if should_color {
4440 opens.push(open_range.clone());
4441 color_pairs.push((open_range, close_range, index));
4442 }
4443 }
4444
4445 opens.sort_by_key(|r| (r.start, r.end));
4446 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4447 color_pairs.sort_by_key(|(_, close, _)| close.end);
4448
4449 let mut open_stack = Vec::new();
4450 let mut open_index = 0;
4451 for (open, close, index) in color_pairs {
4452 while open_index < opens.len() && opens[open_index].start < close.start {
4453 open_stack.push(opens[open_index].clone());
4454 open_index += 1;
4455 }
4456
4457 if open_stack.last() == Some(&open) {
4458 let depth_index = open_stack.len() - 1;
4459 all_brackets[index].color_index = Some(depth_index);
4460 open_stack.pop();
4461 }
4462 }
4463
4464 all_brackets.sort_by_key(|bracket_match| {
4465 (bracket_match.open_range.start, bracket_match.open_range.end)
4466 });
4467
4468 if let empty_slot @ None =
4469 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4470 {
4471 *empty_slot = Some(all_brackets.clone());
4472 }
4473 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4474 }
4475
4476 all_bracket_matches
4477 }
4478
4479 pub fn all_bracket_ranges(
4480 &self,
4481 range: Range<usize>,
4482 ) -> impl Iterator<Item = BracketMatch<usize>> {
4483 self.fetch_bracket_ranges(range.clone(), None)
4484 .into_values()
4485 .flatten()
4486 .filter(move |bracket_match| {
4487 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4488 bracket_range.overlaps(&range)
4489 })
4490 }
4491
    /// Returns bracket range pairs overlapping or adjacent to `range`.
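    ///
    /// # Examples
    ///
    /// An illustrative sketch (not a doctest), assuming `snapshot` and a cursor `offset`:
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(offset..offset) {
    ///     // Byte ranges of the opening and closing brackets, respectively.
    ///     let (open, close) = (pair.open_range, pair.close_range);
    /// }
    /// ```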
4493 pub fn bracket_ranges<T: ToOffset>(
4494 &self,
4495 range: Range<T>,
4496 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4497 // Find bracket pairs that *inclusively* contain the given range.
4498 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4499 self.all_bracket_ranges(range)
4500 .filter(|pair| !pair.newline_only)
4501 }
4502
4503 pub fn debug_variables_query<T: ToOffset>(
4504 &self,
4505 range: Range<T>,
4506 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4507 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4508
4509 let mut matches = self.syntax.matches_with_options(
4510 range.clone(),
4511 &self.text,
4512 TreeSitterOptions::default(),
4513 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4514 );
4515
4516 let configs = matches
4517 .grammars()
4518 .iter()
4519 .map(|grammar| grammar.debug_variables_config.as_ref())
4520 .collect::<Vec<_>>();
4521
4522 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4523
4524 iter::from_fn(move || {
4525 loop {
4526 while let Some(capture) = captures.pop() {
4527 if capture.0.overlaps(&range) {
4528 return Some(capture);
4529 }
4530 }
4531
4532 let mat = matches.peek()?;
4533
4534 let Some(config) = configs[mat.grammar_index].as_ref() else {
4535 matches.advance();
4536 continue;
4537 };
4538
4539 for capture in mat.captures {
4540 let Some(ix) = config
4541 .objects_by_capture_ix
4542 .binary_search_by_key(&capture.index, |e| e.0)
4543 .ok()
4544 else {
4545 continue;
4546 };
4547 let text_object = config.objects_by_capture_ix[ix].1;
4548 let byte_range = capture.node.byte_range();
4549
4550 let mut found = false;
4551 for (range, existing) in captures.iter_mut() {
4552 if existing == &text_object {
4553 range.start = range.start.min(byte_range.start);
4554 range.end = range.end.max(byte_range.end);
4555 found = true;
4556 break;
4557 }
4558 }
4559
4560 if !found {
4561 captures.push((byte_range, text_object));
4562 }
4563 }
4564
4565 matches.advance();
4566 }
4567 })
4568 }
4569
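    /// Returns the ranges of text objects captured by each grammar's text-object query within the
    /// given range, together with the kind of [`TextObject`] each range represents.
    ///
    /// # Examples
    ///
    /// An illustrative sketch (not a doctest), assuming `snapshot` and a cursor `offset`; it finds
    /// a function body overlapping the cursor position, if any:
    ///
    /// ```ignore
    /// let function_interior = snapshot
    ///     .text_object_ranges(offset..offset, TreeSitterOptions::default())
    ///     .find_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range));
    /// ```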
4570 pub fn text_object_ranges<T: ToOffset>(
4571 &self,
4572 range: Range<T>,
4573 options: TreeSitterOptions,
4574 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4575 let range =
4576 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4577
4578 let mut matches =
4579 self.syntax
4580 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4581 grammar.text_object_config.as_ref().map(|c| &c.query)
4582 });
4583
4584 let configs = matches
4585 .grammars()
4586 .iter()
4587 .map(|grammar| grammar.text_object_config.as_ref())
4588 .collect::<Vec<_>>();
4589
4590 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4591
4592 iter::from_fn(move || {
4593 loop {
4594 while let Some(capture) = captures.pop() {
4595 if capture.0.overlaps(&range) {
4596 return Some(capture);
4597 }
4598 }
4599
4600 let mat = matches.peek()?;
4601
4602 let Some(config) = configs[mat.grammar_index].as_ref() else {
4603 matches.advance();
4604 continue;
4605 };
4606
4607 for capture in mat.captures {
4608 let Some(ix) = config
4609 .text_objects_by_capture_ix
4610 .binary_search_by_key(&capture.index, |e| e.0)
4611 .ok()
4612 else {
4613 continue;
4614 };
4615 let text_object = config.text_objects_by_capture_ix[ix].1;
4616 let byte_range = capture.node.byte_range();
4617
4618 let mut found = false;
4619 for (range, existing) in captures.iter_mut() {
4620 if existing == &text_object {
4621 range.start = range.start.min(byte_range.start);
4622 range.end = range.end.max(byte_range.end);
4623 found = true;
4624 break;
4625 }
4626 }
4627
4628 if !found {
4629 captures.push((byte_range, text_object));
4630 }
4631 }
4632
4633 matches.advance();
4634 }
4635 })
4636 }
4637
    /// Returns enclosing bracket ranges containing the given range.
4639 pub fn enclosing_bracket_ranges<T: ToOffset>(
4640 &self,
4641 range: Range<T>,
4642 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4643 let range = range.start.to_offset(self)..range.end.to_offset(self);
4644
4645 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4646 let max_depth = result
4647 .iter()
4648 .map(|mat| mat.syntax_layer_depth)
4649 .max()
4650 .unwrap_or(0);
4651 result.into_iter().filter(move |pair| {
4652 pair.open_range.start <= range.start
4653 && pair.close_range.end >= range.end
4654 && pair.syntax_layer_depth == max_depth
4655 })
4656 }
4657
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no
    /// bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to filter which bracket ranges are considered.
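    ///
    /// # Examples
    ///
    /// An illustrative sketch (not a doctest), assuming `snapshot` and a `selection: Range<usize>`:
    ///
    /// ```ignore
    /// // Return `false` from the filter to skip a pair; here we skip pairs whose opening
    /// // bracket starts exactly at the selection start.
    /// let filter = |open: Range<usize>, _close: Range<usize>| open.start != selection.start;
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(selection.clone(), Some(&filter))
    /// {
    ///     // `open` and `close` are the byte ranges of the innermost surrounding pair.
    /// }
    /// ```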
4661 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4662 &self,
4663 range: Range<T>,
4664 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4665 ) -> Option<(Range<usize>, Range<usize>)> {
4666 let range = range.start.to_offset(self)..range.end.to_offset(self);
4667
4668 // Get the ranges of the innermost pair of brackets.
4669 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4670
4671 for pair in self.enclosing_bracket_ranges(range) {
4672 if let Some(range_filter) = range_filter
4673 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4674 {
4675 continue;
4676 }
4677
4678 let len = pair.close_range.end - pair.open_range.start;
4679
4680 if let Some((existing_open, existing_close)) = &result {
4681 let existing_len = existing_close.end - existing_open.start;
4682 if len > existing_len {
4683 continue;
4684 }
4685 }
4686
4687 result = Some((pair.open_range, pair.close_range));
4688 }
4689
4690 result
4691 }
4692
4693 /// Returns anchor ranges for any matches of the redaction query.
4694 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4695 /// will be run on the relevant section of the buffer.
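    ///
    /// # Examples
    ///
    /// An illustrative sketch (not a doctest), assuming `snapshot` is in scope:
    ///
    /// ```ignore
    /// // Byte ranges matched by each language's redaction query; callers typically obscure
    /// // these spans when rendering.
    /// let redactions: Vec<_> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```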
4696 pub fn redacted_ranges<T: ToOffset>(
4697 &self,
4698 range: Range<T>,
4699 ) -> impl Iterator<Item = Range<usize>> + '_ {
4700 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4701 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4702 grammar
4703 .redactions_config
4704 .as_ref()
4705 .map(|config| &config.query)
4706 });
4707
4708 let configs = syntax_matches
4709 .grammars()
4710 .iter()
4711 .map(|grammar| grammar.redactions_config.as_ref())
4712 .collect::<Vec<_>>();
4713
4714 iter::from_fn(move || {
4715 let redacted_range = syntax_matches
4716 .peek()
4717 .and_then(|mat| {
4718 configs[mat.grammar_index].and_then(|config| {
4719 mat.captures
4720 .iter()
4721 .find(|capture| capture.index == config.redaction_capture_ix)
4722 })
4723 })
4724 .map(|mat| mat.node.byte_range());
4725 syntax_matches.advance();
4726 redacted_range
4727 })
4728 }
4729
4730 pub fn injections_intersecting_range<T: ToOffset>(
4731 &self,
4732 range: Range<T>,
4733 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4734 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4735
4736 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4737 grammar
4738 .injection_config
4739 .as_ref()
4740 .map(|config| &config.query)
4741 });
4742
4743 let configs = syntax_matches
4744 .grammars()
4745 .iter()
4746 .map(|grammar| grammar.injection_config.as_ref())
4747 .collect::<Vec<_>>();
4748
4749 iter::from_fn(move || {
4750 let ranges = syntax_matches.peek().and_then(|mat| {
4751 let config = &configs[mat.grammar_index]?;
4752 let content_capture_range = mat.captures.iter().find_map(|capture| {
4753 if capture.index == config.content_capture_ix {
4754 Some(capture.node.byte_range())
4755 } else {
4756 None
4757 }
4758 })?;
4759 let language = self.language_at(content_capture_range.start)?;
4760 Some((content_capture_range, language))
4761 });
4762 syntax_matches.advance();
4763 ranges
4764 })
4765 }
4766
4767 pub fn runnable_ranges(
4768 &self,
4769 offset_range: Range<usize>,
4770 ) -> impl Iterator<Item = RunnableRange> + '_ {
4771 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4772 grammar.runnable_config.as_ref().map(|config| &config.query)
4773 });
4774
4775 let test_configs = syntax_matches
4776 .grammars()
4777 .iter()
4778 .map(|grammar| grammar.runnable_config.as_ref())
4779 .collect::<Vec<_>>();
4780
4781 iter::from_fn(move || {
4782 loop {
4783 let mat = syntax_matches.peek()?;
4784
4785 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4786 let mut run_range = None;
4787 let full_range = mat.captures.iter().fold(
4788 Range {
4789 start: usize::MAX,
4790 end: 0,
4791 },
4792 |mut acc, next| {
4793 let byte_range = next.node.byte_range();
4794 if acc.start > byte_range.start {
4795 acc.start = byte_range.start;
4796 }
4797 if acc.end < byte_range.end {
4798 acc.end = byte_range.end;
4799 }
4800 acc
4801 },
4802 );
4803 if full_range.start > full_range.end {
4804 // We did not find a full spanning range of this match.
4805 return None;
4806 }
4807 let extra_captures: SmallVec<[_; 1]> =
4808 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4809 test_configs
4810 .extra_captures
4811 .get(capture.index as usize)
4812 .cloned()
4813 .and_then(|tag_name| match tag_name {
4814 RunnableCapture::Named(name) => {
4815 Some((capture.node.byte_range(), name))
4816 }
4817 RunnableCapture::Run => {
4818 let _ = run_range.insert(capture.node.byte_range());
4819 None
4820 }
4821 })
4822 }));
4823 let run_range = run_range?;
4824 let tags = test_configs
4825 .query
4826 .property_settings(mat.pattern_index)
4827 .iter()
4828 .filter_map(|property| {
4829 if *property.key == *"tag" {
4830 property
4831 .value
4832 .as_ref()
4833 .map(|value| RunnableTag(value.to_string().into()))
4834 } else {
4835 None
4836 }
4837 })
4838 .collect();
4839 let extra_captures = extra_captures
4840 .into_iter()
4841 .map(|(range, name)| {
4842 (
4843 name.to_string(),
4844 self.text_for_range(range).collect::<String>(),
4845 )
4846 })
4847 .collect();
4848 // All tags should have the same range.
4849 Some(RunnableRange {
4850 run_range,
4851 full_range,
4852 runnable: Runnable {
4853 tags,
4854 language: mat.language,
4855 buffer: self.remote_id(),
4856 },
4857 extra_captures,
4858 buffer_id: self.remote_id(),
4859 })
4860 });
4861
4862 syntax_matches.advance();
4863 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns None. However, we don't
                    // want to end this iterator just because a match did not contain a run marker,
                    // so in that case we loop around and try the next match.
4866 return test_range;
4867 }
4868 }
4869 })
4870 }
4871
4872 /// Returns selections for remote peers intersecting the given range.
4873 #[allow(clippy::type_complexity)]
4874 pub fn selections_in_range(
4875 &self,
4876 range: Range<Anchor>,
4877 include_local: bool,
4878 ) -> impl Iterator<
4879 Item = (
4880 ReplicaId,
4881 bool,
4882 CursorShape,
4883 impl Iterator<Item = &Selection<Anchor>> + '_,
4884 ),
4885 > + '_ {
4886 self.remote_selections
4887 .iter()
4888 .filter(move |(replica_id, set)| {
4889 (include_local || **replica_id != self.text.replica_id())
4890 && !set.selections.is_empty()
4891 })
4892 .map(move |(replica_id, set)| {
4893 let start_ix = match set.selections.binary_search_by(|probe| {
4894 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4895 }) {
4896 Ok(ix) | Err(ix) => ix,
4897 };
4898 let end_ix = match set.selections.binary_search_by(|probe| {
4899 probe.start.cmp(&range.end, self).then(Ordering::Less)
4900 }) {
4901 Ok(ix) | Err(ix) => ix,
4902 };
4903
4904 (
4905 *replica_id,
4906 set.line_mode,
4907 set.cursor_shape,
4908 set.selections[start_ix..end_ix].iter(),
4909 )
4910 })
4911 }
4912
    /// Returns whether the buffer contains any diagnostics.
4914 pub fn has_diagnostics(&self) -> bool {
4915 !self.diagnostics.is_empty()
4916 }
4917
4918 /// Returns all the diagnostics intersecting the given range.
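    ///
    /// # Examples
    ///
    /// An illustrative sketch (not a doctest), assuming `snapshot` is in scope; it collects all
    /// error-severity entries as byte-offset ranges:
    ///
    /// ```ignore
    /// let errors: Vec<_> = snapshot
    ///     .diagnostics_in_range::<_, usize>(0..snapshot.len(), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```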
4919 pub fn diagnostics_in_range<'a, T, O>(
4920 &'a self,
4921 search_range: Range<T>,
4922 reversed: bool,
4923 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4924 where
4925 T: 'a + Clone + ToOffset,
4926 O: 'a + FromAnchor,
4927 {
4928 let mut iterators: Vec<_> = self
4929 .diagnostics
4930 .iter()
4931 .map(|(_, collection)| {
4932 collection
4933 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4934 .peekable()
4935 })
4936 .collect();
4937
4938 std::iter::from_fn(move || {
4939 let (next_ix, _) = iterators
4940 .iter_mut()
4941 .enumerate()
4942 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4943 .min_by(|(_, a), (_, b)| {
4944 let cmp = a
4945 .range
4946 .start
4947 .cmp(&b.range.start, self)
4948 // when range is equal, sort by diagnostic severity
4949 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4950 // and stabilize order with group_id
4951 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4952 if reversed { cmp.reverse() } else { cmp }
4953 })?;
4954 iterators[next_ix]
4955 .next()
4956 .map(
4957 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4958 diagnostic,
4959 range: FromAnchor::from_anchor(&range.start, self)
4960 ..FromAnchor::from_anchor(&range.end, self),
4961 },
4962 )
4963 })
4964 }
4965
4966 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4967 /// should be used instead.
4968 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4969 &self.diagnostics
4970 }
4971
    /// Returns all the diagnostic groups associated with the given
    /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
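    ///
    /// # Examples
    ///
    /// An illustrative sketch (not a doctest), assuming `snapshot` is in scope:
    ///
    /// ```ignore
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     // The primary entry of each group, with its anchor range in the buffer.
    ///     let primary = &group.entries[group.primary_ix];
    ///     let _range = primary.range.clone();
    /// }
    /// ```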
4975 pub fn diagnostic_groups(
4976 &self,
4977 language_server_id: Option<LanguageServerId>,
4978 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4979 let mut groups = Vec::new();
4980
4981 if let Some(language_server_id) = language_server_id {
4982 if let Ok(ix) = self
4983 .diagnostics
4984 .binary_search_by_key(&language_server_id, |e| e.0)
4985 {
4986 self.diagnostics[ix]
4987 .1
4988 .groups(language_server_id, &mut groups, self);
4989 }
4990 } else {
4991 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4992 diagnostics.groups(*language_server_id, &mut groups, self);
4993 }
4994 }
4995
4996 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4997 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4998 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4999 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5000 });
5001
5002 groups
5003 }
5004
5005 /// Returns an iterator over the diagnostics for the given group.
5006 pub fn diagnostic_group<O>(
5007 &self,
5008 group_id: usize,
5009 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5010 where
5011 O: FromAnchor + 'static,
5012 {
5013 self.diagnostics
5014 .iter()
5015 .flat_map(move |(_, set)| set.group(group_id, self))
5016 }
5017
5018 /// An integer version number that accounts for all updates besides
5019 /// the buffer's text itself (which is versioned via a version vector).
5020 pub fn non_text_state_update_count(&self) -> usize {
5021 self.non_text_state_update_count
5022 }
5023
5024 /// An integer version that changes when the buffer's syntax changes.
5025 pub fn syntax_update_count(&self) -> usize {
5026 self.syntax.update_count()
5027 }
5028
    /// Returns a snapshot of the underlying file.
5030 pub fn file(&self) -> Option<&Arc<dyn File>> {
5031 self.file.as_ref()
5032 }
5033
5034 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5035 if let Some(file) = self.file() {
5036 if file.path().file_name().is_none() || include_root {
5037 Some(file.full_path(cx).to_string_lossy().into_owned())
5038 } else {
5039 Some(file.path().display(file.path_style(cx)).to_string())
5040 }
5041 } else {
5042 None
5043 }
5044 }
5045
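    /// Collects the distinct words in the given buffer range, mapped to their anchor ranges, and
    /// optionally fuzzy-filtered by [`WordsQuery::fuzzy_contents`].
    ///
    /// # Examples
    ///
    /// An illustrative sketch (not a doctest), assuming `snapshot` is in scope:
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// // `words` maps each matching word to a `Range<Anchor>` locating it in the buffer.
    /// ```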
5046 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5047 let query_str = query.fuzzy_contents;
5048 if query_str.is_some_and(|query| query.is_empty()) {
5049 return BTreeMap::default();
5050 }
5051
5052 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5053 language,
5054 override_id: None,
5055 }));
5056
5057 let mut query_ix = 0;
5058 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5059 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5060
5061 let mut words = BTreeMap::default();
5062 let mut current_word_start_ix = None;
5063 let mut chunk_ix = query.range.start;
5064 for chunk in self.chunks(query.range, false) {
5065 for (i, c) in chunk.text.char_indices() {
5066 let ix = chunk_ix + i;
5067 if classifier.is_word(c) {
5068 if current_word_start_ix.is_none() {
5069 current_word_start_ix = Some(ix);
5070 }
5071
5072 if let Some(query_chars) = &query_chars
5073 && query_ix < query_len
5074 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5075 {
5076 query_ix += 1;
5077 }
5078 continue;
5079 } else if let Some(word_start) = current_word_start_ix.take()
5080 && query_ix == query_len
5081 {
5082 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5083 let mut word_text = self.text_for_range(word_start..ix).peekable();
5084 let first_char = word_text
5085 .peek()
5086 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip "words" that start with
                    // a digit.
5088 if !query.skip_digits
5089 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5090 {
5091 words.insert(word_text.collect(), word_range);
5092 }
5093 }
5094 query_ix = 0;
5095 }
5096 chunk_ix += chunk.text.len();
5097 }
5098
5099 words
5100 }
5101}
5102
5103pub struct WordsQuery<'a> {
    /// Only returns words that contain every character of this fuzzy string, in order
    /// (case-insensitive).
5105 pub fuzzy_contents: Option<&'a str>,
5106 /// Skips words that start with a digit.
5107 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
5109 pub range: Range<usize>,
5110}
5111
5112fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5113 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5114}
5115
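/// Measures the leading whitespace of `text`. The indent kind is determined by the first
/// whitespace character, and the length counts every leading space or tab. For example
/// (illustrative only), `indent_size_for_text("    fn main() {}".chars())` yields an indent of
/// length 4 with [`IndentKind::Space`].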
5116fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5117 let mut result = IndentSize::spaces(0);
5118 for c in text {
5119 let kind = match c {
5120 ' ' => IndentKind::Space,
5121 '\t' => IndentKind::Tab,
5122 _ => break,
5123 };
5124 if result.len == 0 {
5125 result.kind = kind;
5126 }
5127 result.len += 1;
5128 }
5129 result
5130}
5131
5132impl Clone for BufferSnapshot {
5133 fn clone(&self) -> Self {
5134 Self {
5135 text: self.text.clone(),
5136 syntax: self.syntax.clone(),
5137 file: self.file.clone(),
5138 remote_selections: self.remote_selections.clone(),
5139 diagnostics: self.diagnostics.clone(),
5140 language: self.language.clone(),
5141 tree_sitter_data: self.tree_sitter_data.clone(),
5142 non_text_state_update_count: self.non_text_state_update_count,
5143 }
5144 }
5145}
5146
5147impl Deref for BufferSnapshot {
5148 type Target = text::BufferSnapshot;
5149
5150 fn deref(&self) -> &Self::Target {
5151 &self.text
5152 }
5153}
5154
5155unsafe impl Send for BufferChunks<'_> {}
5156
5157impl<'a> BufferChunks<'a> {
5158 pub(crate) fn new(
5159 text: &'a Rope,
5160 range: Range<usize>,
5161 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5162 diagnostics: bool,
5163 buffer_snapshot: Option<&'a BufferSnapshot>,
5164 ) -> Self {
5165 let mut highlights = None;
5166 if let Some((captures, highlight_maps)) = syntax {
5167 highlights = Some(BufferChunkHighlights {
5168 captures,
5169 next_capture: None,
5170 stack: Default::default(),
5171 highlight_maps,
5172 })
5173 }
5174
5175 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5176 let chunks = text.chunks_in_range(range.clone());
5177
5178 let mut this = BufferChunks {
5179 range,
5180 buffer_snapshot,
5181 chunks,
5182 diagnostic_endpoints,
5183 error_depth: 0,
5184 warning_depth: 0,
5185 information_depth: 0,
5186 hint_depth: 0,
5187 unnecessary_depth: 0,
5188 underline: true,
5189 highlights,
5190 };
5191 this.initialize_diagnostic_endpoints();
5192 this
5193 }
5194
    /// Seeks to the given byte range in the buffer.
5196 pub fn seek(&mut self, range: Range<usize>) {
5197 let old_range = std::mem::replace(&mut self.range, range.clone());
5198 self.chunks.set_range(self.range.clone());
5199 if let Some(highlights) = self.highlights.as_mut() {
5200 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5201 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5202 highlights
5203 .stack
5204 .retain(|(end_offset, _)| *end_offset > range.start);
5205 if let Some(capture) = &highlights.next_capture
5206 && range.start >= capture.node.start_byte()
5207 {
5208 let next_capture_end = capture.node.end_byte();
5209 if range.start < next_capture_end {
5210 highlights.stack.push((
5211 next_capture_end,
5212 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5213 ));
5214 }
5215 highlights.next_capture.take();
5216 }
5217 } else if let Some(snapshot) = self.buffer_snapshot {
5218 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5219 *highlights = BufferChunkHighlights {
5220 captures,
5221 next_capture: None,
5222 stack: Default::default(),
5223 highlight_maps,
5224 };
5225 } else {
5226 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5227 // Seeking such BufferChunks is not supported.
5228 debug_assert!(
5229 false,
5230 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5231 );
5232 }
5233
5234 highlights.captures.set_byte_range(self.range.clone());
5235 self.initialize_diagnostic_endpoints();
5236 }
5237 }
5238
5239 fn initialize_diagnostic_endpoints(&mut self) {
5240 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5241 && let Some(buffer) = self.buffer_snapshot
5242 {
5243 let mut diagnostic_endpoints = Vec::new();
5244 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5245 diagnostic_endpoints.push(DiagnosticEndpoint {
5246 offset: entry.range.start,
5247 is_start: true,
5248 severity: entry.diagnostic.severity,
5249 is_unnecessary: entry.diagnostic.is_unnecessary,
5250 underline: entry.diagnostic.underline,
5251 });
5252 diagnostic_endpoints.push(DiagnosticEndpoint {
5253 offset: entry.range.end,
5254 is_start: false,
5255 severity: entry.diagnostic.severity,
5256 is_unnecessary: entry.diagnostic.is_unnecessary,
5257 underline: entry.diagnostic.underline,
5258 });
5259 }
5260 diagnostic_endpoints
5261 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5262 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5263 self.hint_depth = 0;
5264 self.error_depth = 0;
5265 self.warning_depth = 0;
5266 self.information_depth = 0;
5267 }
5268 }
5269
5270 /// The current byte offset in the buffer.
5271 pub fn offset(&self) -> usize {
5272 self.range.start
5273 }
5274
5275 pub fn range(&self) -> Range<usize> {
5276 self.range.clone()
5277 }
5278
5279 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5280 let depth = match endpoint.severity {
5281 DiagnosticSeverity::ERROR => &mut self.error_depth,
5282 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5283 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5284 DiagnosticSeverity::HINT => &mut self.hint_depth,
5285 _ => return,
5286 };
5287 if endpoint.is_start {
5288 *depth += 1;
5289 } else {
5290 *depth -= 1;
5291 }
5292
5293 if endpoint.is_unnecessary {
5294 if endpoint.is_start {
5295 self.unnecessary_depth += 1;
5296 } else {
5297 self.unnecessary_depth -= 1;
5298 }
5299 }
5300 }
5301
    fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }

    fn current_code_is_unnecessary(&self) -> bool {
        self.unnecessary_depth > 0
    }
}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a single tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been shrunk or enlarged by the given size along the given direction.
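    ///
    /// A minimal illustrative sketch (marked `ignore`; it assumes the `len` and
    /// `kind` fields are visible to and comparable by the caller):
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    ///
    /// // Growing a four-space indent by four more spaces yields eight spaces.
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(grown.len, 8);
    ///
    /// // Shrinking it by four spaces yields an empty indent.
    /// let shrunk = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(4));
    /// assert_eq!(shrunk.len, 0);
    ///
    /// // Growing an empty indent adopts the kind of the delta.
    /// let from_empty = IndentSize::spaces(0).with_delta(Ordering::Greater, IndentSize::tab());
    /// assert_eq!(from_empty.kind, IndentKind::Tab);
    /// ```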
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

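    /// The width of this indent in columns, counting each tab as `tab_size`
    /// columns.
    ///
    /// A minimal illustrative sketch (marked `ignore`):
    ///
    /// ```ignore
    /// use std::num::NonZeroU32;
    ///
    /// let tab_size = NonZeroU32::new(4).unwrap();
    /// assert_eq!(IndentSize::spaces(3).len_with_expanded_tabs(tab_size), 3);
    /// assert_eq!(IndentSize::tab().len_with_expanded_tabs(tab_size), 4);
    /// ```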
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

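/// Coalesces a sequence of row numbers into contiguous half-open ranges,
/// starting a new range whenever a value does not directly follow the previous
/// one or when a range would exceed `max_len` rows.
///
/// A minimal illustrative sketch (marked `ignore`, since this helper is
/// crate-private):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
///
/// // With a small `max_len`, long runs are split into bounded ranges.
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 4, 5].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..5, 5..6]);
/// ```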
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

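/// Classifies characters as word, whitespace, or punctuation characters,
/// honoring any word-character overrides supplied by an optional [`LanguageScope`].
///
/// A minimal usage sketch (marked `ignore`; illustrative only):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind('_'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
///
/// // With `ignore_punctuation`, punctuation is treated as part of a word.
/// let classifier = classifier.ignore_punctuation(true);
/// assert_eq!(classifier.kind('-'), CharKind::Word);
/// ```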
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
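///
/// A minimal usage sketch (marked `ignore`; offsets are byte offsets into the
/// rope):
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n    let x = 1;\t\n}\n");
/// // One range per line that ends in spaces or tabs.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..14, 29..30]);
/// ```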
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}