1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::{FutureExt as _, channel::oneshot, select};
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TaskLabel, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::{Arc, LazyLock},
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
79/// A label for the background task spawned by the buffer to compute
80/// a diff against the contents of its file.
81pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
82
/// Indicates whether a [`Buffer`] has permission to edit.
84#[derive(PartialEq, Clone, Copy, Debug)]
85pub enum Capability {
86 /// The buffer is a mutable replica.
87 ReadWrite,
88 /// The buffer is a read-only replica.
89 ReadOnly,
90}
91
92pub type BufferRow = u32;
93
94/// An in-memory representation of a source code file, including its text,
95/// syntax trees, git status, and diagnostics.
96pub struct Buffer {
97 text: TextBuffer,
98 branch_state: Option<BufferBranchState>,
99 /// Filesystem state, `None` when there is no path.
100 file: Option<Arc<dyn File>>,
101 /// The mtime of the file when this buffer was last loaded from
102 /// or saved to disk.
103 saved_mtime: Option<MTime>,
104 /// The version vector when this buffer was last loaded from
105 /// or saved to disk.
106 saved_version: clock::Global,
107 preview_version: clock::Global,
108 transaction_depth: usize,
109 was_dirty_before_starting_transaction: Option<bool>,
110 reload_task: Option<Task<Result<()>>>,
111 language: Option<Arc<Language>>,
112 autoindent_requests: Vec<Arc<AutoindentRequest>>,
113 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
114 pending_autoindent: Option<Task<()>>,
115 sync_parse_timeout: Duration,
116 syntax_map: Mutex<SyntaxMap>,
117 reparse: Option<Task<()>>,
118 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
119 non_text_state_update_count: usize,
120 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
121 remote_selections: TreeMap<ReplicaId, SelectionSet>,
122 diagnostics_timestamp: clock::Lamport,
123 completion_triggers: BTreeSet<String>,
124 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
125 completion_triggers_timestamp: clock::Lamport,
126 deferred_ops: OperationQueue<Operation>,
127 capability: Capability,
128 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell holds `(self.version, has_changes)` as of the most recent call.
131 has_unsaved_edits: Cell<(clock::Global, bool)>,
132 change_bits: Vec<rc::Weak<Cell<bool>>>,
133 _subscriptions: Vec<gpui::Subscription>,
134 tree_sitter_data: Arc<TreeSitterData>,
135 encoding: &'static Encoding,
136 has_bom: bool,
137}
138
139#[derive(Debug)]
140pub struct TreeSitterData {
141 chunks: RowChunks,
142 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
143}
144
145const MAX_ROWS_IN_A_CHUNK: u32 = 50;
146
147impl TreeSitterData {
148 fn clear(&mut self, snapshot: text::BufferSnapshot) {
149 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
150 self.brackets_by_chunks.get_mut().clear();
151 self.brackets_by_chunks
152 .get_mut()
153 .resize(self.chunks.len(), None);
154 }
155
156 fn new(snapshot: text::BufferSnapshot) -> Self {
157 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
158 Self {
159 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
160 chunks,
161 }
162 }
163
164 fn version(&self) -> &clock::Global {
165 self.chunks.version()
166 }
167}
168
169#[derive(Copy, Clone, Debug, PartialEq, Eq)]
170pub enum ParseStatus {
171 Idle,
172 Parsing,
173}
174
175struct BufferBranchState {
176 base_buffer: Entity<Buffer>,
177 merged_operations: Vec<Lamport>,
178}
179
180/// An immutable, cheaply cloneable representation of a fixed
181/// state of a buffer.
182pub struct BufferSnapshot {
183 pub text: text::BufferSnapshot,
184 pub syntax: SyntaxSnapshot,
185 file: Option<Arc<dyn File>>,
186 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
187 remote_selections: TreeMap<ReplicaId, SelectionSet>,
188 language: Option<Arc<Language>>,
189 non_text_state_update_count: usize,
190 tree_sitter_data: Arc<TreeSitterData>,
191}
192
193/// The kind and amount of indentation in a particular line. For now,
194/// assumes that indentation is all the same character.
195#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
196pub struct IndentSize {
197 /// The number of bytes that comprise the indentation.
198 pub len: u32,
199 /// The kind of whitespace used for indentation.
200 pub kind: IndentKind,
201}
202
203/// A whitespace character that's used for indentation.
204#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
205pub enum IndentKind {
206 /// An ASCII space character.
207 #[default]
208 Space,
209 /// An ASCII tab character.
210 Tab,
211}
212
213/// The shape of a selection cursor.
214#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
215pub enum CursorShape {
216 /// A vertical bar
217 #[default]
218 Bar,
219 /// A block that surrounds the following character
220 Block,
221 /// An underline that runs along the following character
222 Underline,
223 /// A box drawn around the following character
224 Hollow,
225}
226
227impl From<settings::CursorShape> for CursorShape {
228 fn from(shape: settings::CursorShape) -> Self {
229 match shape {
230 settings::CursorShape::Bar => CursorShape::Bar,
231 settings::CursorShape::Block => CursorShape::Block,
232 settings::CursorShape::Underline => CursorShape::Underline,
233 settings::CursorShape::Hollow => CursorShape::Hollow,
234 }
235 }
236}
237
238#[derive(Clone, Debug)]
239struct SelectionSet {
240 line_mode: bool,
241 cursor_shape: CursorShape,
242 selections: Arc<[Selection<Anchor>]>,
243 lamport_timestamp: clock::Lamport,
244}
245
246/// A diagnostic associated with a certain range of a buffer.
247#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
248pub struct Diagnostic {
249 /// The name of the service that produced this diagnostic.
250 pub source: Option<String>,
251 /// The ID provided by the dynamic registration that produced this diagnostic.
252 pub registration_id: Option<SharedString>,
253 /// A machine-readable code that identifies this diagnostic.
254 pub code: Option<NumberOrString>,
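    /// A URI pointing to further documentation about this diagnostic's code, if provided by the source.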
255 pub code_description: Option<lsp::Uri>,
256 /// Whether this diagnostic is a hint, warning, or error.
257 pub severity: DiagnosticSeverity,
258 /// The human-readable message associated with this diagnostic.
259 pub message: String,
    /// The human-readable message in Markdown format, if available.
261 pub markdown: Option<String>,
262 /// An id that identifies the group to which this diagnostic belongs.
263 ///
264 /// When a language server produces a diagnostic with
265 /// one or more associated diagnostics, those diagnostics are all
266 /// assigned a single group ID.
267 pub group_id: usize,
268 /// Whether this diagnostic is the primary diagnostic for its group.
269 ///
270 /// In a given group, the primary diagnostic is the top-level diagnostic
271 /// returned by the language server. The non-primary diagnostics are the
272 /// associated diagnostics.
273 pub is_primary: bool,
274 /// Whether this diagnostic is considered to originate from an analysis of
275 /// files on disk, as opposed to any unsaved buffer contents. This is a
276 /// property of a given diagnostic source, and is configured for a given
277 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
278 /// for the language server.
279 pub is_disk_based: bool,
280 /// Whether this diagnostic marks unnecessary code.
281 pub is_unnecessary: bool,
    /// A coarse classification of diagnostic groups based on their source.
283 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the server when code actions are requested for this diagnostic.
285 pub data: Option<Value>,
286 /// Whether to underline the corresponding text range in the editor.
287 pub underline: bool,
288}
289
290#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
291pub enum DiagnosticSourceKind {
292 Pulled,
293 Pushed,
294 Other,
295}
296
297/// An operation used to synchronize this buffer with its other replicas.
298#[derive(Clone, Debug, PartialEq)]
299pub enum Operation {
300 /// A text operation.
301 Buffer(text::Operation),
302
303 /// An update to the buffer's diagnostics.
304 UpdateDiagnostics {
305 /// The id of the language server that produced the new diagnostics.
306 server_id: LanguageServerId,
307 /// The diagnostics.
308 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
309 /// The buffer's lamport timestamp.
310 lamport_timestamp: clock::Lamport,
311 },
312
313 /// An update to the most recent selections in this buffer.
314 UpdateSelections {
315 /// The selections.
316 selections: Arc<[Selection<Anchor>]>,
317 /// The buffer's lamport timestamp.
318 lamport_timestamp: clock::Lamport,
319 /// Whether the selections are in 'line mode'.
320 line_mode: bool,
321 /// The [`CursorShape`] associated with these selections.
322 cursor_shape: CursorShape,
323 },
324
325 /// An update to the characters that should trigger autocompletion
326 /// for this buffer.
327 UpdateCompletionTriggers {
328 /// The characters that trigger autocompletion.
329 triggers: Vec<String>,
330 /// The buffer's lamport timestamp.
331 lamport_timestamp: clock::Lamport,
332 /// The language server ID.
333 server_id: LanguageServerId,
334 },
335
336 /// An update to the line ending type of this buffer.
337 UpdateLineEnding {
338 /// The line ending type.
339 line_ending: LineEnding,
340 /// The buffer's lamport timestamp.
341 lamport_timestamp: clock::Lamport,
342 },
343}
344
345/// An event that occurs in a buffer.
346#[derive(Clone, Debug, PartialEq)]
347pub enum BufferEvent {
348 /// The buffer was changed in a way that must be
349 /// propagated to its other replicas.
350 Operation {
351 operation: Operation,
352 is_local: bool,
353 },
354 /// The buffer was edited.
355 Edited,
356 /// The buffer's `dirty` bit changed.
357 DirtyChanged,
358 /// The buffer was saved.
359 Saved,
360 /// The buffer's file was changed on disk.
361 FileHandleChanged,
362 /// The buffer was reloaded.
363 Reloaded,
    /// The buffer needs to be reloaded.
365 ReloadNeeded,
366 /// The buffer's language was changed.
367 /// The boolean indicates whether this buffer did not have a language before, but does now.
368 LanguageChanged(bool),
369 /// The buffer's syntax trees were updated.
370 Reparsed,
371 /// The buffer's diagnostics were updated.
372 DiagnosticsUpdated,
373 /// The buffer gained or lost editing capabilities.
374 CapabilityChanged,
375}
376
377/// The file associated with a buffer.
378pub trait File: Send + Sync + Any {
379 /// Returns the [`LocalFile`] associated with this file, if the
380 /// file is local.
381 fn as_local(&self) -> Option<&dyn LocalFile>;
382
383 /// Returns whether this file is local.
384 fn is_local(&self) -> bool {
385 self.as_local().is_some()
386 }
387
388 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
389 /// only available in some states, such as modification time.
390 fn disk_state(&self) -> DiskState;
391
392 /// Returns the path of this file relative to the worktree's root directory.
393 fn path(&self) -> &Arc<RelPath>;
394
395 /// Returns the path of this file relative to the worktree's parent directory (this means it
396 /// includes the name of the worktree's root folder).
397 fn full_path(&self, cx: &App) -> PathBuf;
398
399 /// Returns the path style of this file.
400 fn path_style(&self, cx: &App) -> PathStyle;
401
402 /// Returns the last component of this handle's absolute path. If this handle refers to the root
403 /// of its worktree, then this method will return the name of the worktree itself.
404 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
405
406 /// Returns the id of the worktree to which this file belongs.
407 ///
408 /// This is needed for looking up project-specific settings.
409 fn worktree_id(&self, cx: &App) -> WorktreeId;
410
411 /// Converts this file into a protobuf message.
412 fn to_proto(&self, cx: &App) -> rpc::proto::File;
413
    /// Returns whether Zed considers this to be a private file.
415 fn is_private(&self) -> bool;
416}
417
418/// The file's storage status - whether it's stored (`Present`), and if so when it was last
419/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
420/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
421/// indicator for new files.
422#[derive(Copy, Clone, Debug, PartialEq)]
423pub enum DiskState {
424 /// File created in Zed that has not been saved.
425 New,
426 /// File present on the filesystem.
427 Present { mtime: MTime },
428 /// Deleted file that was previously present.
429 Deleted,
430}
431
432impl DiskState {
433 /// Returns the file's last known modification time on disk.
434 pub fn mtime(self) -> Option<MTime> {
435 match self {
436 DiskState::New => None,
437 DiskState::Present { mtime } => Some(mtime),
438 DiskState::Deleted => None,
439 }
440 }
441
442 pub fn exists(&self) -> bool {
443 match self {
444 DiskState::New => false,
445 DiskState::Present { .. } => true,
446 DiskState::Deleted => false,
447 }
448 }
449}
450
451/// The file associated with a buffer, in the case where the file is on the local disk.
452pub trait LocalFile: File {
    /// Returns the absolute path of this file.
454 fn abs_path(&self, cx: &App) -> PathBuf;
455
456 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
457 fn load(&self, cx: &App) -> Task<Result<String>>;
458
459 /// Loads the file's contents from disk.
460 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
461}
462
463/// The auto-indent behavior associated with an editing operation.
464/// For some editing operations, each affected line of text has its
465/// indentation recomputed. For other operations, the entire block
466/// of edited text is adjusted uniformly.
467#[derive(Clone, Debug)]
468pub enum AutoindentMode {
469 /// Indent each line of inserted text.
470 EachLine,
471 /// Apply the same indentation adjustment to all of the lines
472 /// in a given insertion.
473 Block {
474 /// The original indentation column of the first line of each
475 /// insertion, if it has been copied.
476 ///
477 /// Knowing this makes it possible to preserve the relative indentation
478 /// of every line in the insertion from when it was copied.
479 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion is shifted by `b - a` columns, preserving its
        /// indentation relative to the first line (see the sketch after this enum).
483 original_indent_columns: Vec<Option<u32>>,
484 },
485}
486
487#[derive(Clone)]
488struct AutoindentRequest {
489 before_edit: BufferSnapshot,
490 entries: Vec<AutoindentRequestEntry>,
491 is_block_mode: bool,
492 ignore_empty_lines: bool,
493}
494
495#[derive(Debug, Clone)]
496struct AutoindentRequestEntry {
497 /// A range of the buffer whose indentation should be adjusted.
498 range: Range<Anchor>,
499 /// Whether or not these lines should be considered brand new, for the
500 /// purpose of auto-indent. When text is not new, its indentation will
501 /// only be adjusted if the suggested indentation level has *changed*
502 /// since the edit was made.
503 first_line_is_new: bool,
504 indent_size: IndentSize,
505 original_indent_column: Option<u32>,
506}
507
508#[derive(Debug)]
509struct IndentSuggestion {
510 basis_row: u32,
511 delta: Ordering,
512 within_error: bool,
513}
514
515struct BufferChunkHighlights<'a> {
516 captures: SyntaxMapCaptures<'a>,
517 next_capture: Option<SyntaxMapCapture<'a>>,
518 stack: Vec<(usize, HighlightId)>,
519 highlight_maps: Vec<HighlightMap>,
520}
521
522/// An iterator that yields chunks of a buffer's text, along with their
523/// syntax highlights and diagnostic status.
524pub struct BufferChunks<'a> {
525 buffer_snapshot: Option<&'a BufferSnapshot>,
526 range: Range<usize>,
527 chunks: text::Chunks<'a>,
528 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
529 error_depth: usize,
530 warning_depth: usize,
531 information_depth: usize,
532 hint_depth: usize,
533 unnecessary_depth: usize,
534 underline: bool,
535 highlights: Option<BufferChunkHighlights<'a>>,
536}
537
538/// A chunk of a buffer's text, along with its syntax highlight and
539/// diagnostic status.
540#[derive(Clone, Debug, Default)]
541pub struct Chunk<'a> {
542 /// The text of the chunk.
543 pub text: &'a str,
544 /// The syntax highlighting style of the chunk.
545 pub syntax_highlight_id: Option<HighlightId>,
546 /// The highlight style that has been applied to this chunk in
547 /// the editor.
548 pub highlight_style: Option<HighlightStyle>,
    /// The severity of the diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which character positions in this chunk are tabs
    /// (see the sketch following this struct).
    pub tabs: u128,
    /// A bitset of the positions at which characters start in this chunk.
    pub chars: u128,
555 /// Whether this chunk of text is marked as unnecessary.
556 pub is_unnecessary: bool,
557 /// Whether this chunk of text was originally a tab character.
558 pub is_tab: bool,
559 /// Whether this chunk of text was originally an inlay.
560 pub is_inlay: bool,
561 /// Whether to underline the corresponding text range in the editor.
562 pub underline: bool,
563}
564
565/// A set of edits to a given version of a buffer, computed asynchronously.
566#[derive(Debug)]
567pub struct Diff {
568 pub base_version: clock::Global,
569 pub line_ending: LineEnding,
570 pub edits: Vec<(Range<usize>, Arc<str>)>,
571}
572
573#[derive(Debug, Clone, Copy)]
574pub(crate) struct DiagnosticEndpoint {
575 offset: usize,
576 is_start: bool,
577 underline: bool,
578 severity: DiagnosticSeverity,
579 is_unnecessary: bool,
580}
581
582/// A class of characters, used for characterizing a run of text.
583#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
584pub enum CharKind {
585 /// Whitespace.
586 Whitespace,
587 /// Punctuation.
588 Punctuation,
589 /// Word.
590 Word,
591}
592
593/// Context for character classification within a specific scope.
594#[derive(Copy, Clone, Eq, PartialEq, Debug)]
595pub enum CharScopeContext {
596 /// Character classification for completion queries.
597 ///
598 /// This context treats certain characters as word constituents that would
599 /// normally be considered punctuation, such as '-' in Tailwind classes
600 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
601 Completion,
602 /// Character classification for linked edits.
603 ///
604 /// This context handles characters that should be treated as part of
605 /// identifiers during linked editing operations, such as '.' in JSX
606 /// component names like `<Animated.View>`.
607 LinkedEdit,
608}
609
/// A runnable is a set of data about a buffer region that can be resolved into a task.
611pub struct Runnable {
612 pub tags: SmallVec<[RunnableTag; 1]>,
613 pub language: Arc<Language>,
614 pub buffer: BufferId,
615}
616
617#[derive(Default, Clone, Debug)]
618pub struct HighlightedText {
619 pub text: SharedString,
620 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
621}
622
623#[derive(Default, Debug)]
624struct HighlightedTextBuilder {
625 pub text: String,
626 highlights: Vec<(Range<usize>, HighlightStyle)>,
627}
628
629impl HighlightedText {
630 pub fn from_buffer_range<T: ToOffset>(
631 range: Range<T>,
632 snapshot: &text::BufferSnapshot,
633 syntax_snapshot: &SyntaxSnapshot,
634 override_style: Option<HighlightStyle>,
635 syntax_theme: &SyntaxTheme,
636 ) -> Self {
637 let mut highlighted_text = HighlightedTextBuilder::default();
638 highlighted_text.add_text_from_buffer_range(
639 range,
640 snapshot,
641 syntax_snapshot,
642 override_style,
643 syntax_theme,
644 );
645 highlighted_text.build()
646 }
647
648 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
649 gpui::StyledText::new(self.text.clone())
650 .with_default_highlights(default_style, self.highlights.iter().cloned())
651 }
652
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// begins within it, along with a boolean indicating whether more lines follow.
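    ///
    /// For example (sketch): for the text `"    let x = 1;\nlet y;"` with no highlights,
    /// the preview is `"let x = 1;"` and the returned boolean is `true`.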
655 pub fn first_line_preview(self) -> (Self, bool) {
656 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
657 let first_line = &self.text[..newline_ix];
658
659 // Trim leading whitespace, unless an edit starts prior to it.
660 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
661 if let Some((first_highlight_range, _)) = self.highlights.first() {
662 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
663 }
664
665 let preview_text = &first_line[preview_start_ix..];
666 let preview_highlights = self
667 .highlights
668 .into_iter()
669 .skip_while(|(range, _)| range.end <= preview_start_ix)
670 .take_while(|(range, _)| range.start < newline_ix)
671 .filter_map(|(mut range, highlight)| {
672 range.start = range.start.saturating_sub(preview_start_ix);
673 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
674 if range.is_empty() {
675 None
676 } else {
677 Some((range, highlight))
678 }
679 });
680
681 let preview = Self {
682 text: SharedString::new(preview_text),
683 highlights: preview_highlights.collect(),
684 };
685
686 (preview, self.text.len() > newline_ix)
687 }
688}
689
690impl HighlightedTextBuilder {
691 pub fn build(self) -> HighlightedText {
692 HighlightedText {
693 text: self.text.into(),
694 highlights: self.highlights,
695 }
696 }
697
698 pub fn add_text_from_buffer_range<T: ToOffset>(
699 &mut self,
700 range: Range<T>,
701 snapshot: &text::BufferSnapshot,
702 syntax_snapshot: &SyntaxSnapshot,
703 override_style: Option<HighlightStyle>,
704 syntax_theme: &SyntaxTheme,
705 ) {
706 let range = range.to_offset(snapshot);
707 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
708 let start = self.text.len();
709 self.text.push_str(chunk.text);
710 let end = self.text.len();
711
712 if let Some(highlight_style) = chunk
713 .syntax_highlight_id
714 .and_then(|id| id.style(syntax_theme))
715 {
716 let highlight_style = override_style.map_or(highlight_style, |override_style| {
717 highlight_style.highlight(override_style)
718 });
719 self.highlights.push((start..end, highlight_style));
720 } else if let Some(override_style) = override_style {
721 self.highlights.push((start..end, override_style));
722 }
723 }
724 }
725
726 fn highlighted_chunks<'a>(
727 range: Range<usize>,
728 snapshot: &'a text::BufferSnapshot,
729 syntax_snapshot: &'a SyntaxSnapshot,
730 ) -> BufferChunks<'a> {
731 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
732 grammar
733 .highlights_config
734 .as_ref()
735 .map(|config| &config.query)
736 });
737
738 let highlight_maps = captures
739 .grammars()
740 .iter()
741 .map(|grammar| grammar.highlight_map())
742 .collect();
743
744 BufferChunks::new(
745 snapshot.as_rope(),
746 range,
747 Some((captures, highlight_maps)),
748 false,
749 None,
750 )
751 }
752}
753
754#[derive(Clone)]
755pub struct EditPreview {
756 old_snapshot: text::BufferSnapshot,
757 applied_edits_snapshot: text::BufferSnapshot,
758 syntax_snapshot: SyntaxSnapshot,
759}
760
761impl EditPreview {
762 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
763 let (first, _) = edits.first()?;
764 let (last, _) = edits.last()?;
765
766 let start = first.start.to_point(&self.old_snapshot);
767 let old_end = last.end.to_point(&self.old_snapshot);
768 let new_end = last
769 .end
770 .bias_right(&self.old_snapshot)
771 .to_point(&self.applied_edits_snapshot);
772
773 let start = Point::new(start.row.saturating_sub(3), 0);
774 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
775 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
776
777 Some(unified_diff(
778 &self
779 .old_snapshot
780 .text_for_range(start..old_end)
781 .collect::<String>(),
782 &self
783 .applied_edits_snapshot
784 .text_for_range(start..new_end)
785 .collect::<String>(),
786 ))
787 }
788
789 pub fn highlight_edits(
790 &self,
791 current_snapshot: &BufferSnapshot,
792 edits: &[(Range<Anchor>, impl AsRef<str>)],
793 include_deletions: bool,
794 cx: &App,
795 ) -> HighlightedText {
796 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
797 return HighlightedText::default();
798 };
799
800 let mut highlighted_text = HighlightedTextBuilder::default();
801
802 let visible_range_in_preview_snapshot =
803 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
804 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
805
806 let insertion_highlight_style = HighlightStyle {
807 background_color: Some(cx.theme().status().created_background),
808 ..Default::default()
809 };
810 let deletion_highlight_style = HighlightStyle {
811 background_color: Some(cx.theme().status().deleted_background),
812 ..Default::default()
813 };
814 let syntax_theme = cx.theme().syntax();
815
816 for (range, edit_text) in edits {
817 let edit_new_end_in_preview_snapshot = range
818 .end
819 .bias_right(&self.old_snapshot)
820 .to_offset(&self.applied_edits_snapshot);
821 let edit_start_in_preview_snapshot =
822 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
823
824 let unchanged_range_in_preview_snapshot =
825 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
826 if !unchanged_range_in_preview_snapshot.is_empty() {
827 highlighted_text.add_text_from_buffer_range(
828 unchanged_range_in_preview_snapshot,
829 &self.applied_edits_snapshot,
830 &self.syntax_snapshot,
831 None,
832 syntax_theme,
833 );
834 }
835
836 let range_in_current_snapshot = range.to_offset(current_snapshot);
837 if include_deletions && !range_in_current_snapshot.is_empty() {
838 highlighted_text.add_text_from_buffer_range(
839 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
842 Some(deletion_highlight_style),
843 syntax_theme,
844 );
845 }
846
847 if !edit_text.as_ref().is_empty() {
848 highlighted_text.add_text_from_buffer_range(
849 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
850 &self.applied_edits_snapshot,
851 &self.syntax_snapshot,
852 Some(insertion_highlight_style),
853 syntax_theme,
854 );
855 }
856
857 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
858 }
859
860 highlighted_text.add_text_from_buffer_range(
861 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
862 &self.applied_edits_snapshot,
863 &self.syntax_snapshot,
864 None,
865 syntax_theme,
866 );
867
868 highlighted_text.build()
869 }
870
871 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
872 cx.new(|cx| {
873 let mut buffer = Buffer::local_normalized(
874 self.applied_edits_snapshot.as_rope().clone(),
875 self.applied_edits_snapshot.line_ending(),
876 cx,
877 );
878 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
879 buffer
880 })
881 }
882
883 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
884 let (first, _) = edits.first()?;
885 let (last, _) = edits.last()?;
886
887 let start = first
888 .start
889 .bias_left(&self.old_snapshot)
890 .to_point(&self.applied_edits_snapshot);
891 let end = last
892 .end
893 .bias_right(&self.old_snapshot)
894 .to_point(&self.applied_edits_snapshot);
895
896 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
897 let range = Point::new(start.row, 0)
898 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
899
900 Some(range)
901 }
902}
903
904#[derive(Clone, Debug, PartialEq, Eq)]
905pub struct BracketMatch<T> {
906 pub open_range: Range<T>,
907 pub close_range: Range<T>,
908 pub newline_only: bool,
909 pub syntax_layer_depth: usize,
910 pub color_index: Option<usize>,
911}
912
913impl<T> BracketMatch<T> {
914 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
915 (self.open_range, self.close_range)
916 }
917}
918
919impl Buffer {
920 /// Create a new buffer with the given base text.
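    ///
    /// A minimal usage sketch, assuming a GPUI context that can construct entities
    /// (e.g. an `App` in tests):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```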
921 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
922 Self::build(
923 TextBuffer::new(
924 ReplicaId::LOCAL,
925 cx.entity_id().as_non_zero_u64().into(),
926 base_text.into(),
927 ),
928 None,
929 Capability::ReadWrite,
930 )
931 }
932
    /// Create a new buffer from base text that has already been normalized (line endings, etc.).
934 pub fn local_normalized(
935 base_text_normalized: Rope,
936 line_ending: LineEnding,
937 cx: &Context<Self>,
938 ) -> Self {
939 Self::build(
940 TextBuffer::new_normalized(
941 ReplicaId::LOCAL,
942 cx.entity_id().as_non_zero_u64().into(),
943 line_ending,
944 base_text_normalized,
945 ),
946 None,
947 Capability::ReadWrite,
948 )
949 }
950
951 /// Create a new buffer that is a replica of a remote buffer.
952 pub fn remote(
953 remote_id: BufferId,
954 replica_id: ReplicaId,
955 capability: Capability,
956 base_text: impl Into<String>,
957 ) -> Self {
958 Self::build(
959 TextBuffer::new(replica_id, remote_id, base_text.into()),
960 None,
961 capability,
962 )
963 }
964
965 /// Create a new buffer that is a replica of a remote buffer, populating its
966 /// state from the given protobuf message.
967 pub fn from_proto(
968 replica_id: ReplicaId,
969 capability: Capability,
970 message: proto::BufferState,
971 file: Option<Arc<dyn File>>,
972 ) -> Result<Self> {
973 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
974 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
975 let mut this = Self::build(buffer, file, capability);
976 this.text.set_line_ending(proto::deserialize_line_ending(
977 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
978 ));
979 this.saved_version = proto::deserialize_version(&message.saved_version);
980 this.saved_mtime = message.saved_mtime.map(|time| time.into());
981 Ok(this)
982 }
983
984 /// Serialize the buffer's state to a protobuf message.
985 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
986 proto::BufferState {
987 id: self.remote_id().into(),
988 file: self.file.as_ref().map(|f| f.to_proto(cx)),
989 base_text: self.base_text().to_string(),
990 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
991 saved_version: proto::serialize_version(&self.saved_version),
992 saved_mtime: self.saved_mtime.map(|time| time.into()),
993 }
994 }
995
996 /// Serialize as protobufs all of the changes to the buffer since the given version.
997 pub fn serialize_ops(
998 &self,
999 since: Option<clock::Global>,
1000 cx: &App,
1001 ) -> Task<Vec<proto::Operation>> {
1002 let mut operations = Vec::new();
1003 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1004
1005 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1006 proto::serialize_operation(&Operation::UpdateSelections {
1007 selections: set.selections.clone(),
1008 lamport_timestamp: set.lamport_timestamp,
1009 line_mode: set.line_mode,
1010 cursor_shape: set.cursor_shape,
1011 })
1012 }));
1013
1014 for (server_id, diagnostics) in &self.diagnostics {
1015 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1016 lamport_timestamp: self.diagnostics_timestamp,
1017 server_id: *server_id,
1018 diagnostics: diagnostics.iter().cloned().collect(),
1019 }));
1020 }
1021
1022 for (server_id, completions) in &self.completion_triggers_per_language_server {
1023 operations.push(proto::serialize_operation(
1024 &Operation::UpdateCompletionTriggers {
1025 triggers: completions.iter().cloned().collect(),
1026 lamport_timestamp: self.completion_triggers_timestamp,
1027 server_id: *server_id,
1028 },
1029 ));
1030 }
1031
1032 let text_operations = self.text.operations().clone();
1033 cx.background_spawn(async move {
1034 let since = since.unwrap_or_default();
1035 operations.extend(
1036 text_operations
1037 .iter()
1038 .filter(|(_, op)| !since.observed(op.timestamp()))
1039 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1040 );
1041 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1042 operations
1043 })
1044 }
1045
1046 /// Assign a language to the buffer, returning the buffer.
1047 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1048 self.set_language_async(Some(language), cx);
1049 self
1050 }
1051
1052 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
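    ///
    /// A usage sketch, assuming `rust_language` is an `Arc<Language>` obtained elsewhere:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx).with_language(rust_language, cx));
    /// ```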
1053 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1054 self.set_language(Some(language), cx);
1055 self
1056 }
1057
1058 /// Returns the [`Capability`] of this buffer.
1059 pub fn capability(&self) -> Capability {
1060 self.capability
1061 }
1062
1063 /// Whether this buffer can only be read.
1064 pub fn read_only(&self) -> bool {
1065 self.capability == Capability::ReadOnly
1066 }
1067
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1069 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1070 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1071 let snapshot = buffer.snapshot();
1072 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1073 let tree_sitter_data = TreeSitterData::new(snapshot);
1074 Self {
1075 saved_mtime,
1076 tree_sitter_data: Arc::new(tree_sitter_data),
1077 saved_version: buffer.version(),
1078 preview_version: buffer.version(),
1079 reload_task: None,
1080 transaction_depth: 0,
1081 was_dirty_before_starting_transaction: None,
1082 has_unsaved_edits: Cell::new((buffer.version(), false)),
1083 text: buffer,
1084 branch_state: None,
1085 file,
1086 capability,
1087 syntax_map,
1088 reparse: None,
1089 non_text_state_update_count: 0,
1090 sync_parse_timeout: Duration::from_millis(1),
1091 parse_status: watch::channel(ParseStatus::Idle),
1092 autoindent_requests: Default::default(),
1093 wait_for_autoindent_txs: Default::default(),
1094 pending_autoindent: Default::default(),
1095 language: None,
1096 remote_selections: Default::default(),
1097 diagnostics: Default::default(),
1098 diagnostics_timestamp: Lamport::MIN,
1099 completion_triggers: Default::default(),
1100 completion_triggers_per_language_server: Default::default(),
1101 completion_triggers_timestamp: Lamport::MIN,
1102 deferred_ops: OperationQueue::new(),
1103 has_conflict: false,
1104 change_bits: Default::default(),
1105 _subscriptions: Vec::new(),
1106 encoding: encoding_rs::UTF_8,
1107 has_bom: false,
1108 }
1109 }
1110
1111 pub fn build_snapshot(
1112 text: Rope,
1113 language: Option<Arc<Language>>,
1114 language_registry: Option<Arc<LanguageRegistry>>,
1115 cx: &mut App,
1116 ) -> impl Future<Output = BufferSnapshot> + use<> {
1117 let entity_id = cx.reserve_entity::<Self>().entity_id();
1118 let buffer_id = entity_id.as_non_zero_u64().into();
1119 async move {
1120 let text =
1121 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1122 .snapshot();
1123 let mut syntax = SyntaxMap::new(&text).snapshot();
1124 if let Some(language) = language.clone() {
1125 let language_registry = language_registry.clone();
1126 syntax.reparse(&text, language_registry, language);
1127 }
1128 let tree_sitter_data = TreeSitterData::new(text.clone());
1129 BufferSnapshot {
1130 text,
1131 syntax,
1132 file: None,
1133 diagnostics: Default::default(),
1134 remote_selections: Default::default(),
1135 tree_sitter_data: Arc::new(tree_sitter_data),
1136 language,
1137 non_text_state_update_count: 0,
1138 }
1139 }
1140 }
1141
1142 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1143 let entity_id = cx.reserve_entity::<Self>().entity_id();
1144 let buffer_id = entity_id.as_non_zero_u64().into();
1145 let text = TextBuffer::new_normalized(
1146 ReplicaId::LOCAL,
1147 buffer_id,
1148 Default::default(),
1149 Rope::new(),
1150 )
1151 .snapshot();
1152 let syntax = SyntaxMap::new(&text).snapshot();
1153 let tree_sitter_data = TreeSitterData::new(text.clone());
1154 BufferSnapshot {
1155 text,
1156 syntax,
1157 tree_sitter_data: Arc::new(tree_sitter_data),
1158 file: None,
1159 diagnostics: Default::default(),
1160 remote_selections: Default::default(),
1161 language: None,
1162 non_text_state_update_count: 0,
1163 }
1164 }
1165
1166 #[cfg(any(test, feature = "test-support"))]
1167 pub fn build_snapshot_sync(
1168 text: Rope,
1169 language: Option<Arc<Language>>,
1170 language_registry: Option<Arc<LanguageRegistry>>,
1171 cx: &mut App,
1172 ) -> BufferSnapshot {
1173 let entity_id = cx.reserve_entity::<Self>().entity_id();
1174 let buffer_id = entity_id.as_non_zero_u64().into();
1175 let text =
1176 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1177 .snapshot();
1178 let mut syntax = SyntaxMap::new(&text).snapshot();
1179 if let Some(language) = language.clone() {
1180 syntax.reparse(&text, language_registry, language);
1181 }
1182 let tree_sitter_data = TreeSitterData::new(text.clone());
1183 BufferSnapshot {
1184 text,
1185 syntax,
1186 tree_sitter_data: Arc::new(tree_sitter_data),
1187 file: None,
1188 diagnostics: Default::default(),
1189 remote_selections: Default::default(),
1190 language,
1191 non_text_state_update_count: 0,
1192 }
1193 }
1194
1195 /// Retrieve a snapshot of the buffer's current state. This is computationally
1196 /// cheap, and allows reading from the buffer on a background thread.
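    ///
    /// A sketch of the intended pattern, assuming a GPUI context with a background executor:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is immutable and cheap to move to a background task.
    ///     let _end = snapshot.text.max_point();
    /// })
    /// .detach();
    /// ```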
1197 pub fn snapshot(&self) -> BufferSnapshot {
1198 let text = self.text.snapshot();
1199 let mut syntax_map = self.syntax_map.lock();
1200 syntax_map.interpolate(&text);
1201 let syntax = syntax_map.snapshot();
1202
1203 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1204 Arc::new(TreeSitterData::new(text.clone()))
1205 } else {
1206 self.tree_sitter_data.clone()
1207 };
1208
1209 BufferSnapshot {
1210 text,
1211 syntax,
1212 tree_sitter_data,
1213 file: self.file.clone(),
1214 remote_selections: self.remote_selections.clone(),
1215 diagnostics: self.diagnostics.clone(),
1216 language: self.language.clone(),
1217 non_text_state_update_count: self.non_text_state_update_count,
1218 }
1219 }
1220
1221 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1222 let this = cx.entity();
1223 cx.new(|cx| {
1224 let mut branch = Self {
1225 branch_state: Some(BufferBranchState {
1226 base_buffer: this.clone(),
1227 merged_operations: Default::default(),
1228 }),
1229 language: self.language.clone(),
1230 has_conflict: self.has_conflict,
1231 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1232 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1233 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1234 };
1235 if let Some(language_registry) = self.language_registry() {
1236 branch.set_language_registry(language_registry);
1237 }
1238
1239 // Reparse the branch buffer so that we get syntax highlighting immediately.
1240 branch.reparse(cx, true);
1241
1242 branch
1243 })
1244 }
1245
1246 pub fn preview_edits(
1247 &self,
1248 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1249 cx: &App,
1250 ) -> Task<EditPreview> {
1251 let registry = self.language_registry();
1252 let language = self.language().cloned();
1253 let old_snapshot = self.text.snapshot();
1254 let mut branch_buffer = self.text.branch();
1255 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1256 cx.background_spawn(async move {
1257 if !edits.is_empty() {
1258 if let Some(language) = language.clone() {
1259 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1260 }
1261
1262 branch_buffer.edit(edits.iter().cloned());
1263 let snapshot = branch_buffer.snapshot();
1264 syntax_snapshot.interpolate(&snapshot);
1265
1266 if let Some(language) = language {
1267 syntax_snapshot.reparse(&snapshot, registry, language);
1268 }
1269 }
1270 EditPreview {
1271 old_snapshot,
1272 applied_edits_snapshot: branch_buffer.snapshot(),
1273 syntax_snapshot,
1274 }
1275 })
1276 }
1277
1278 /// Applies all of the changes in this buffer that intersect any of the
1279 /// given `ranges` to its base buffer.
1280 ///
1281 /// If `ranges` is empty, then all changes will be applied. This buffer must
1282 /// be a branch buffer to call this method.
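    ///
    /// A sketch, assuming `branch` was created via [`Buffer::branch`]:
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     // Apply only the branch's edits that intersect bytes 0..10 back to the base buffer.
    ///     branch.merge_into_base(vec![0..10], cx);
    /// });
    /// ```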
1283 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1284 let Some(base_buffer) = self.base_buffer() else {
1285 debug_panic!("not a branch buffer");
1286 return;
1287 };
1288
1289 let mut ranges = if ranges.is_empty() {
1290 &[0..usize::MAX]
1291 } else {
1292 ranges.as_slice()
1293 }
1294 .iter()
1295 .peekable();
1296
1297 let mut edits = Vec::new();
1298 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1299 let mut is_included = false;
1300 while let Some(range) = ranges.peek() {
1301 if range.end < edit.new.start {
1302 ranges.next().unwrap();
1303 } else {
1304 if range.start <= edit.new.end {
1305 is_included = true;
1306 }
1307 break;
1308 }
1309 }
1310
1311 if is_included {
1312 edits.push((
1313 edit.old.clone(),
1314 self.text_for_range(edit.new.clone()).collect::<String>(),
1315 ));
1316 }
1317 }
1318
1319 let operation = base_buffer.update(cx, |base_buffer, cx| {
1320 // cx.emit(BufferEvent::DiffBaseChanged);
1321 base_buffer.edit(edits, None, cx)
1322 });
1323
1324 if let Some(operation) = operation
1325 && let Some(BufferBranchState {
1326 merged_operations, ..
1327 }) = &mut self.branch_state
1328 {
1329 merged_operations.push(operation);
1330 }
1331 }
1332
1333 fn on_base_buffer_event(
1334 &mut self,
1335 _: Entity<Buffer>,
1336 event: &BufferEvent,
1337 cx: &mut Context<Self>,
1338 ) {
1339 let BufferEvent::Operation { operation, .. } = event else {
1340 return;
1341 };
1342 let Some(BufferBranchState {
1343 merged_operations, ..
1344 }) = &mut self.branch_state
1345 else {
1346 return;
1347 };
1348
1349 let mut operation_to_undo = None;
1350 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1351 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1352 {
1353 merged_operations.remove(ix);
1354 operation_to_undo = Some(operation.timestamp);
1355 }
1356
1357 self.apply_ops([operation.clone()], cx);
1358
1359 if let Some(timestamp) = operation_to_undo {
1360 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1361 self.undo_operations(counts, cx);
1362 }
1363 }
1364
1365 #[cfg(test)]
1366 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1367 &self.text
1368 }
1369
1370 /// Retrieve a snapshot of the buffer's raw text, without any
1371 /// language-related state like the syntax tree or diagnostics.
1372 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1373 self.text.snapshot()
1374 }
1375
1376 /// The file associated with the buffer, if any.
1377 pub fn file(&self) -> Option<&Arc<dyn File>> {
1378 self.file.as_ref()
1379 }
1380
1381 /// The version of the buffer that was last saved or reloaded from disk.
1382 pub fn saved_version(&self) -> &clock::Global {
1383 &self.saved_version
1384 }
1385
1386 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1387 pub fn saved_mtime(&self) -> Option<MTime> {
1388 self.saved_mtime
1389 }
1390
1391 /// Returns the character encoding of the buffer's file.
1392 pub fn encoding(&self) -> &'static Encoding {
1393 self.encoding
1394 }
1395
1396 /// Sets the character encoding of the buffer.
1397 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1398 self.encoding = encoding;
1399 }
1400
1401 /// Returns whether the buffer has a Byte Order Mark.
1402 pub fn has_bom(&self) -> bool {
1403 self.has_bom
1404 }
1405
1406 /// Sets whether the buffer has a Byte Order Mark.
1407 pub fn set_has_bom(&mut self, has_bom: bool) {
1408 self.has_bom = has_bom;
1409 }
1410
1411 /// Assign a language to the buffer.
1412 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1413 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1414 }
1415
1416 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1417 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1418 self.set_language_(language, true, cx);
1419 }
1420
1421 fn set_language_(
1422 &mut self,
1423 language: Option<Arc<Language>>,
1424 may_block: bool,
1425 cx: &mut Context<Self>,
1426 ) {
1427 self.non_text_state_update_count += 1;
1428 self.syntax_map.lock().clear(&self.text);
1429 let old_language = std::mem::replace(&mut self.language, language);
1430 self.was_changed();
1431 self.reparse(cx, may_block);
1432 let has_fresh_language =
1433 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1434 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1435 }
1436
1437 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1438 /// other languages if parts of the buffer are written in different languages.
1439 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1440 self.syntax_map
1441 .lock()
1442 .set_language_registry(language_registry);
1443 }
1444
1445 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1446 self.syntax_map.lock().language_registry()
1447 }
1448
1449 /// Assign the line ending type to the buffer.
1450 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1451 self.text.set_line_ending(line_ending);
1452
1453 let lamport_timestamp = self.text.lamport_clock.tick();
1454 self.send_operation(
1455 Operation::UpdateLineEnding {
1456 line_ending,
1457 lamport_timestamp,
1458 },
1459 true,
1460 cx,
1461 );
1462 }
1463
1464 /// Assign the buffer a new [`Capability`].
1465 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1466 if self.capability != capability {
1467 self.capability = capability;
1468 cx.emit(BufferEvent::CapabilityChanged)
1469 }
1470 }
1471
1472 /// This method is called to signal that the buffer has been saved.
1473 pub fn did_save(
1474 &mut self,
1475 version: clock::Global,
1476 mtime: Option<MTime>,
1477 cx: &mut Context<Self>,
1478 ) {
1479 self.saved_version = version.clone();
1480 self.has_unsaved_edits.set((version, false));
1481 self.has_conflict = false;
1482 self.saved_mtime = mtime;
1483 self.was_changed();
1484 cx.emit(BufferEvent::Saved);
1485 cx.notify();
1486 }
1487
1488 /// Reloads the contents of the buffer from disk.
1489 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1490 let (tx, rx) = futures::channel::oneshot::channel();
1491 let prev_version = self.text.version();
1492 self.reload_task = Some(cx.spawn(async move |this, cx| {
1493 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1494 let file = this.file.as_ref()?.as_local()?;
1495
1496 Some((file.disk_state().mtime(), file.load(cx)))
1497 })?
1498 else {
1499 return Ok(());
1500 };
1501
1502 let new_text = new_text.await?;
1503 let diff = this
1504 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1505 .await;
1506 this.update(cx, |this, cx| {
1507 if this.version() == diff.base_version {
1508 this.finalize_last_transaction();
1509 this.apply_diff(diff, cx);
1510 tx.send(this.finalize_last_transaction().cloned()).ok();
1511 this.has_conflict = false;
1512 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1513 } else {
1514 if !diff.edits.is_empty()
1515 || this
1516 .edits_since::<usize>(&diff.base_version)
1517 .next()
1518 .is_some()
1519 {
1520 this.has_conflict = true;
1521 }
1522
1523 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1524 }
1525
1526 this.reload_task.take();
1527 })
1528 }));
1529 rx
1530 }
1531
1532 /// This method is called to signal that the buffer has been reloaded.
1533 pub fn did_reload(
1534 &mut self,
1535 version: clock::Global,
1536 line_ending: LineEnding,
1537 mtime: Option<MTime>,
1538 cx: &mut Context<Self>,
1539 ) {
1540 self.saved_version = version;
1541 self.has_unsaved_edits
1542 .set((self.saved_version.clone(), false));
1543 self.text.set_line_ending(line_ending);
1544 self.saved_mtime = mtime;
1545 cx.emit(BufferEvent::Reloaded);
1546 cx.notify();
1547 }
1548
1549 /// Updates the [`File`] backing this buffer. This should be called when
1550 /// the file has changed or has been deleted.
1551 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1552 let was_dirty = self.is_dirty();
1553 let mut file_changed = false;
1554
1555 if let Some(old_file) = self.file.as_ref() {
1556 if new_file.path() != old_file.path() {
1557 file_changed = true;
1558 }
1559
1560 let old_state = old_file.disk_state();
1561 let new_state = new_file.disk_state();
1562 if old_state != new_state {
1563 file_changed = true;
1564 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1565 cx.emit(BufferEvent::ReloadNeeded)
1566 }
1567 }
1568 } else {
1569 file_changed = true;
1570 };
1571
1572 self.file = Some(new_file);
1573 if file_changed {
1574 self.was_changed();
1575 self.non_text_state_update_count += 1;
1576 if was_dirty != self.is_dirty() {
1577 cx.emit(BufferEvent::DirtyChanged);
1578 }
1579 cx.emit(BufferEvent::FileHandleChanged);
1580 cx.notify();
1581 }
1582 }
1583
1584 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1585 Some(self.branch_state.as_ref()?.base_buffer.clone())
1586 }
1587
1588 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1589 pub fn language(&self) -> Option<&Arc<Language>> {
1590 self.language.as_ref()
1591 }
1592
1593 /// Returns the [`Language`] at the given location.
1594 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1595 let offset = position.to_offset(self);
1596 let mut is_first = true;
1597 let start_anchor = self.anchor_before(offset);
1598 let end_anchor = self.anchor_after(offset);
1599 self.syntax_map
1600 .lock()
1601 .layers_for_range(offset..offset, &self.text, false)
1602 .filter(|layer| {
1603 if is_first {
1604 is_first = false;
1605 return true;
1606 }
1607
1608 layer
1609 .included_sub_ranges
1610 .map(|sub_ranges| {
1611 sub_ranges.iter().any(|sub_range| {
1612 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1613 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1614 !is_before_start && !is_after_end
1615 })
1616 })
1617 .unwrap_or(true)
1618 })
1619 .last()
1620 .map(|info| info.language.clone())
1621 .or_else(|| self.language.clone())
1622 }
1623
1624 /// Returns each [`Language`] for the active syntax layers at the given location.
1625 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1626 let offset = position.to_offset(self);
1627 let mut languages: Vec<Arc<Language>> = self
1628 .syntax_map
1629 .lock()
1630 .layers_for_range(offset..offset, &self.text, false)
1631 .map(|info| info.language.clone())
1632 .collect();
1633
1634 if languages.is_empty()
1635 && let Some(buffer_language) = self.language()
1636 {
1637 languages.push(buffer_language.clone());
1638 }
1639
1640 languages
1641 }
1642
1643 /// An integer version number that accounts for all updates besides
1644 /// the buffer's text itself (which is versioned via a version vector).
1645 pub fn non_text_state_update_count(&self) -> usize {
1646 self.non_text_state_update_count
1647 }
1648
1649 /// Whether the buffer is being parsed in the background.
1650 #[cfg(any(test, feature = "test-support"))]
1651 pub fn is_parsing(&self) -> bool {
1652 self.reparse.is_some()
1653 }
1654
1655 /// Indicates whether the buffer contains any regions that may be
1656 /// written in a language that hasn't been loaded yet.
1657 pub fn contains_unknown_injections(&self) -> bool {
1658 self.syntax_map.lock().contains_unknown_injections()
1659 }
1660
1661 #[cfg(any(test, feature = "test-support"))]
1662 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1663 self.sync_parse_timeout = timeout;
1664 }
1665
1666 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1667 match Arc::get_mut(&mut self.tree_sitter_data) {
1668 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1669 None => {
1670 let tree_sitter_data = TreeSitterData::new(snapshot);
1671 self.tree_sitter_data = Arc::new(tree_sitter_data)
1672 }
1673 }
1674 }
1675
1676 /// Called after an edit to synchronize the buffer's main parse tree with
1677 /// the buffer's new underlying state.
1678 ///
1679 /// Locks the syntax map and interpolates the edits since the last reparse
1680 /// into the foreground syntax tree.
1681 ///
1682 /// Then takes a stable snapshot of the syntax map before unlocking it.
1683 /// The snapshot with the interpolated edits is sent to a background thread,
1684 /// where we ask Tree-sitter to perform an incremental parse.
1685 ///
    /// Meanwhile, if `may_block` is true, we block the main thread for up to
    /// the sync parse timeout (1ms by default), waiting for the parse to
    /// complete. If it finishes within that window, we proceed synchronously.
    ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// for the parse to complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, we call
    /// back into the main thread and assign the new parse state.
1694 ///
1695 /// If the buffer or grammar changed since the start of the background parse,
1696 /// initiate an additional reparse recursively. To avoid concurrent parses
1697 /// for the same buffer, we only initiate a new parse if we are not already
1698 /// parsing in the background.
1699 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1700 if self.text.version() != *self.tree_sitter_data.version() {
1701 self.invalidate_tree_sitter_data(self.text.snapshot());
1702 }
1703 if self.reparse.is_some() {
1704 return;
1705 }
1706 let language = if let Some(language) = self.language.clone() {
1707 language
1708 } else {
1709 return;
1710 };
1711
1712 let text = self.text_snapshot();
1713 let parsed_version = self.version();
1714
1715 let mut syntax_map = self.syntax_map.lock();
1716 syntax_map.interpolate(&text);
1717 let language_registry = syntax_map.language_registry();
1718 let mut syntax_snapshot = syntax_map.snapshot();
1719 drop(syntax_map);
1720
1721 let mut parse_task = cx
1722 .background_spawn({
1723 let language = language.clone();
1724 let language_registry = language_registry.clone();
1725 async move {
1726 syntax_snapshot.reparse(&text, language_registry, language);
1727 syntax_snapshot
1728 }
1729 })
1730 .fuse();
1731
1732 let mut timeout = cx
1733 .background_executor()
1734 .timer(self.sync_parse_timeout)
1735 .fuse();
1736
1737 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1738 let parse_task = if may_block {
1739 match cx.background_executor().block(async move {
1740 select! {
1741 _ = timeout => {
1742 Err(parse_task)
1743 }
1744 new_syntax_snapshot = parse_task => {
1745 Ok(new_syntax_snapshot)
1746 }
1747 }
1748 }) {
1749 Ok(new_syntax_snapshot) => {
1750 self.did_finish_parsing(new_syntax_snapshot, cx);
1751 self.reparse = None;
1752 return;
1753 }
1754 Err(parse_task) => parse_task,
1755 }
1756 } else {
1757 parse_task
1758 };
1759 self.reparse = Some(cx.spawn(async move |this, cx| {
1760 let new_syntax_map = cx.background_spawn(parse_task).await;
1761 this.update(cx, move |this, cx| {
1762 let grammar_changed = || {
1763 this.language
1764 .as_ref()
1765 .is_none_or(|current_language| !Arc::ptr_eq(&language, current_language))
1766 };
1767 let language_registry_changed = || {
1768 new_syntax_map.contains_unknown_injections()
1769 && language_registry.is_some_and(|registry| {
1770 registry.version() != new_syntax_map.language_registry_version()
1771 })
1772 };
1773 let parse_again = this.version.changed_since(&parsed_version)
1774 || language_registry_changed()
1775 || grammar_changed();
1776 this.did_finish_parsing(new_syntax_map, cx);
1777 this.reparse = None;
1778 if parse_again {
1779 this.reparse(cx, false);
1780 }
1781 })
1782 .ok();
1783 }));
1784 }
1785
1786 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1787 self.was_changed();
1788 self.non_text_state_update_count += 1;
1789 self.syntax_map.lock().did_parse(syntax_snapshot);
1790 self.request_autoindent(cx);
1791 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1792 self.invalidate_tree_sitter_data(self.text.snapshot());
1793 cx.emit(BufferEvent::Reparsed);
1794 cx.notify();
1795 }
1796
1797 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1798 self.parse_status.1.clone()
1799 }
1800
    /// Wait until the buffer is no longer parsing.
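    ///
    /// A minimal sketch (not compiled; assumes an async test with a `Buffer`
    /// entity named `buffer` and a gpui test context `cx`):
    ///
    /// ```ignore
    /// // Wait for any in-flight background parse before inspecting syntax-derived state.
    /// buffer.update(cx, |buffer, _| buffer.parsing_idle()).await;
    /// ```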
1802 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1803 let mut parse_status = self.parse_status();
1804 async move {
1805 while *parse_status.borrow() != ParseStatus::Idle {
1806 if parse_status.changed().await.is_err() {
1807 break;
1808 }
1809 }
1810 }
1811 }
1812
1813 /// Assign to the buffer a set of diagnostics created by a given language server.
1814 pub fn update_diagnostics(
1815 &mut self,
1816 server_id: LanguageServerId,
1817 diagnostics: DiagnosticSet,
1818 cx: &mut Context<Self>,
1819 ) {
1820 let lamport_timestamp = self.text.lamport_clock.tick();
1821 let op = Operation::UpdateDiagnostics {
1822 server_id,
1823 diagnostics: diagnostics.iter().cloned().collect(),
1824 lamport_timestamp,
1825 };
1826
1827 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1828 self.send_operation(op, true, cx);
1829 }
1830
1831 pub fn buffer_diagnostics(
1832 &self,
1833 for_server: Option<LanguageServerId>,
1834 ) -> Vec<&DiagnosticEntry<Anchor>> {
1835 match for_server {
1836 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1837 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1838 Err(_) => Vec::new(),
1839 },
1840 None => self
1841 .diagnostics
1842 .iter()
1843 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1844 .collect(),
1845 }
1846 }
1847
1848 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1849 if let Some(indent_sizes) = self.compute_autoindents() {
1850 let indent_sizes = cx.background_spawn(indent_sizes);
1851 match cx
1852 .background_executor()
1853 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1854 {
1855 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1856 Err(indent_sizes) => {
1857 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1858 let indent_sizes = indent_sizes.await;
1859 this.update(cx, |this, cx| {
1860 this.apply_autoindents(indent_sizes, cx);
1861 })
1862 .ok();
1863 }));
1864 }
1865 }
1866 } else {
1867 self.autoindent_requests.clear();
1868 for tx in self.wait_for_autoindent_txs.drain(..) {
1869 tx.send(()).ok();
1870 }
1871 }
1872 }
1873
1874 fn compute_autoindents(
1875 &self,
1876 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1877 let max_rows_between_yields = 100;
1878 let snapshot = self.snapshot();
1879 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1880 return None;
1881 }
1882
1883 let autoindent_requests = self.autoindent_requests.clone();
1884 Some(async move {
1885 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1886 for request in autoindent_requests {
1887 // Resolve each edited range to its row in the current buffer and in the
1888 // buffer before this batch of edits.
1889 let mut row_ranges = Vec::new();
1890 let mut old_to_new_rows = BTreeMap::new();
1891 let mut language_indent_sizes_by_new_row = Vec::new();
1892 for entry in &request.entries {
1893 let position = entry.range.start;
1894 let new_row = position.to_point(&snapshot).row;
1895 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1896 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1897
1898 if !entry.first_line_is_new {
1899 let old_row = position.to_point(&request.before_edit).row;
1900 old_to_new_rows.insert(old_row, new_row);
1901 }
1902 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1903 }
1904
1905 // Build a map containing the suggested indentation for each of the edited lines
1906 // with respect to the state of the buffer before these edits. This map is keyed
1907 // by the rows for these lines in the current state of the buffer.
1908 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1909 let old_edited_ranges =
1910 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1911 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1912 let mut language_indent_size = IndentSize::default();
1913 for old_edited_range in old_edited_ranges {
1914 let suggestions = request
1915 .before_edit
1916 .suggest_autoindents(old_edited_range.clone())
1917 .into_iter()
1918 .flatten();
1919 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1920 if let Some(suggestion) = suggestion {
1921 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1922
1923 // Find the indent size based on the language for this row.
1924 while let Some((row, size)) = language_indent_sizes.peek() {
1925 if *row > new_row {
1926 break;
1927 }
1928 language_indent_size = *size;
1929 language_indent_sizes.next();
1930 }
1931
1932 let suggested_indent = old_to_new_rows
1933 .get(&suggestion.basis_row)
1934 .and_then(|from_row| {
1935 Some(old_suggestions.get(from_row).copied()?.0)
1936 })
1937 .unwrap_or_else(|| {
1938 request
1939 .before_edit
1940 .indent_size_for_line(suggestion.basis_row)
1941 })
1942 .with_delta(suggestion.delta, language_indent_size);
1943 old_suggestions
1944 .insert(new_row, (suggested_indent, suggestion.within_error));
1945 }
1946 }
1947 yield_now().await;
1948 }
1949
1950 // Compute new suggestions for each line, but only include them in the result
1951 // if they differ from the old suggestion for that line.
1952 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1953 let mut language_indent_size = IndentSize::default();
1954 for (row_range, original_indent_column) in row_ranges {
1955 let new_edited_row_range = if request.is_block_mode {
1956 row_range.start..row_range.start + 1
1957 } else {
1958 row_range.clone()
1959 };
1960
1961 let suggestions = snapshot
1962 .suggest_autoindents(new_edited_row_range.clone())
1963 .into_iter()
1964 .flatten();
1965 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1966 if let Some(suggestion) = suggestion {
1967 // Find the indent size based on the language for this row.
1968 while let Some((row, size)) = language_indent_sizes.peek() {
1969 if *row > new_row {
1970 break;
1971 }
1972 language_indent_size = *size;
1973 language_indent_sizes.next();
1974 }
1975
1976 let suggested_indent = indent_sizes
1977 .get(&suggestion.basis_row)
1978 .copied()
1979 .map(|e| e.0)
1980 .unwrap_or_else(|| {
1981 snapshot.indent_size_for_line(suggestion.basis_row)
1982 })
1983 .with_delta(suggestion.delta, language_indent_size);
1984
1985 if old_suggestions.get(&new_row).is_none_or(
1986 |(old_indentation, was_within_error)| {
1987 suggested_indent != *old_indentation
1988 && (!suggestion.within_error || *was_within_error)
1989 },
1990 ) {
1991 indent_sizes.insert(
1992 new_row,
1993 (suggested_indent, request.ignore_empty_lines),
1994 );
1995 }
1996 }
1997 }
1998
1999 if let (true, Some(original_indent_column)) =
2000 (request.is_block_mode, original_indent_column)
2001 {
2002 let new_indent =
2003 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2004 *indent
2005 } else {
2006 snapshot.indent_size_for_line(row_range.start)
2007 };
2008 let delta = new_indent.len as i64 - original_indent_column as i64;
2009 if delta != 0 {
2010 for row in row_range.skip(1) {
2011 indent_sizes.entry(row).or_insert_with(|| {
2012 let mut size = snapshot.indent_size_for_line(row);
2013 if size.kind == new_indent.kind {
2014 match delta.cmp(&0) {
2015 Ordering::Greater => size.len += delta as u32,
2016 Ordering::Less => {
2017 size.len = size.len.saturating_sub(-delta as u32)
2018 }
2019 Ordering::Equal => {}
2020 }
2021 }
2022 (size, request.ignore_empty_lines)
2023 });
2024 }
2025 }
2026 }
2027
2028 yield_now().await;
2029 }
2030 }
2031
2032 indent_sizes
2033 .into_iter()
2034 .filter_map(|(row, (indent, ignore_empty_lines))| {
2035 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2036 None
2037 } else {
2038 Some((row, indent))
2039 }
2040 })
2041 .collect()
2042 })
2043 }
2044
2045 fn apply_autoindents(
2046 &mut self,
2047 indent_sizes: BTreeMap<u32, IndentSize>,
2048 cx: &mut Context<Self>,
2049 ) {
2050 self.autoindent_requests.clear();
2051 for tx in self.wait_for_autoindent_txs.drain(..) {
2052 tx.send(()).ok();
2053 }
2054
2055 let edits: Vec<_> = indent_sizes
2056 .into_iter()
2057 .filter_map(|(row, indent_size)| {
2058 let current_size = indent_size_for_line(self, row);
2059 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2060 })
2061 .collect();
2062
2063 let preserve_preview = self.preserve_preview();
2064 self.edit(edits, None, cx);
2065 if preserve_preview {
2066 self.refresh_preview();
2067 }
2068 }
2069
2070 /// Create a minimal edit that will cause the given row to be indented
2071 /// with the given size. After applying this edit, the length of the line
2072 /// will always be at least `new_size.len`.
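    ///
    /// A minimal sketch (not compiled), assuming space-based indents:
    ///
    /// ```ignore
    /// // Growing a two-space indent to four spaces inserts two spaces at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     0,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(0, 0)..Point::new(0, 0), "  ".to_string())));
    /// ```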
2073 pub fn edit_for_indent_size_adjustment(
2074 row: u32,
2075 current_size: IndentSize,
2076 new_size: IndentSize,
2077 ) -> Option<(Range<Point>, String)> {
2078 if new_size.kind == current_size.kind {
        match new_size.len.cmp(&current_size.len) {
2080 Ordering::Greater => {
2081 let point = Point::new(row, 0);
2082 Some((
2083 point..point,
2084 iter::repeat(new_size.char())
2085 .take((new_size.len - current_size.len) as usize)
2086 .collect::<String>(),
2087 ))
2088 }
2089
2090 Ordering::Less => Some((
2091 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2092 String::new(),
2093 )),
2094
2095 Ordering::Equal => None,
2096 }
2097 } else {
2098 Some((
2099 Point::new(row, 0)..Point::new(row, current_size.len),
2100 iter::repeat(new_size.char())
2101 .take(new_size.len as usize)
2102 .collect::<String>(),
2103 ))
2104 }
2105 }
2106
2107 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2108 /// and the given new text.
2109 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2110 let old_text = self.as_rope().clone();
2111 let base_version = self.version();
2112 cx.background_executor()
2113 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2114 let old_text = old_text.to_string();
2115 let line_ending = LineEnding::detect(&new_text);
2116 LineEnding::normalize(&mut new_text);
2117 let edits = text_diff(&old_text, &new_text);
2118 Diff {
2119 base_version,
2120 line_ending,
2121 edits,
2122 }
2123 })
2124 }
2125
2126 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2128 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2129 let old_text = self.as_rope().clone();
2130 let line_ending = self.line_ending();
2131 let base_version = self.version();
2132 cx.background_spawn(async move {
2133 let ranges = trailing_whitespace_ranges(&old_text);
2134 let empty = Arc::<str>::from("");
2135 Diff {
2136 base_version,
2137 line_ending,
2138 edits: ranges
2139 .into_iter()
2140 .map(|range| (range, empty.clone()))
2141 .collect(),
2142 }
2143 })
2144 }
2145
2146 /// Ensures that the buffer ends with a single newline character, and
2147 /// no other whitespace. Skips if the buffer is empty.
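    ///
    /// A minimal sketch (not compiled; assumes a test `Buffer` entity whose text
    /// is `"fn main() {}  \n\n"`):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.ensure_final_newline(cx);
    ///     // The trailing spaces and extra blank line collapse into one newline.
    ///     assert_eq!(buffer.text(), "fn main() {}\n");
    /// });
    /// ```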
2148 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2149 let len = self.len();
2150 if len == 0 {
2151 return;
2152 }
2153 let mut offset = len;
2154 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2155 let non_whitespace_len = chunk
2156 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2157 .len();
2158 offset -= chunk.len();
2159 offset += non_whitespace_len;
2160 if non_whitespace_len != 0 {
2161 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2162 return;
2163 }
2164 break;
2165 }
2166 }
2167 self.edit([(offset..len, "\n")], None, cx);
2168 }
2169
2170 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2171 /// calculated, then adjust the diff to account for those changes, and discard any
2172 /// parts of the diff that conflict with those changes.
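    ///
    /// A minimal sketch (not compiled; assumes an async test with a `Buffer`
    /// entity named `buffer` and a `new_text` string):
    ///
    /// ```ignore
    /// // Compute the diff on a background thread, then apply it on the main thread.
    /// let diff_task = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx));
    /// let diff = diff_task.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// });
    /// ```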
2173 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2174 let snapshot = self.snapshot();
2175 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2176 let mut delta = 0;
2177 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2178 while let Some(edit_since) = edits_since.peek() {
2179 // If the edit occurs after a diff hunk, then it does not
2180 // affect that hunk.
2181 if edit_since.old.start > range.end {
2182 break;
2183 }
2184 // If the edit precedes the diff hunk, then adjust the hunk
2185 // to reflect the edit.
2186 else if edit_since.old.end < range.start {
2187 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2188 edits_since.next();
2189 }
2190 // If the edit intersects a diff hunk, then discard that hunk.
2191 else {
2192 return None;
2193 }
2194 }
2195
2196 let start = (range.start as i64 + delta) as usize;
2197 let end = (range.end as i64 + delta) as usize;
2198 Some((start..end, new_text))
2199 });
2200
2201 self.start_transaction();
2202 self.text.set_line_ending(diff.line_ending);
2203 self.edit(adjusted_edits, None, cx);
2204 self.end_transaction(cx)
2205 }
2206
2207 pub fn has_unsaved_edits(&self) -> bool {
2208 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2209
2210 if last_version == self.version {
2211 self.has_unsaved_edits
2212 .set((last_version, has_unsaved_edits));
2213 return has_unsaved_edits;
2214 }
2215
2216 let has_edits = self.has_edits_since(&self.saved_version);
2217 self.has_unsaved_edits
2218 .set((self.version.clone(), has_edits));
2219 has_edits
2220 }
2221
2222 /// Checks if the buffer has unsaved changes.
2223 pub fn is_dirty(&self) -> bool {
2224 if self.capability == Capability::ReadOnly {
2225 return false;
2226 }
2227 if self.has_conflict {
2228 return true;
2229 }
2230 match self.file.as_ref().map(|f| f.disk_state()) {
2231 Some(DiskState::New) | Some(DiskState::Deleted) => {
2232 !self.is_empty() && self.has_unsaved_edits()
2233 }
2234 _ => self.has_unsaved_edits(),
2235 }
2236 }
2237
2238 /// Marks the buffer as having a conflict regardless of current buffer state.
2239 pub fn set_conflict(&mut self) {
2240 self.has_conflict = true;
2241 }
2242
2243 /// Checks if the buffer and its file have both changed since the buffer
2244 /// was last saved or reloaded.
2245 pub fn has_conflict(&self) -> bool {
2246 if self.has_conflict {
2247 return true;
2248 }
2249 let Some(file) = self.file.as_ref() else {
2250 return false;
2251 };
2252 match file.disk_state() {
2253 DiskState::New => false,
2254 DiskState::Present { mtime } => match self.saved_mtime {
2255 Some(saved_mtime) => {
2256 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2257 }
2258 None => true,
2259 },
2260 DiskState::Deleted => false,
2261 }
2262 }
2263
2264 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2265 pub fn subscribe(&mut self) -> Subscription<usize> {
2266 self.text.subscribe()
2267 }
2268
2269 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2270 ///
2271 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
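    ///
    /// A minimal sketch (not compiled; assumes a test `Buffer` entity and
    /// `std::{cell::Cell, rc::Rc}` in scope):
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.update(cx, |buffer, _| buffer.record_changes(Rc::downgrade(&changed)));
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.append("x", cx);
    /// });
    /// // The bit was flipped synchronously, without waiting for an event.
    /// assert!(changed.get());
    /// ```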
2273 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2274 if let Err(ix) = self
2275 .change_bits
2276 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2277 {
2278 self.change_bits.insert(ix, bit);
2279 }
2280 }
2281
2282 /// Set the change bit for all "listeners".
2283 fn was_changed(&mut self) {
2284 self.change_bits.retain(|change_bit| {
2285 change_bit
2286 .upgrade()
2287 .inspect(|bit| {
2288 _ = bit.replace(true);
2289 })
2290 .is_some()
2291 });
2292 }
2293
2294 /// Starts a transaction, if one is not already in-progress. When undoing or
2295 /// redoing edits, all of the edits performed within a transaction are undone
2296 /// or redone together.
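    ///
    /// A minimal sketch (not compiled; assumes a test `Buffer` entity):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.start_transaction();
    ///     buffer.edit([(0..0, "a")], None, cx);
    ///     buffer.edit([(1..1, "b")], None, cx);
    ///     buffer.end_transaction(cx);
    ///     // A single undo reverts both edits.
    ///     buffer.undo(cx);
    /// });
    /// ```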
2297 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2298 self.start_transaction_at(Instant::now())
2299 }
2300
2301 /// Starts a transaction, providing the current time. Subsequent transactions
2302 /// that occur within a short period of time will be grouped together. This
2303 /// is controlled by the buffer's undo grouping duration.
2304 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2305 self.transaction_depth += 1;
2306 if self.was_dirty_before_starting_transaction.is_none() {
2307 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2308 }
2309 self.text.start_transaction_at(now)
2310 }
2311
2312 /// Terminates the current transaction, if this is the outermost transaction.
2313 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2314 self.end_transaction_at(Instant::now(), cx)
2315 }
2316
2317 /// Terminates the current transaction, providing the current time. Subsequent transactions
2318 /// that occur within a short period of time will be grouped together. This
2319 /// is controlled by the buffer's undo grouping duration.
2320 pub fn end_transaction_at(
2321 &mut self,
2322 now: Instant,
2323 cx: &mut Context<Self>,
2324 ) -> Option<TransactionId> {
2325 assert!(self.transaction_depth > 0);
2326 self.transaction_depth -= 1;
2327 let was_dirty = if self.transaction_depth == 0 {
2328 self.was_dirty_before_starting_transaction.take().unwrap()
2329 } else {
2330 false
2331 };
2332 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2333 self.did_edit(&start_version, was_dirty, cx);
2334 Some(transaction_id)
2335 } else {
2336 None
2337 }
2338 }
2339
2340 /// Manually add a transaction to the buffer's undo history.
2341 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2342 self.text.push_transaction(transaction, now);
2343 }
2344
2345 /// Differs from `push_transaction` in that it does not clear the redo
2346 /// stack. Intended to be used to create a parent transaction to merge
2347 /// potential child transactions into.
2348 ///
2349 /// The caller is responsible for removing it from the undo history using
2350 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2351 /// are merged into this transaction, the caller is responsible for ensuring
2352 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2353 /// cleared is to create transactions with the usual `start_transaction` and
2354 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
2356 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2357 self.text.push_empty_transaction(now)
2358 }
2359
2360 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2362 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2363 self.text.finalize_last_transaction()
2364 }
2365
2366 /// Manually group all changes since a given transaction.
2367 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2368 self.text.group_until_transaction(transaction_id);
2369 }
2370
2371 /// Manually remove a transaction from the buffer's undo history
2372 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2373 self.text.forget_transaction(transaction_id)
2374 }
2375
2376 /// Retrieve a transaction from the buffer's undo history
2377 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2378 self.text.get_transaction(transaction_id)
2379 }
2380
2381 /// Manually merge two transactions in the buffer's undo history.
2382 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2383 self.text.merge_transactions(transaction, destination);
2384 }
2385
2386 /// Waits for the buffer to receive operations with the given timestamps.
2387 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2388 &mut self,
2389 edit_ids: It,
2390 ) -> impl Future<Output = Result<()>> + use<It> {
2391 self.text.wait_for_edits(edit_ids)
2392 }
2393
2394 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2395 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2396 &mut self,
2397 anchors: It,
2398 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2399 self.text.wait_for_anchors(anchors)
2400 }
2401
2402 /// Waits for the buffer to receive operations up to the given version.
2403 pub fn wait_for_version(
2404 &mut self,
2405 version: clock::Global,
2406 ) -> impl Future<Output = Result<()>> + use<> {
2407 self.text.wait_for_version(version)
2408 }
2409
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2412 pub fn give_up_waiting(&mut self) {
2413 self.text.give_up_waiting();
2414 }
2415
2416 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2417 let mut rx = None;
2418 if !self.autoindent_requests.is_empty() {
2419 let channel = oneshot::channel();
2420 self.wait_for_autoindent_txs.push(channel.0);
2421 rx = Some(channel.1);
2422 }
2423 rx
2424 }
2425
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2427 pub fn set_active_selections(
2428 &mut self,
2429 selections: Arc<[Selection<Anchor>]>,
2430 line_mode: bool,
2431 cursor_shape: CursorShape,
2432 cx: &mut Context<Self>,
2433 ) {
2434 let lamport_timestamp = self.text.lamport_clock.tick();
2435 self.remote_selections.insert(
2436 self.text.replica_id(),
2437 SelectionSet {
2438 selections: selections.clone(),
2439 lamport_timestamp,
2440 line_mode,
2441 cursor_shape,
2442 },
2443 );
2444 self.send_operation(
2445 Operation::UpdateSelections {
2446 selections,
2447 line_mode,
2448 lamport_timestamp,
2449 cursor_shape,
2450 },
2451 true,
2452 cx,
2453 );
2454 self.non_text_state_update_count += 1;
2455 cx.notify();
2456 }
2457
2458 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2459 /// this replica.
2460 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2461 if self
2462 .remote_selections
2463 .get(&self.text.replica_id())
2464 .is_none_or(|set| !set.selections.is_empty())
2465 {
2466 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2467 }
2468 }
2469
2470 pub fn set_agent_selections(
2471 &mut self,
2472 selections: Arc<[Selection<Anchor>]>,
2473 line_mode: bool,
2474 cursor_shape: CursorShape,
2475 cx: &mut Context<Self>,
2476 ) {
2477 let lamport_timestamp = self.text.lamport_clock.tick();
2478 self.remote_selections.insert(
2479 ReplicaId::AGENT,
2480 SelectionSet {
2481 selections,
2482 lamport_timestamp,
2483 line_mode,
2484 cursor_shape,
2485 },
2486 );
2487 self.non_text_state_update_count += 1;
2488 cx.notify();
2489 }
2490
2491 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2492 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2493 }
2494
2495 /// Replaces the buffer's entire text.
2496 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2497 where
2498 T: Into<Arc<str>>,
2499 {
2500 self.autoindent_requests.clear();
2501 self.edit([(0..self.len(), text)], None, cx)
2502 }
2503
2504 /// Appends the given text to the end of the buffer.
2505 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2506 where
2507 T: Into<Arc<str>>,
2508 {
2509 self.edit([(self.len()..self.len(), text)], None, cx)
2510 }
2511
2512 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2513 /// delete, and a string of text to insert at that location.
2514 ///
2515 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2516 /// request for the edited ranges, which will be processed when the buffer finishes
2517 /// parsing.
2518 ///
    /// Parsing takes place at the end of a transaction, and may happen synchronously
    /// or asynchronously, depending on the changes.
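    ///
    /// A minimal sketch (not compiled; assumes a test `Buffer` entity whose text
    /// is `"hello world"`):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     // Both ranges are resolved against the text as it was before this call.
    ///     buffer.edit([(0..5, "goodbye"), (11..11, "!")], None, cx);
    ///     assert_eq!(buffer.text(), "goodbye world!");
    /// });
    /// ```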
2521 pub fn edit<I, S, T>(
2522 &mut self,
2523 edits_iter: I,
2524 autoindent_mode: Option<AutoindentMode>,
2525 cx: &mut Context<Self>,
2526 ) -> Option<clock::Lamport>
2527 where
2528 I: IntoIterator<Item = (Range<S>, T)>,
2529 S: ToOffset,
2530 T: Into<Arc<str>>,
2531 {
2532 // Skip invalid edits and coalesce contiguous ones.
2533 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2534
2535 for (range, new_text) in edits_iter {
2536 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2537
2538 if range.start > range.end {
2539 mem::swap(&mut range.start, &mut range.end);
2540 }
2541 let new_text = new_text.into();
2542 if !new_text.is_empty() || !range.is_empty() {
2543 if let Some((prev_range, prev_text)) = edits.last_mut()
2544 && prev_range.end >= range.start
2545 {
2546 prev_range.end = cmp::max(prev_range.end, range.end);
2547 *prev_text = format!("{prev_text}{new_text}").into();
2548 } else {
2549 edits.push((range, new_text));
2550 }
2551 }
2552 }
2553 if edits.is_empty() {
2554 return None;
2555 }
2556
2557 self.start_transaction();
2558 self.pending_autoindent.take();
2559 let autoindent_request = autoindent_mode
2560 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2561
2562 let edit_operation = self.text.edit(edits.iter().cloned());
2563 let edit_id = edit_operation.timestamp();
2564
2565 if let Some((before_edit, mode)) = autoindent_request {
2566 let mut delta = 0isize;
2567 let mut previous_setting = None;
2568 let entries: Vec<_> = edits
2569 .into_iter()
2570 .enumerate()
2571 .zip(&edit_operation.as_edit().unwrap().new_text)
2572 .filter(|((_, (range, _)), _)| {
2573 let language = before_edit.language_at(range.start);
2574 let language_id = language.map(|l| l.id());
2575 if let Some((cached_language_id, auto_indent)) = previous_setting
2576 && cached_language_id == language_id
2577 {
2578 auto_indent
2579 } else {
2580 // The auto-indent setting is not present in editorconfigs, hence
2581 // we can avoid passing the file here.
2582 let auto_indent =
2583 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2584 previous_setting = Some((language_id, auto_indent));
2585 auto_indent
2586 }
2587 })
2588 .map(|((ix, (range, _)), new_text)| {
2589 let new_text_length = new_text.len();
2590 let old_start = range.start.to_point(&before_edit);
2591 let new_start = (delta + range.start as isize) as usize;
2592 let range_len = range.end - range.start;
2593 delta += new_text_length as isize - range_len as isize;
2594
2595 // Decide what range of the insertion to auto-indent, and whether
2596 // the first line of the insertion should be considered a newly-inserted line
2597 // or an edit to an existing line.
2598 let mut range_of_insertion_to_indent = 0..new_text_length;
2599 let mut first_line_is_new = true;
2600
2601 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2602 let old_line_end = before_edit.line_len(old_start.row);
2603
2604 if old_start.column > old_line_start {
2605 first_line_is_new = false;
2606 }
2607
2608 if !new_text.contains('\n')
2609 && (old_start.column + (range_len as u32) < old_line_end
2610 || old_line_end == old_line_start)
2611 {
2612 first_line_is_new = false;
2613 }
2614
2615 // When inserting text starting with a newline, avoid auto-indenting the
2616 // previous line.
2617 if new_text.starts_with('\n') {
2618 range_of_insertion_to_indent.start += 1;
2619 first_line_is_new = true;
2620 }
2621
2622 let mut original_indent_column = None;
2623 if let AutoindentMode::Block {
2624 original_indent_columns,
2625 } = &mode
2626 {
2627 original_indent_column = Some(if new_text.starts_with('\n') {
2628 indent_size_for_text(
2629 new_text[range_of_insertion_to_indent.clone()].chars(),
2630 )
2631 .len
2632 } else {
2633 original_indent_columns
2634 .get(ix)
2635 .copied()
2636 .flatten()
2637 .unwrap_or_else(|| {
2638 indent_size_for_text(
2639 new_text[range_of_insertion_to_indent.clone()].chars(),
2640 )
2641 .len
2642 })
2643 });
2644
2645 // Avoid auto-indenting the line after the edit.
2646 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2647 range_of_insertion_to_indent.end -= 1;
2648 }
2649 }
2650
2651 AutoindentRequestEntry {
2652 first_line_is_new,
2653 original_indent_column,
2654 indent_size: before_edit.language_indent_size_at(range.start, cx),
2655 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2656 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2657 }
2658 })
2659 .collect();
2660
2661 if !entries.is_empty() {
2662 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2663 before_edit,
2664 entries,
2665 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2666 ignore_empty_lines: false,
2667 }));
2668 }
2669 }
2670
2671 self.end_transaction(cx);
2672 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2673 Some(edit_id)
2674 }
2675
2676 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2677 self.was_changed();
2678
2679 if self.edits_since::<usize>(old_version).next().is_none() {
2680 return;
2681 }
2682
2683 self.reparse(cx, true);
2684 cx.emit(BufferEvent::Edited);
2685 if was_dirty != self.is_dirty() {
2686 cx.emit(BufferEvent::DirtyChanged);
2687 }
2688 cx.notify();
2689 }
2690
2691 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2692 where
2693 I: IntoIterator<Item = Range<T>>,
2694 T: ToOffset + Copy,
2695 {
2696 let before_edit = self.snapshot();
2697 let entries = ranges
2698 .into_iter()
2699 .map(|range| AutoindentRequestEntry {
2700 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2701 first_line_is_new: true,
2702 indent_size: before_edit.language_indent_size_at(range.start, cx),
2703 original_indent_column: None,
2704 })
2705 .collect();
2706 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2707 before_edit,
2708 entries,
2709 is_block_mode: false,
2710 ignore_empty_lines: true,
2711 }));
2712 self.request_autoindent(cx);
2713 }
2714
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2717 pub fn insert_empty_line(
2718 &mut self,
2719 position: impl ToPoint,
2720 space_above: bool,
2721 space_below: bool,
2722 cx: &mut Context<Self>,
2723 ) -> Point {
2724 let mut position = position.to_point(self);
2725
2726 self.start_transaction();
2727
2728 self.edit(
2729 [(position..position, "\n")],
2730 Some(AutoindentMode::EachLine),
2731 cx,
2732 );
2733
2734 if position.column > 0 {
2735 position += Point::new(1, 0);
2736 }
2737
2738 if !self.is_line_blank(position.row) {
2739 self.edit(
2740 [(position..position, "\n")],
2741 Some(AutoindentMode::EachLine),
2742 cx,
2743 );
2744 }
2745
2746 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2747 self.edit(
2748 [(position..position, "\n")],
2749 Some(AutoindentMode::EachLine),
2750 cx,
2751 );
2752 position.row += 1;
2753 }
2754
2755 if space_below
2756 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2757 {
2758 self.edit(
2759 [(position..position, "\n")],
2760 Some(AutoindentMode::EachLine),
2761 cx,
2762 );
2763 }
2764
2765 self.end_transaction(cx);
2766
2767 position
2768 }
2769
2770 /// Applies the given remote operations to the buffer.
2771 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2772 self.pending_autoindent.take();
2773 let was_dirty = self.is_dirty();
2774 let old_version = self.version.clone();
2775 let mut deferred_ops = Vec::new();
2776 let buffer_ops = ops
2777 .into_iter()
2778 .filter_map(|op| match op {
2779 Operation::Buffer(op) => Some(op),
2780 _ => {
2781 if self.can_apply_op(&op) {
2782 self.apply_op(op, cx);
2783 } else {
2784 deferred_ops.push(op);
2785 }
2786 None
2787 }
2788 })
2789 .collect::<Vec<_>>();
2790 for operation in buffer_ops.iter() {
2791 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2792 }
2793 self.text.apply_ops(buffer_ops);
2794 self.deferred_ops.insert(deferred_ops);
2795 self.flush_deferred_ops(cx);
2796 self.did_edit(&old_version, was_dirty, cx);
2797 // Notify independently of whether the buffer was edited as the operations could include a
2798 // selection update.
2799 cx.notify();
2800 }
2801
2802 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2803 let mut deferred_ops = Vec::new();
2804 for op in self.deferred_ops.drain().iter().cloned() {
2805 if self.can_apply_op(&op) {
2806 self.apply_op(op, cx);
2807 } else {
2808 deferred_ops.push(op);
2809 }
2810 }
2811 self.deferred_ops.insert(deferred_ops);
2812 }
2813
2814 pub fn has_deferred_ops(&self) -> bool {
2815 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2816 }
2817
2818 fn can_apply_op(&self, operation: &Operation) -> bool {
2819 match operation {
2820 Operation::Buffer(_) => {
2821 unreachable!("buffer operations should never be applied at this layer")
2822 }
2823 Operation::UpdateDiagnostics {
2824 diagnostics: diagnostic_set,
2825 ..
2826 } => diagnostic_set.iter().all(|diagnostic| {
2827 self.text.can_resolve(&diagnostic.range.start)
2828 && self.text.can_resolve(&diagnostic.range.end)
2829 }),
2830 Operation::UpdateSelections { selections, .. } => selections
2831 .iter()
2832 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2833 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2834 }
2835 }
2836
2837 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2838 match operation {
2839 Operation::Buffer(_) => {
2840 unreachable!("buffer operations should never be applied at this layer")
2841 }
2842 Operation::UpdateDiagnostics {
2843 server_id,
2844 diagnostics: diagnostic_set,
2845 lamport_timestamp,
2846 } => {
2847 let snapshot = self.snapshot();
2848 self.apply_diagnostic_update(
2849 server_id,
2850 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2851 lamport_timestamp,
2852 cx,
2853 );
2854 }
2855 Operation::UpdateSelections {
2856 selections,
2857 lamport_timestamp,
2858 line_mode,
2859 cursor_shape,
2860 } => {
2861 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2862 && set.lamport_timestamp > lamport_timestamp
2863 {
2864 return;
2865 }
2866
2867 self.remote_selections.insert(
2868 lamport_timestamp.replica_id,
2869 SelectionSet {
2870 selections,
2871 lamport_timestamp,
2872 line_mode,
2873 cursor_shape,
2874 },
2875 );
2876 self.text.lamport_clock.observe(lamport_timestamp);
2877 self.non_text_state_update_count += 1;
2878 }
2879 Operation::UpdateCompletionTriggers {
2880 triggers,
2881 lamport_timestamp,
2882 server_id,
2883 } => {
2884 if triggers.is_empty() {
2885 self.completion_triggers_per_language_server
2886 .remove(&server_id);
2887 self.completion_triggers = self
2888 .completion_triggers_per_language_server
2889 .values()
2890 .flat_map(|triggers| triggers.iter().cloned())
2891 .collect();
2892 } else {
2893 self.completion_triggers_per_language_server
2894 .insert(server_id, triggers.iter().cloned().collect());
2895 self.completion_triggers.extend(triggers);
2896 }
2897 self.text.lamport_clock.observe(lamport_timestamp);
2898 }
2899 Operation::UpdateLineEnding {
2900 line_ending,
2901 lamport_timestamp,
2902 } => {
2903 self.text.set_line_ending(line_ending);
2904 self.text.lamport_clock.observe(lamport_timestamp);
2905 }
2906 }
2907 }
2908
2909 fn apply_diagnostic_update(
2910 &mut self,
2911 server_id: LanguageServerId,
2912 diagnostics: DiagnosticSet,
2913 lamport_timestamp: clock::Lamport,
2914 cx: &mut Context<Self>,
2915 ) {
2916 if lamport_timestamp > self.diagnostics_timestamp {
2917 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2918 if diagnostics.is_empty() {
2919 if let Ok(ix) = ix {
2920 self.diagnostics.remove(ix);
2921 }
2922 } else {
2923 match ix {
2924 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2925 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2926 };
2927 }
2928 self.diagnostics_timestamp = lamport_timestamp;
2929 self.non_text_state_update_count += 1;
2930 self.text.lamport_clock.observe(lamport_timestamp);
2931 cx.notify();
2932 cx.emit(BufferEvent::DiagnosticsUpdated);
2933 }
2934 }
2935
2936 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2937 self.was_changed();
2938 cx.emit(BufferEvent::Operation {
2939 operation,
2940 is_local,
2941 });
2942 }
2943
2944 /// Removes the selections for a given peer.
2945 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2946 self.remote_selections.remove(&replica_id);
2947 cx.notify();
2948 }
2949
2950 /// Undoes the most recent transaction.
2951 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2952 let was_dirty = self.is_dirty();
2953 let old_version = self.version.clone();
2954
2955 if let Some((transaction_id, operation)) = self.text.undo() {
2956 self.send_operation(Operation::Buffer(operation), true, cx);
2957 self.did_edit(&old_version, was_dirty, cx);
2958 Some(transaction_id)
2959 } else {
2960 None
2961 }
2962 }
2963
2964 /// Manually undoes a specific transaction in the buffer's undo history.
2965 pub fn undo_transaction(
2966 &mut self,
2967 transaction_id: TransactionId,
2968 cx: &mut Context<Self>,
2969 ) -> bool {
2970 let was_dirty = self.is_dirty();
2971 let old_version = self.version.clone();
2972 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2973 self.send_operation(Operation::Buffer(operation), true, cx);
2974 self.did_edit(&old_version, was_dirty, cx);
2975 true
2976 } else {
2977 false
2978 }
2979 }
2980
2981 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2982 pub fn undo_to_transaction(
2983 &mut self,
2984 transaction_id: TransactionId,
2985 cx: &mut Context<Self>,
2986 ) -> bool {
2987 let was_dirty = self.is_dirty();
2988 let old_version = self.version.clone();
2989
2990 let operations = self.text.undo_to_transaction(transaction_id);
2991 let undone = !operations.is_empty();
2992 for operation in operations {
2993 self.send_operation(Operation::Buffer(operation), true, cx);
2994 }
2995 if undone {
2996 self.did_edit(&old_version, was_dirty, cx)
2997 }
2998 undone
2999 }
3000
3001 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3002 let was_dirty = self.is_dirty();
3003 let operation = self.text.undo_operations(counts);
3004 let old_version = self.version.clone();
3005 self.send_operation(Operation::Buffer(operation), true, cx);
3006 self.did_edit(&old_version, was_dirty, cx);
3007 }
3008
    /// Redoes the most recently undone transaction.
3010 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3011 let was_dirty = self.is_dirty();
3012 let old_version = self.version.clone();
3013
3014 if let Some((transaction_id, operation)) = self.text.redo() {
3015 self.send_operation(Operation::Buffer(operation), true, cx);
3016 self.did_edit(&old_version, was_dirty, cx);
3017 Some(transaction_id)
3018 } else {
3019 None
3020 }
3021 }
3022
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3024 pub fn redo_to_transaction(
3025 &mut self,
3026 transaction_id: TransactionId,
3027 cx: &mut Context<Self>,
3028 ) -> bool {
3029 let was_dirty = self.is_dirty();
3030 let old_version = self.version.clone();
3031
3032 let operations = self.text.redo_to_transaction(transaction_id);
3033 let redone = !operations.is_empty();
3034 for operation in operations {
3035 self.send_operation(Operation::Buffer(operation), true, cx);
3036 }
3037 if redone {
3038 self.did_edit(&old_version, was_dirty, cx)
3039 }
3040 redone
3041 }
3042
    /// Overrides the completion triggers for the given language server with the provided set.
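    ///
    /// A minimal sketch (not compiled; assumes a test `Buffer` entity and a
    /// hypothetical `server_id: LanguageServerId`):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.set_completion_triggers(
    ///         server_id,
    ///         BTreeSet::from([".".to_string(), "::".to_string()]),
    ///         cx,
    ///     );
    ///     // The merged trigger set now includes this server's triggers.
    ///     assert!(buffer.completion_triggers().contains("."));
    /// });
    /// ```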
3044 pub fn set_completion_triggers(
3045 &mut self,
3046 server_id: LanguageServerId,
3047 triggers: BTreeSet<String>,
3048 cx: &mut Context<Self>,
3049 ) {
3050 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3051 if triggers.is_empty() {
3052 self.completion_triggers_per_language_server
3053 .remove(&server_id);
3054 self.completion_triggers = self
3055 .completion_triggers_per_language_server
3056 .values()
3057 .flat_map(|triggers| triggers.iter().cloned())
3058 .collect();
3059 } else {
3060 self.completion_triggers_per_language_server
3061 .insert(server_id, triggers.clone());
3062 self.completion_triggers.extend(triggers.iter().cloned());
3063 }
3064 self.send_operation(
3065 Operation::UpdateCompletionTriggers {
3066 triggers: triggers.into_iter().collect(),
3067 lamport_timestamp: self.completion_triggers_timestamp,
3068 server_id,
3069 },
3070 true,
3071 cx,
3072 );
3073 cx.notify();
3074 }
3075
3076 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
3078 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3079 &self.completion_triggers
3080 }
3081
3082 /// Call this directly after performing edits to prevent the preview tab
3083 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3084 /// to return false until there are additional edits.
3085 pub fn refresh_preview(&mut self) {
3086 self.preview_version = self.version.clone();
3087 }
3088
3089 /// Whether we should preserve the preview status of a tab containing this buffer.
3090 pub fn preserve_preview(&self) -> bool {
3091 !self.has_edits_since(&self.preview_version)
3092 }
3093}
3094
3095#[doc(hidden)]
3096#[cfg(any(test, feature = "test-support"))]
3097impl Buffer {
3098 pub fn edit_via_marked_text(
3099 &mut self,
3100 marked_string: &str,
3101 autoindent_mode: Option<AutoindentMode>,
3102 cx: &mut Context<Self>,
3103 ) {
3104 let edits = self.edits_for_marked_text(marked_string);
3105 self.edit(edits, autoindent_mode, cx);
3106 }
3107
3108 pub fn set_group_interval(&mut self, group_interval: Duration) {
3109 self.text.set_group_interval(group_interval);
3110 }
3111
3112 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3113 where
3114 T: rand::Rng,
3115 {
3116 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3117 let mut last_end = None;
3118 for _ in 0..old_range_count {
3119 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3120 break;
3121 }
3122
3123 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3124 let mut range = self.random_byte_range(new_start, rng);
3125 if rng.random_bool(0.2) {
3126 mem::swap(&mut range.start, &mut range.end);
3127 }
3128 last_end = Some(range.end);
3129
3130 let new_text_len = rng.random_range(0..10);
3131 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3132 new_text = new_text.to_uppercase();
3133
3134 edits.push((range, new_text));
3135 }
3136 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3137 self.edit(edits, None, cx);
3138 }
3139
3140 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3141 let was_dirty = self.is_dirty();
3142 let old_version = self.version.clone();
3143
3144 let ops = self.text.randomly_undo_redo(rng);
3145 if !ops.is_empty() {
3146 for op in ops {
3147 self.send_operation(Operation::Buffer(op), true, cx);
3148 self.did_edit(&old_version, was_dirty, cx);
3149 }
3150 }
3151 }
3152}
3153
3154impl EventEmitter<BufferEvent> for Buffer {}
3155
3156impl Deref for Buffer {
3157 type Target = TextBuffer;
3158
3159 fn deref(&self) -> &Self::Target {
3160 &self.text
3161 }
3162}
3163
3164impl BufferSnapshot {
3165 /// Returns [`IndentSize`] for a given line that respects user settings and
3166 /// language preferences.
3167 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3168 indent_size_for_line(self, row)
3169 }
3170
3171 /// Returns [`IndentSize`] for a given position that respects user settings
3172 /// and language preferences.
3173 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3174 let settings = language_settings(
3175 self.language_at(position).map(|l| l.name()),
3176 self.file(),
3177 cx,
3178 );
3179 if settings.hard_tabs {
3180 IndentSize::tab()
3181 } else {
3182 IndentSize::spaces(settings.tab_size.get())
3183 }
3184 }
3185
3186 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3187 /// is passed in as `single_indent_size`.
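    ///
    /// A minimal sketch (not compiled; assumes a parsed Rust snapshot whose text
    /// is `"fn f() {\n1;\n}"`):
    ///
    /// ```ignore
    /// let suggested = snapshot.suggested_indents([1u32].into_iter(), IndentSize::spaces(4));
    /// // Row 1 sits inside the braces, one level deeper than row 0.
    /// assert_eq!(suggested.get(&1), Some(&IndentSize::spaces(4)));
    /// ```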
3188 pub fn suggested_indents(
3189 &self,
3190 rows: impl Iterator<Item = u32>,
3191 single_indent_size: IndentSize,
3192 ) -> BTreeMap<u32, IndentSize> {
3193 let mut result = BTreeMap::new();
3194
3195 for row_range in contiguous_ranges(rows, 10) {
3196 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3197 Some(suggestions) => suggestions,
3198 _ => break,
3199 };
3200
3201 for (row, suggestion) in row_range.zip(suggestions) {
3202 let indent_size = if let Some(suggestion) = suggestion {
3203 result
3204 .get(&suggestion.basis_row)
3205 .copied()
3206 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3207 .with_delta(suggestion.delta, single_indent_size)
3208 } else {
3209 self.indent_size_for_line(row)
3210 };
3211
3212 result.insert(row, indent_size);
3213 }
3214 }
3215
3216 result
3217 }
3218
3219 fn suggest_autoindents(
3220 &self,
3221 row_range: Range<u32>,
3222 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3223 let config = &self.language.as_ref()?.config;
3224 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3225
3226 #[derive(Debug, Clone)]
3227 struct StartPosition {
3228 start: Point,
3229 suffix: SharedString,
3230 language: Arc<Language>,
3231 }
3232
3233 // Find the suggested indentation ranges based on the syntax tree.
3234 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3235 let end = Point::new(row_range.end, 0);
3236 let range = (start..end).to_offset(&self.text);
3237 let mut matches = self.syntax.matches_with_options(
3238 range.clone(),
3239 &self.text,
3240 TreeSitterOptions {
3241 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3242 max_start_depth: None,
3243 },
3244 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3245 );
3246 let indent_configs = matches
3247 .grammars()
3248 .iter()
3249 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3250 .collect::<Vec<_>>();
3251
3252 let mut indent_ranges = Vec::<Range<Point>>::new();
3253 let mut start_positions = Vec::<StartPosition>::new();
3254 let mut outdent_positions = Vec::<Point>::new();
3255 while let Some(mat) = matches.peek() {
3256 let mut start: Option<Point> = None;
3257 let mut end: Option<Point> = None;
3258
3259 let config = indent_configs[mat.grammar_index];
3260 for capture in mat.captures {
3261 if capture.index == config.indent_capture_ix {
3262 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3263 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3264 } else if Some(capture.index) == config.start_capture_ix {
3265 start = Some(Point::from_ts_point(capture.node.end_position()));
3266 } else if Some(capture.index) == config.end_capture_ix {
3267 end = Some(Point::from_ts_point(capture.node.start_position()));
3268 } else if Some(capture.index) == config.outdent_capture_ix {
3269 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3270 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3271 start_positions.push(StartPosition {
3272 start: Point::from_ts_point(capture.node.start_position()),
3273 suffix: suffix.clone(),
3274 language: mat.language.clone(),
3275 });
3276 }
3277 }
3278
3279 matches.advance();
3280 if let Some((start, end)) = start.zip(end) {
3281 if start.row == end.row {
3282 continue;
3283 }
3284 let range = start..end;
3285 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3286 Err(ix) => indent_ranges.insert(ix, range),
3287 Ok(ix) => {
3288 let prev_range = &mut indent_ranges[ix];
3289 prev_range.end = prev_range.end.max(range.end);
3290 }
3291 }
3292 }
3293 }
3294
3295 let mut error_ranges = Vec::<Range<Point>>::new();
3296 let mut matches = self
3297 .syntax
3298 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3299 while let Some(mat) = matches.peek() {
3300 let node = mat.captures[0].node;
3301 let start = Point::from_ts_point(node.start_position());
3302 let end = Point::from_ts_point(node.end_position());
3303 let range = start..end;
3304 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3305 Ok(ix) | Err(ix) => ix,
3306 };
3307 let mut end_ix = ix;
3308 while let Some(existing_range) = error_ranges.get(end_ix) {
3309 if existing_range.end < end {
3310 end_ix += 1;
3311 } else {
3312 break;
3313 }
3314 }
3315 error_ranges.splice(ix..end_ix, [range]);
3316 matches.advance();
3317 }
3318
3319 outdent_positions.sort();
3320 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and set its end to the outdent position.
3323 if let Some(range_to_truncate) = indent_ranges
3324 .iter_mut()
3325 .rfind(|indent_range| indent_range.contains(&outdent_position))
3326 {
3327 range_to_truncate.end = outdent_position;
3328 }
3329 }
3330
3331 start_positions.sort_by_key(|b| b.start);
3332
        // Find the suggested indentation increases and decreases based on regexes.
3334 let mut regex_outdent_map = HashMap::default();
3335 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3336 let mut start_positions_iter = start_positions.iter().peekable();
3337
3338 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3339 self.for_each_line(
3340 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3341 ..Point::new(row_range.end, 0),
3342 |row, line| {
3343 let indent_len = self.indent_size_for_line(row).len;
3344 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3345 let row_language_config = row_language
3346 .as_ref()
3347 .map(|lang| lang.config())
3348 .unwrap_or(config);
3349
3350 if row_language_config
3351 .decrease_indent_pattern
3352 .as_ref()
3353 .is_some_and(|regex| regex.is_match(line))
3354 {
3355 indent_change_rows.push((row, Ordering::Less));
3356 }
3357 if row_language_config
3358 .increase_indent_pattern
3359 .as_ref()
3360 .is_some_and(|regex| regex.is_match(line))
3361 {
3362 indent_change_rows.push((row + 1, Ordering::Greater));
3363 }
3364 while let Some(pos) = start_positions_iter.peek() {
3365 if pos.start.row < row {
3366 let pos = start_positions_iter.next().unwrap().clone();
3367 last_seen_suffix
3368 .entry(pos.suffix.to_string())
3369 .or_default()
3370 .push(pos);
3371 } else {
3372 break;
3373 }
3374 }
3375 for rule in &row_language_config.decrease_indent_patterns {
3376 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3377 let row_start_column = self.indent_size_for_line(row).len;
3378 let basis_row = rule
3379 .valid_after
3380 .iter()
3381 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3382 .flatten()
3383 .filter(|pos| {
3384 row_language
3385 .as_ref()
3386 .or(self.language.as_ref())
3387 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3388 })
3389 .filter(|pos| pos.start.column <= row_start_column)
3390 .max_by_key(|pos| pos.start.row);
3391 if let Some(outdent_to) = basis_row {
3392 regex_outdent_map.insert(row, outdent_to.start.row);
3393 }
3394 break;
3395 }
3396 }
3397 },
3398 );
3399
3400 let mut indent_changes = indent_change_rows.into_iter().peekable();
3401 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3402 prev_non_blank_row.unwrap_or(0)
3403 } else {
3404 row_range.start.saturating_sub(1)
3405 };
3406
3407 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3408 Some(row_range.map(move |row| {
3409 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3410
3411 let mut indent_from_prev_row = false;
3412 let mut outdent_from_prev_row = false;
3413 let mut outdent_to_row = u32::MAX;
3414 let mut from_regex = false;
3415
3416 while let Some((indent_row, delta)) = indent_changes.peek() {
3417 match indent_row.cmp(&row) {
3418 Ordering::Equal => match delta {
3419 Ordering::Less => {
3420 from_regex = true;
3421 outdent_from_prev_row = true
3422 }
3423 Ordering::Greater => {
3424 indent_from_prev_row = true;
3425 from_regex = true
3426 }
3427 _ => {}
3428 },
3429
3430 Ordering::Greater => break,
3431 Ordering::Less => {}
3432 }
3433
3434 indent_changes.next();
3435 }
3436
3437 for range in &indent_ranges {
3438 if range.start.row >= row {
3439 break;
3440 }
3441 if range.start.row == prev_row && range.end > row_start {
3442 indent_from_prev_row = true;
3443 }
3444 if range.end > prev_row_start && range.end <= row_start {
3445 outdent_to_row = outdent_to_row.min(range.start.row);
3446 }
3447 }
3448
3449 if let Some(basis_row) = regex_outdent_map.get(&row) {
3450 indent_from_prev_row = false;
3451 outdent_to_row = *basis_row;
3452 from_regex = true;
3453 }
3454
3455 let within_error = error_ranges
3456 .iter()
3457 .any(|e| e.start.row < row && e.end > row_start);
3458
3459 let suggestion = if outdent_to_row == prev_row
3460 || (outdent_from_prev_row && indent_from_prev_row)
3461 {
3462 Some(IndentSuggestion {
3463 basis_row: prev_row,
3464 delta: Ordering::Equal,
3465 within_error: within_error && !from_regex,
3466 })
3467 } else if indent_from_prev_row {
3468 Some(IndentSuggestion {
3469 basis_row: prev_row,
3470 delta: Ordering::Greater,
3471 within_error: within_error && !from_regex,
3472 })
3473 } else if outdent_to_row < prev_row {
3474 Some(IndentSuggestion {
3475 basis_row: outdent_to_row,
3476 delta: Ordering::Equal,
3477 within_error: within_error && !from_regex,
3478 })
3479 } else if outdent_from_prev_row {
3480 Some(IndentSuggestion {
3481 basis_row: prev_row,
3482 delta: Ordering::Less,
3483 within_error: within_error && !from_regex,
3484 })
3485 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3486 {
3487 Some(IndentSuggestion {
3488 basis_row: prev_row,
3489 delta: Ordering::Equal,
3490 within_error: within_error && !from_regex,
3491 })
3492 } else {
3493 None
3494 };
3495
3496 prev_row = row;
3497 prev_row_start = row_start;
3498 suggestion
3499 }))
3500 }
3501
3502 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3503 while row > 0 {
3504 row -= 1;
3505 if !self.is_line_blank(row) {
3506 return Some(row);
3507 }
3508 }
3509 None
3510 }
3511
3512 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3513 let captures = self.syntax.captures(range, &self.text, |grammar| {
3514 grammar
3515 .highlights_config
3516 .as_ref()
3517 .map(|config| &config.query)
3518 });
3519 let highlight_maps = captures
3520 .grammars()
3521 .iter()
3522 .map(|grammar| grammar.highlight_map())
3523 .collect();
3524 (captures, highlight_maps)
3525 }
3526
3527 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3528 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3529 /// returned in chunks where each chunk has a single syntax highlighting style and
3530 /// diagnostic status.
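    ///
    /// A minimal usage sketch (assumes `snapshot` is a populated `BufferSnapshot`; not run as a doctest):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries at most one syntax highlight id and one diagnostic severity.
    ///     text.push_str(chunk.text);
    /// }
    /// ```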
3531 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3532 let range = range.start.to_offset(self)..range.end.to_offset(self);
3533
3534 let mut syntax = None;
3535 if language_aware {
3536 syntax = Some(self.get_highlights(range.clone()));
3537 }
3538 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3539 let diagnostics = language_aware;
3540 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3541 }
3542
3543 pub fn highlighted_text_for_range<T: ToOffset>(
3544 &self,
3545 range: Range<T>,
3546 override_style: Option<HighlightStyle>,
3547 syntax_theme: &SyntaxTheme,
3548 ) -> HighlightedText {
3549 HighlightedText::from_buffer_range(
3550 range,
3551 &self.text,
3552 &self.syntax,
3553 override_style,
3554 syntax_theme,
3555 )
3556 }
3557
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// The line text is passed to the callback via a reused buffer, avoiding a new string allocation per line.
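    ///
    /// A sketch of the callback contract (assumes a `Point` range within the buffer; not run as a doctest):
    ///
    /// ```ignore
    /// snapshot.for_each_line(Point::new(0, 0)..Point::new(10, 0), |row, line| {
    ///     // `line` borrows a reused buffer, so copy it if it must outlive the callback.
    ///     println!("{row}: {line}");
    /// });
    /// ```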
3560 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3561 let mut line = String::new();
3562 let mut row = range.start.row;
3563 for chunk in self
3564 .as_rope()
3565 .chunks_in_range(range.to_offset(self))
3566 .chain(["\n"])
3567 {
3568 for (newline_ix, text) in chunk.split('\n').enumerate() {
3569 if newline_ix > 0 {
3570 callback(row, &line);
3571 row += 1;
3572 line.clear();
3573 }
3574 line.push_str(text);
3575 }
3576 }
3577 }
3578
3579 /// Iterates over every [`SyntaxLayer`] in the buffer.
3580 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3581 self.syntax_layers_for_range(0..self.len(), true)
3582 }
3583
3584 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3585 let offset = position.to_offset(self);
3586 self.syntax_layers_for_range(offset..offset, false)
3587 .filter(|l| {
3588 if let Some(ranges) = l.included_sub_ranges {
3589 ranges.iter().any(|range| {
3590 let start = range.start.to_offset(self);
3591 start <= offset && {
3592 let end = range.end.to_offset(self);
3593 offset < end
3594 }
3595 })
3596 } else {
3597 l.node().start_byte() <= offset && l.node().end_byte() > offset
3598 }
3599 })
3600 .last()
3601 }
3602
3603 pub fn syntax_layers_for_range<D: ToOffset>(
3604 &self,
3605 range: Range<D>,
3606 include_hidden: bool,
3607 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3608 self.syntax
3609 .layers_for_range(range, &self.text, include_hidden)
3610 }
3611
3612 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3613 &self,
3614 range: Range<D>,
3615 ) -> Option<SyntaxLayer<'_>> {
3616 let range = range.to_offset(self);
3617 self.syntax
3618 .layers_for_range(range, &self.text, false)
3619 .max_by(|a, b| {
3620 if a.depth != b.depth {
3621 a.depth.cmp(&b.depth)
3622 } else if a.offset.0 != b.offset.0 {
3623 a.offset.0.cmp(&b.offset.0)
3624 } else {
3625 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3626 }
3627 })
3628 }
3629
3630 /// Returns the main [`Language`].
3631 pub fn language(&self) -> Option<&Arc<Language>> {
3632 self.language.as_ref()
3633 }
3634
3635 /// Returns the [`Language`] at the given location.
3636 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3637 self.syntax_layer_at(position)
3638 .map(|info| info.language)
3639 .or(self.language.as_ref())
3640 }
3641
3642 /// Returns the settings for the language at the given location.
3643 pub fn settings_at<'a, D: ToOffset>(
3644 &'a self,
3645 position: D,
3646 cx: &'a App,
3647 ) -> Cow<'a, LanguageSettings> {
3648 language_settings(
3649 self.language_at(position).map(|l| l.name()),
3650 self.file.as_ref(),
3651 cx,
3652 )
3653 }
3654
3655 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3656 CharClassifier::new(self.language_scope_at(point))
3657 }
3658
3659 /// Returns the [`LanguageScope`] at the given location.
3660 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3661 let offset = position.to_offset(self);
3662 let mut scope = None;
3663 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3664
3665 // Use the layer that has the smallest node intersecting the given point.
3666 for layer in self
3667 .syntax
3668 .layers_for_range(offset..offset, &self.text, false)
3669 {
3670 let mut cursor = layer.node().walk();
3671
3672 let mut range = None;
3673 loop {
3674 let child_range = cursor.node().byte_range();
3675 if !child_range.contains(&offset) {
3676 break;
3677 }
3678
3679 range = Some(child_range);
3680 if cursor.goto_first_child_for_byte(offset).is_none() {
3681 break;
3682 }
3683 }
3684
3685 if let Some(range) = range
3686 && smallest_range_and_depth.as_ref().is_none_or(
3687 |(smallest_range, smallest_range_depth)| {
3688 if layer.depth > *smallest_range_depth {
3689 true
3690 } else if layer.depth == *smallest_range_depth {
3691 range.len() < smallest_range.len()
3692 } else {
3693 false
3694 }
3695 },
3696 )
3697 {
3698 smallest_range_and_depth = Some((range, layer.depth));
3699 scope = Some(LanguageScope {
3700 language: layer.language.clone(),
3701 override_id: layer.override_id(offset, &self.text),
3702 });
3703 }
3704 }
3705
3706 scope.or_else(|| {
3707 self.language.clone().map(|language| LanguageScope {
3708 language,
3709 override_id: None,
3710 })
3711 })
3712 }
3713
3714 /// Returns a tuple of the range and character kind of the word
3715 /// surrounding the given position.
3716 pub fn surrounding_word<T: ToOffset>(
3717 &self,
3718 start: T,
3719 scope_context: Option<CharScopeContext>,
3720 ) -> (Range<usize>, Option<CharKind>) {
3721 let mut start = start.to_offset(self);
3722 let mut end = start;
3723 let mut next_chars = self.chars_at(start).take(128).peekable();
3724 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3725
3726 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3727 let word_kind = cmp::max(
3728 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3729 next_chars.peek().copied().map(|c| classifier.kind(c)),
3730 );
3731
3732 for ch in prev_chars {
3733 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3734 start -= ch.len_utf8();
3735 } else {
3736 break;
3737 }
3738 }
3739
3740 for ch in next_chars {
3741 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3742 end += ch.len_utf8();
3743 } else {
3744 break;
3745 }
3746 }
3747
3748 (start..end, word_kind)
3749 }
3750
3751 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3752 /// range. When `require_larger` is true, the node found must be larger than the query range.
3753 ///
3754 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3755 /// be moved to the root of the tree.
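    ///
    /// A sketch of the internal usage pattern (mirrors how `syntax_ancestor` uses it; `layer` and
    /// `range` are assumed to exist; not run as a doctest):
    ///
    /// ```ignore
    /// let mut cursor = layer.node().walk();
    /// if Self::goto_node_enclosing_range(&mut cursor, &range, true) {
    ///     // `cursor.node()` now encloses `range` and is strictly larger than it.
    ///     let enclosing = cursor.node();
    /// }
    /// ```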
3756 fn goto_node_enclosing_range(
3757 cursor: &mut tree_sitter::TreeCursor,
3758 query_range: &Range<usize>,
3759 require_larger: bool,
3760 ) -> bool {
3761 let mut ascending = false;
3762 loop {
3763 let mut range = cursor.node().byte_range();
3764 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3767 if range.start > query_range.start {
3768 cursor.goto_previous_sibling();
3769 range = cursor.node().byte_range();
3770 }
3771 } else {
3772 // When the query range is non-empty and the current node ends exactly at the start,
3773 // move to the next sibling to find a node that extends beyond the start.
3774 if range.end == query_range.start {
3775 cursor.goto_next_sibling();
3776 range = cursor.node().byte_range();
3777 }
3778 }
3779
3780 let encloses = range.contains_inclusive(query_range)
3781 && (!require_larger || range.len() > query_range.len());
3782 if !encloses {
3783 ascending = true;
3784 if !cursor.goto_parent() {
3785 return false;
3786 }
3787 continue;
3788 } else if ascending {
3789 return true;
3790 }
3791
3792 // Descend into the current node.
3793 if cursor
3794 .goto_first_child_for_byte(query_range.start)
3795 .is_none()
3796 {
3797 return true;
3798 }
3799 }
3800 }
3801
3802 pub fn syntax_ancestor<'a, T: ToOffset>(
3803 &'a self,
3804 range: Range<T>,
3805 ) -> Option<tree_sitter::Node<'a>> {
3806 let range = range.start.to_offset(self)..range.end.to_offset(self);
3807 let mut result: Option<tree_sitter::Node<'a>> = None;
3808 for layer in self
3809 .syntax
3810 .layers_for_range(range.clone(), &self.text, true)
3811 {
3812 let mut cursor = layer.node().walk();
3813
3814 // Find the node that both contains the range and is larger than it.
3815 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3816 continue;
3817 }
3818
3819 let left_node = cursor.node();
3820 let mut layer_result = left_node;
3821
3822 // For an empty range, try to find another node immediately to the right of the range.
3823 if left_node.end_byte() == range.start {
3824 let mut right_node = None;
3825 while !cursor.goto_next_sibling() {
3826 if !cursor.goto_parent() {
3827 break;
3828 }
3829 }
3830
3831 while cursor.node().start_byte() == range.start {
3832 right_node = Some(cursor.node());
3833 if !cursor.goto_first_child() {
3834 break;
3835 }
3836 }
3837
3838 // If there is a candidate node on both sides of the (empty) range, then
3839 // decide between the two by favoring a named node over an anonymous token.
3840 // If both nodes are the same in that regard, favor the right one.
3841 if let Some(right_node) = right_node
3842 && (right_node.is_named() || !left_node.is_named())
3843 {
3844 layer_result = right_node;
3845 }
3846 }
3847
3848 if let Some(previous_result) = &result
3849 && previous_result.byte_range().len() < layer_result.byte_range().len()
3850 {
3851 continue;
3852 }
3853 result = Some(layer_result);
3854 }
3855
3856 result
3857 }
3858
3859 /// Find the previous sibling syntax node at the given range.
3860 ///
3861 /// This function locates the syntax node that precedes the node containing
3862 /// the given range. It searches hierarchically by:
3863 /// 1. Finding the node that contains the given range
3864 /// 2. Looking for the previous sibling at the same tree level
3865 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3866 ///
3867 /// Returns `None` if there is no previous sibling at any ancestor level.
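    ///
    /// A usage sketch (assumes `snapshot` holds parsed syntax and `offset` is a byte offset; not run as a doctest):
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_prev_sibling(offset..offset) {
    ///     println!("previous sibling: {} at {:?}", node.kind(), node.byte_range());
    /// }
    /// ```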
3868 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3869 &'a self,
3870 range: Range<T>,
3871 ) -> Option<tree_sitter::Node<'a>> {
3872 let range = range.start.to_offset(self)..range.end.to_offset(self);
3873 let mut result: Option<tree_sitter::Node<'a>> = None;
3874
3875 for layer in self
3876 .syntax
3877 .layers_for_range(range.clone(), &self.text, true)
3878 {
3879 let mut cursor = layer.node().walk();
3880
3881 // Find the node that contains the range
3882 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3883 continue;
3884 }
3885
3886 // Look for the previous sibling, moving up ancestor levels if needed
3887 loop {
3888 if cursor.goto_previous_sibling() {
3889 let layer_result = cursor.node();
3890
3891 if let Some(previous_result) = &result {
3892 if previous_result.byte_range().end < layer_result.byte_range().end {
3893 continue;
3894 }
3895 }
3896 result = Some(layer_result);
3897 break;
3898 }
3899
3900 // No sibling found at this level, try moving up to parent
3901 if !cursor.goto_parent() {
3902 break;
3903 }
3904 }
3905 }
3906
3907 result
3908 }
3909
3910 /// Find the next sibling syntax node at the given range.
3911 ///
3912 /// This function locates the syntax node that follows the node containing
3913 /// the given range. It searches hierarchically by:
3914 /// 1. Finding the node that contains the given range
3915 /// 2. Looking for the next sibling at the same tree level
3916 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3917 ///
3918 /// Returns `None` if there is no next sibling at any ancestor level.
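    ///
    /// A usage sketch (assumes `snapshot` holds parsed syntax and `offset` is a byte offset; not run as a doctest):
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_next_sibling(offset..offset) {
    ///     println!("next sibling: {} at {:?}", node.kind(), node.byte_range());
    /// }
    /// ```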
3919 pub fn syntax_next_sibling<'a, T: ToOffset>(
3920 &'a self,
3921 range: Range<T>,
3922 ) -> Option<tree_sitter::Node<'a>> {
3923 let range = range.start.to_offset(self)..range.end.to_offset(self);
3924 let mut result: Option<tree_sitter::Node<'a>> = None;
3925
3926 for layer in self
3927 .syntax
3928 .layers_for_range(range.clone(), &self.text, true)
3929 {
3930 let mut cursor = layer.node().walk();
3931
3932 // Find the node that contains the range
3933 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3934 continue;
3935 }
3936
3937 // Look for the next sibling, moving up ancestor levels if needed
3938 loop {
3939 if cursor.goto_next_sibling() {
3940 let layer_result = cursor.node();
3941
3942 if let Some(previous_result) = &result {
3943 if previous_result.byte_range().start > layer_result.byte_range().start {
3944 continue;
3945 }
3946 }
3947 result = Some(layer_result);
3948 break;
3949 }
3950
3951 // No sibling found at this level, try moving up to parent
3952 if !cursor.goto_parent() {
3953 break;
3954 }
3955 }
3956 }
3957
3958 result
3959 }
3960
3961 /// Returns the root syntax node within the given row
3962 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3963 let start_offset = position.to_offset(self);
3964
3965 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3966
3967 let layer = self
3968 .syntax
3969 .layers_for_range(start_offset..start_offset, &self.text, true)
3970 .next()?;
3971
3972 let mut cursor = layer.node().walk();
3973
3974 // Descend to the first leaf that touches the start of the range.
3975 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3976 if cursor.node().end_byte() == start_offset {
3977 cursor.goto_next_sibling();
3978 }
3979 }
3980
3981 // Ascend to the root node within the same row.
3982 while cursor.goto_parent() {
3983 if cursor.node().start_position().row != row {
3984 break;
3985 }
3986 }
3987
3988 Some(cursor.node())
3989 }
3990
3991 /// Returns the outline for the buffer.
3992 ///
3993 /// This method allows passing an optional [`SyntaxTheme`] to
3994 /// syntax-highlight the returned symbols.
3995 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3996 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3997 }
3998
3999 /// Returns all the symbols that contain the given position.
4000 ///
4001 /// This method allows passing an optional [`SyntaxTheme`] to
4002 /// syntax-highlight the returned symbols.
4003 pub fn symbols_containing<T: ToOffset>(
4004 &self,
4005 position: T,
4006 theme: Option<&SyntaxTheme>,
4007 ) -> Vec<OutlineItem<Anchor>> {
4008 let position = position.to_offset(self);
4009 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4010 let end = self.clip_offset(position + 1, Bias::Right);
4011 let mut items = self.outline_items_containing(start..end, false, theme);
4012 let mut prev_depth = None;
4013 items.retain(|item| {
4014 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4015 prev_depth = Some(item.depth);
4016 result
4017 });
4018 items
4019 }
4020
4021 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4022 let range = range.to_offset(self);
4023 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4024 grammar.outline_config.as_ref().map(|c| &c.query)
4025 });
4026 let configs = matches
4027 .grammars()
4028 .iter()
4029 .map(|g| g.outline_config.as_ref().unwrap())
4030 .collect::<Vec<_>>();
4031
4032 while let Some(mat) = matches.peek() {
4033 let config = &configs[mat.grammar_index];
4034 let containing_item_node = maybe!({
4035 let item_node = mat.captures.iter().find_map(|cap| {
4036 if cap.index == config.item_capture_ix {
4037 Some(cap.node)
4038 } else {
4039 None
4040 }
4041 })?;
4042
4043 let item_byte_range = item_node.byte_range();
4044 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4045 None
4046 } else {
4047 Some(item_node)
4048 }
4049 });
4050
4051 if let Some(item_node) = containing_item_node {
4052 return Some(
4053 Point::from_ts_point(item_node.start_position())
4054 ..Point::from_ts_point(item_node.end_position()),
4055 );
4056 }
4057
4058 matches.advance();
4059 }
4060 None
4061 }
4062
4063 pub fn outline_items_containing<T: ToOffset>(
4064 &self,
4065 range: Range<T>,
4066 include_extra_context: bool,
4067 theme: Option<&SyntaxTheme>,
4068 ) -> Vec<OutlineItem<Anchor>> {
4069 self.outline_items_containing_internal(
4070 range,
4071 include_extra_context,
4072 theme,
4073 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4074 )
4075 }
4076
4077 pub fn outline_items_as_points_containing<T: ToOffset>(
4078 &self,
4079 range: Range<T>,
4080 include_extra_context: bool,
4081 theme: Option<&SyntaxTheme>,
4082 ) -> Vec<OutlineItem<Point>> {
4083 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4084 range
4085 })
4086 }
4087
4088 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4089 &self,
4090 range: Range<T>,
4091 include_extra_context: bool,
4092 theme: Option<&SyntaxTheme>,
4093 ) -> Vec<OutlineItem<usize>> {
4094 self.outline_items_containing_internal(
4095 range,
4096 include_extra_context,
4097 theme,
4098 |buffer, range| range.to_offset(buffer),
4099 )
4100 }
4101
4102 fn outline_items_containing_internal<T: ToOffset, U>(
4103 &self,
4104 range: Range<T>,
4105 include_extra_context: bool,
4106 theme: Option<&SyntaxTheme>,
4107 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4108 ) -> Vec<OutlineItem<U>> {
4109 let range = range.to_offset(self);
4110 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4111 grammar.outline_config.as_ref().map(|c| &c.query)
4112 });
4113
4114 let mut items = Vec::new();
4115 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4116 while let Some(mat) = matches.peek() {
4117 let config = matches.grammars()[mat.grammar_index]
4118 .outline_config
4119 .as_ref()
4120 .unwrap();
4121 if let Some(item) =
4122 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4123 {
4124 items.push(item);
4125 } else if let Some(capture) = mat
4126 .captures
4127 .iter()
4128 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4129 {
4130 let capture_range = capture.node.start_position()..capture.node.end_position();
4131 let mut capture_row_range =
4132 capture_range.start.row as u32..capture_range.end.row as u32;
4133 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4134 {
4135 capture_row_range.end -= 1;
4136 }
4137 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4138 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4139 last_row_range.end = capture_row_range.end;
4140 } else {
4141 annotation_row_ranges.push(capture_row_range);
4142 }
4143 } else {
4144 annotation_row_ranges.push(capture_row_range);
4145 }
4146 }
4147 matches.advance();
4148 }
4149
4150 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4151
4152 // Assign depths based on containment relationships and convert to anchors.
4153 let mut item_ends_stack = Vec::<Point>::new();
4154 let mut anchor_items = Vec::new();
4155 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4156 for item in items {
4157 while let Some(last_end) = item_ends_stack.last().copied() {
4158 if last_end < item.range.end {
4159 item_ends_stack.pop();
4160 } else {
4161 break;
4162 }
4163 }
4164
4165 let mut annotation_row_range = None;
4166 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4167 let row_preceding_item = item.range.start.row.saturating_sub(1);
4168 if next_annotation_row_range.end < row_preceding_item {
4169 annotation_row_ranges.next();
4170 } else {
4171 if next_annotation_row_range.end == row_preceding_item {
4172 annotation_row_range = Some(next_annotation_row_range.clone());
4173 annotation_row_ranges.next();
4174 }
4175 break;
4176 }
4177 }
4178
4179 anchor_items.push(OutlineItem {
4180 depth: item_ends_stack.len(),
4181 range: range_callback(self, item.range.clone()),
4182 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4183 text: item.text,
4184 highlight_ranges: item.highlight_ranges,
4185 name_ranges: item.name_ranges,
4186 body_range: item.body_range.map(|r| range_callback(self, r)),
4187 annotation_range: annotation_row_range.map(|annotation_range| {
4188 let point_range = Point::new(annotation_range.start, 0)
4189 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4190 range_callback(self, point_range)
4191 }),
4192 });
4193 item_ends_stack.push(item.range.end);
4194 }
4195
4196 anchor_items
4197 }
4198
4199 fn next_outline_item(
4200 &self,
4201 config: &OutlineConfig,
4202 mat: &SyntaxMapMatch,
4203 range: &Range<usize>,
4204 include_extra_context: bool,
4205 theme: Option<&SyntaxTheme>,
4206 ) -> Option<OutlineItem<Point>> {
4207 let item_node = mat.captures.iter().find_map(|cap| {
4208 if cap.index == config.item_capture_ix {
4209 Some(cap.node)
4210 } else {
4211 None
4212 }
4213 })?;
4214
4215 let item_byte_range = item_node.byte_range();
4216 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4217 return None;
4218 }
4219 let item_point_range = Point::from_ts_point(item_node.start_position())
4220 ..Point::from_ts_point(item_node.end_position());
4221
4222 let mut open_point = None;
4223 let mut close_point = None;
4224
4225 let mut buffer_ranges = Vec::new();
4226 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4227 let mut range = node.start_byte()..node.end_byte();
4228 let start = node.start_position();
4229 if node.end_position().row > start.row {
4230 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4231 }
4232
4233 if !range.is_empty() {
4234 buffer_ranges.push((range, node_is_name));
4235 }
4236 };
4237
4238 for capture in mat.captures {
4239 if capture.index == config.name_capture_ix {
4240 add_to_buffer_ranges(capture.node, true);
4241 } else if Some(capture.index) == config.context_capture_ix
4242 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4243 {
4244 add_to_buffer_ranges(capture.node, false);
4245 } else {
4246 if Some(capture.index) == config.open_capture_ix {
4247 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4248 } else if Some(capture.index) == config.close_capture_ix {
4249 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4250 }
4251 }
4252 }
4253
4254 if buffer_ranges.is_empty() {
4255 return None;
4256 }
4257 let source_range_for_text =
4258 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4259
4260 let mut text = String::new();
4261 let mut highlight_ranges = Vec::new();
4262 let mut name_ranges = Vec::new();
4263 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4264 let mut last_buffer_range_end = 0;
4265 for (buffer_range, is_name) in buffer_ranges {
4266 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4267 if space_added {
4268 text.push(' ');
4269 }
4270 let before_append_len = text.len();
4271 let mut offset = buffer_range.start;
4272 chunks.seek(buffer_range.clone());
4273 for mut chunk in chunks.by_ref() {
4274 if chunk.text.len() > buffer_range.end - offset {
4275 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4276 offset = buffer_range.end;
4277 } else {
4278 offset += chunk.text.len();
4279 }
4280 let style = chunk
4281 .syntax_highlight_id
4282 .zip(theme)
4283 .and_then(|(highlight, theme)| highlight.style(theme));
4284 if let Some(style) = style {
4285 let start = text.len();
4286 let end = start + chunk.text.len();
4287 highlight_ranges.push((start..end, style));
4288 }
4289 text.push_str(chunk.text);
4290 if offset >= buffer_range.end {
4291 break;
4292 }
4293 }
4294 if is_name {
4295 let after_append_len = text.len();
4296 let start = if space_added && !name_ranges.is_empty() {
4297 before_append_len - 1
4298 } else {
4299 before_append_len
4300 };
4301 name_ranges.push(start..after_append_len);
4302 }
4303 last_buffer_range_end = buffer_range.end;
4304 }
4305
4306 Some(OutlineItem {
4307 depth: 0, // We'll calculate the depth later
4308 range: item_point_range,
4309 source_range_for_text: source_range_for_text.to_point(self),
4310 text,
4311 highlight_ranges,
4312 name_ranges,
4313 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4314 annotation_range: None,
4315 })
4316 }
4317
4318 pub fn function_body_fold_ranges<T: ToOffset>(
4319 &self,
4320 within: Range<T>,
4321 ) -> impl Iterator<Item = Range<usize>> + '_ {
4322 self.text_object_ranges(within, TreeSitterOptions::default())
4323 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4324 }
4325
4326 /// For each grammar in the language, runs the provided
4327 /// [`tree_sitter::Query`] against the given range.
4328 pub fn matches(
4329 &self,
4330 range: Range<usize>,
4331 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4332 ) -> SyntaxMapMatches<'_> {
4333 self.syntax.matches(range, self, query)
4334 }
4335
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that
    /// intersect with those chunks. Hence, it may return more bracket pairs than the range itself contains.
    ///
    /// Chunks listed in `known_chunks` are omitted from the result.
    /// The resulting bracket match collections are not ordered.
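    ///
    /// A usage sketch (assumes `snapshot` is a `BufferSnapshot`; not run as a doctest):
    ///
    /// ```ignore
    /// // First call: nothing is cached by the caller, so all intersecting chunks are returned.
    /// let brackets = snapshot.fetch_bracket_ranges(0..snapshot.len(), None);
    /// // Subsequent calls can pass the chunk row ranges the caller already holds;
    /// // those chunks will be omitted from the result.
    /// let known: HashSet<Range<BufferRow>> = brackets.keys().cloned().collect();
    /// let remaining = snapshot.fetch_bracket_ranges(0..snapshot.len(), Some(&known));
    /// ```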
4341 pub fn fetch_bracket_ranges(
4342 &self,
4343 range: Range<usize>,
4344 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4345 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4346 let mut all_bracket_matches = HashMap::default();
4347
4348 for chunk in self
4349 .tree_sitter_data
4350 .chunks
4351 .applicable_chunks(&[range.to_point(self)])
4352 {
4353 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4354 continue;
4355 }
4356 let chunk_range = chunk.anchor_range();
4357 let chunk_range = chunk_range.to_offset(&self);
4358
4359 if let Some(cached_brackets) =
4360 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4361 {
4362 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4363 continue;
4364 }
4365
4366 let mut all_brackets = Vec::new();
4367 let mut opens = Vec::new();
4368 let mut color_pairs = Vec::new();
4369
4370 let mut matches = self.syntax.matches_with_options(
4371 chunk_range.clone(),
4372 &self.text,
4373 TreeSitterOptions {
4374 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4375 max_start_depth: None,
4376 },
4377 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4378 );
4379 let configs = matches
4380 .grammars()
4381 .iter()
4382 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4383 .collect::<Vec<_>>();
4384
4385 while let Some(mat) = matches.peek() {
4386 let mut open = None;
4387 let mut close = None;
4388 let syntax_layer_depth = mat.depth;
4389 let config = configs[mat.grammar_index];
4390 let pattern = &config.patterns[mat.pattern_index];
4391 for capture in mat.captures {
4392 if capture.index == config.open_capture_ix {
4393 open = Some(capture.node.byte_range());
4394 } else if capture.index == config.close_capture_ix {
4395 close = Some(capture.node.byte_range());
4396 }
4397 }
4398
4399 matches.advance();
4400
4401 let Some((open_range, close_range)) = open.zip(close) else {
4402 continue;
4403 };
4404
4405 let bracket_range = open_range.start..=close_range.end;
4406 if !bracket_range.overlaps(&chunk_range) {
4407 continue;
4408 }
4409
4410 let index = all_brackets.len();
4411 all_brackets.push(BracketMatch {
4412 open_range: open_range.clone(),
4413 close_range: close_range.clone(),
4414 newline_only: pattern.newline_only,
4415 syntax_layer_depth,
4416 color_index: None,
4417 });
4418
                // Certain languages have "brackets" that are not really brackets, e.g. tags, and such
                // a bracket will match the entire tag with all of the text inside it.
                // For now, avoid highlighting any pair where both brackets are longer than a single character.
                // We need to colorize `<Element/>` bracket pairs, so we cannot make this check stricter.
4423 let should_color =
4424 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4425 if should_color {
4426 opens.push(open_range.clone());
4427 color_pairs.push((open_range, close_range, index));
4428 }
4429 }
4430
4431 opens.sort_by_key(|r| (r.start, r.end));
4432 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4433 color_pairs.sort_by_key(|(_, close, _)| close.end);
4434
4435 let mut open_stack = Vec::new();
4436 let mut open_index = 0;
4437 for (open, close, index) in color_pairs {
4438 while open_index < opens.len() && opens[open_index].start < close.start {
4439 open_stack.push(opens[open_index].clone());
4440 open_index += 1;
4441 }
4442
4443 if open_stack.last() == Some(&open) {
4444 let depth_index = open_stack.len() - 1;
4445 all_brackets[index].color_index = Some(depth_index);
4446 open_stack.pop();
4447 }
4448 }
4449
4450 all_brackets.sort_by_key(|bracket_match| {
4451 (bracket_match.open_range.start, bracket_match.open_range.end)
4452 });
4453
4454 if let empty_slot @ None =
4455 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4456 {
4457 *empty_slot = Some(all_brackets.clone());
4458 }
4459 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4460 }
4461
4462 all_bracket_matches
4463 }
4464
4465 pub fn all_bracket_ranges(
4466 &self,
4467 range: Range<usize>,
4468 ) -> impl Iterator<Item = BracketMatch<usize>> {
4469 self.fetch_bracket_ranges(range.clone(), None)
4470 .into_values()
4471 .flatten()
4472 .filter(move |bracket_match| {
4473 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4474 bracket_range.overlaps(&range)
4475 })
4476 }
4477
    /// Returns bracket range pairs overlapping or adjacent to `range`.
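    ///
    /// A usage sketch (assumes `snapshot` and a cursor `offset`; not run as a doctest):
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(offset..offset) {
    ///     // `open_range` and `close_range` are the byte ranges of the two delimiters.
    ///     println!("{:?} .. {:?}", pair.open_range, pair.close_range);
    /// }
    /// ```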
4479 pub fn bracket_ranges<T: ToOffset>(
4480 &self,
4481 range: Range<T>,
4482 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4483 // Find bracket pairs that *inclusively* contain the given range.
4484 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4485 self.all_bracket_ranges(range)
4486 .filter(|pair| !pair.newline_only)
4487 }
4488
4489 pub fn debug_variables_query<T: ToOffset>(
4490 &self,
4491 range: Range<T>,
4492 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4493 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4494
4495 let mut matches = self.syntax.matches_with_options(
4496 range.clone(),
4497 &self.text,
4498 TreeSitterOptions::default(),
4499 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4500 );
4501
4502 let configs = matches
4503 .grammars()
4504 .iter()
4505 .map(|grammar| grammar.debug_variables_config.as_ref())
4506 .collect::<Vec<_>>();
4507
4508 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4509
4510 iter::from_fn(move || {
4511 loop {
4512 while let Some(capture) = captures.pop() {
4513 if capture.0.overlaps(&range) {
4514 return Some(capture);
4515 }
4516 }
4517
4518 let mat = matches.peek()?;
4519
4520 let Some(config) = configs[mat.grammar_index].as_ref() else {
4521 matches.advance();
4522 continue;
4523 };
4524
4525 for capture in mat.captures {
4526 let Some(ix) = config
4527 .objects_by_capture_ix
4528 .binary_search_by_key(&capture.index, |e| e.0)
4529 .ok()
4530 else {
4531 continue;
4532 };
4533 let text_object = config.objects_by_capture_ix[ix].1;
4534 let byte_range = capture.node.byte_range();
4535
4536 let mut found = false;
4537 for (range, existing) in captures.iter_mut() {
4538 if existing == &text_object {
4539 range.start = range.start.min(byte_range.start);
4540 range.end = range.end.max(byte_range.end);
4541 found = true;
4542 break;
4543 }
4544 }
4545
4546 if !found {
4547 captures.push((byte_range, text_object));
4548 }
4549 }
4550
4551 matches.advance();
4552 }
4553 })
4554 }
4555
4556 pub fn text_object_ranges<T: ToOffset>(
4557 &self,
4558 range: Range<T>,
4559 options: TreeSitterOptions,
4560 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4561 let range =
4562 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4563
4564 let mut matches =
4565 self.syntax
4566 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4567 grammar.text_object_config.as_ref().map(|c| &c.query)
4568 });
4569
4570 let configs = matches
4571 .grammars()
4572 .iter()
4573 .map(|grammar| grammar.text_object_config.as_ref())
4574 .collect::<Vec<_>>();
4575
4576 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4577
4578 iter::from_fn(move || {
4579 loop {
4580 while let Some(capture) = captures.pop() {
4581 if capture.0.overlaps(&range) {
4582 return Some(capture);
4583 }
4584 }
4585
4586 let mat = matches.peek()?;
4587
4588 let Some(config) = configs[mat.grammar_index].as_ref() else {
4589 matches.advance();
4590 continue;
4591 };
4592
4593 for capture in mat.captures {
4594 let Some(ix) = config
4595 .text_objects_by_capture_ix
4596 .binary_search_by_key(&capture.index, |e| e.0)
4597 .ok()
4598 else {
4599 continue;
4600 };
4601 let text_object = config.text_objects_by_capture_ix[ix].1;
4602 let byte_range = capture.node.byte_range();
4603
4604 let mut found = false;
4605 for (range, existing) in captures.iter_mut() {
4606 if existing == &text_object {
4607 range.start = range.start.min(byte_range.start);
4608 range.end = range.end.max(byte_range.end);
4609 found = true;
4610 break;
4611 }
4612 }
4613
4614 if !found {
4615 captures.push((byte_range, text_object));
4616 }
4617 }
4618
4619 matches.advance();
4620 }
4621 })
4622 }
4623
    /// Returns enclosing bracket ranges containing the given range.
4625 pub fn enclosing_bracket_ranges<T: ToOffset>(
4626 &self,
4627 range: Range<T>,
4628 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4629 let range = range.start.to_offset(self)..range.end.to_offset(self);
4630
4631 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4632 let max_depth = result
4633 .iter()
4634 .map(|mat| mat.syntax_layer_depth)
4635 .max()
4636 .unwrap_or(0);
4637 result.into_iter().filter(move |pair| {
4638 pair.open_range.start <= range.start
4639 && pair.close_range.end >= range.end
4640 && pair.syntax_layer_depth == max_depth
4641 })
4642 }
4643
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no
    /// bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
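    ///
    /// A usage sketch (assumes `snapshot` and a selection `range`; not run as a doctest):
    ///
    /// ```ignore
    /// // Only consider pairs whose delimiters are a single byte wide.
    /// let filter = |open: Range<usize>, close: Range<usize>| open.len() == 1 && close.len() == 1;
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(range, Some(&filter)) {
    ///     // `open` and `close` are the byte ranges of the innermost enclosing pair.
    /// }
    /// ```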
4647 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4648 &self,
4649 range: Range<T>,
4650 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4651 ) -> Option<(Range<usize>, Range<usize>)> {
4652 let range = range.start.to_offset(self)..range.end.to_offset(self);
4653
4654 // Get the ranges of the innermost pair of brackets.
4655 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4656
4657 for pair in self.enclosing_bracket_ranges(range) {
4658 if let Some(range_filter) = range_filter
4659 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4660 {
4661 continue;
4662 }
4663
4664 let len = pair.close_range.end - pair.open_range.start;
4665
4666 if let Some((existing_open, existing_close)) = &result {
4667 let existing_len = existing_close.end - existing_open.start;
4668 if len > existing_len {
4669 continue;
4670 }
4671 }
4672
4673 result = Some((pair.open_range, pair.close_range));
4674 }
4675
4676 result
4677 }
4678
    /// Returns offset ranges for any matches of the redaction query.
4680 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4681 /// will be run on the relevant section of the buffer.
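    ///
    /// A usage sketch (assumes `snapshot`; not run as a doctest):
    ///
    /// ```ignore
    /// // Collect the byte ranges that should be obscured before displaying the text.
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```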
4682 pub fn redacted_ranges<T: ToOffset>(
4683 &self,
4684 range: Range<T>,
4685 ) -> impl Iterator<Item = Range<usize>> + '_ {
4686 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4687 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4688 grammar
4689 .redactions_config
4690 .as_ref()
4691 .map(|config| &config.query)
4692 });
4693
4694 let configs = syntax_matches
4695 .grammars()
4696 .iter()
4697 .map(|grammar| grammar.redactions_config.as_ref())
4698 .collect::<Vec<_>>();
4699
4700 iter::from_fn(move || {
4701 let redacted_range = syntax_matches
4702 .peek()
4703 .and_then(|mat| {
4704 configs[mat.grammar_index].and_then(|config| {
4705 mat.captures
4706 .iter()
4707 .find(|capture| capture.index == config.redaction_capture_ix)
4708 })
4709 })
4710 .map(|mat| mat.node.byte_range());
4711 syntax_matches.advance();
4712 redacted_range
4713 })
4714 }
4715
4716 pub fn injections_intersecting_range<T: ToOffset>(
4717 &self,
4718 range: Range<T>,
4719 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4720 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4721
4722 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4723 grammar
4724 .injection_config
4725 .as_ref()
4726 .map(|config| &config.query)
4727 });
4728
4729 let configs = syntax_matches
4730 .grammars()
4731 .iter()
4732 .map(|grammar| grammar.injection_config.as_ref())
4733 .collect::<Vec<_>>();
4734
4735 iter::from_fn(move || {
4736 let ranges = syntax_matches.peek().and_then(|mat| {
4737 let config = &configs[mat.grammar_index]?;
4738 let content_capture_range = mat.captures.iter().find_map(|capture| {
4739 if capture.index == config.content_capture_ix {
4740 Some(capture.node.byte_range())
4741 } else {
4742 None
4743 }
4744 })?;
4745 let language = self.language_at(content_capture_range.start)?;
4746 Some((content_capture_range, language))
4747 });
4748 syntax_matches.advance();
4749 ranges
4750 })
4751 }
4752
4753 pub fn runnable_ranges(
4754 &self,
4755 offset_range: Range<usize>,
4756 ) -> impl Iterator<Item = RunnableRange> + '_ {
4757 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4758 grammar.runnable_config.as_ref().map(|config| &config.query)
4759 });
4760
4761 let test_configs = syntax_matches
4762 .grammars()
4763 .iter()
4764 .map(|grammar| grammar.runnable_config.as_ref())
4765 .collect::<Vec<_>>();
4766
4767 iter::from_fn(move || {
4768 loop {
4769 let mat = syntax_matches.peek()?;
4770
4771 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4772 let mut run_range = None;
4773 let full_range = mat.captures.iter().fold(
4774 Range {
4775 start: usize::MAX,
4776 end: 0,
4777 },
4778 |mut acc, next| {
4779 let byte_range = next.node.byte_range();
4780 if acc.start > byte_range.start {
4781 acc.start = byte_range.start;
4782 }
4783 if acc.end < byte_range.end {
4784 acc.end = byte_range.end;
4785 }
4786 acc
4787 },
4788 );
4789 if full_range.start > full_range.end {
4790 // We did not find a full spanning range of this match.
4791 return None;
4792 }
4793 let extra_captures: SmallVec<[_; 1]> =
4794 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4795 test_configs
4796 .extra_captures
4797 .get(capture.index as usize)
4798 .cloned()
4799 .and_then(|tag_name| match tag_name {
4800 RunnableCapture::Named(name) => {
4801 Some((capture.node.byte_range(), name))
4802 }
4803 RunnableCapture::Run => {
4804 let _ = run_range.insert(capture.node.byte_range());
4805 None
4806 }
4807 })
4808 }));
4809 let run_range = run_range?;
4810 let tags = test_configs
4811 .query
4812 .property_settings(mat.pattern_index)
4813 .iter()
4814 .filter_map(|property| {
4815 if *property.key == *"tag" {
4816 property
4817 .value
4818 .as_ref()
4819 .map(|value| RunnableTag(value.to_string().into()))
4820 } else {
4821 None
4822 }
4823 })
4824 .collect();
4825 let extra_captures = extra_captures
4826 .into_iter()
4827 .map(|(range, name)| {
4828 (
4829 name.to_string(),
4830 self.text_for_range(range).collect::<String>(),
4831 )
4832 })
4833 .collect();
4834 // All tags should have the same range.
4835 Some(RunnableRange {
4836 run_range,
4837 full_range,
4838 runnable: Runnable {
4839 tags,
4840 language: mat.language,
4841 buffer: self.remote_id(),
4842 },
4843 extra_captures,
4844 buffer_id: self.remote_id(),
4845 })
4846 });
4847
4848 syntax_matches.advance();
4849 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. We don't want to return
                    // `None` from this iterator just because a match did not contain a run marker, so in
                    // that case we simply loop around to the next match.
4852 return test_range;
4853 }
4854 }
4855 })
4856 }
4857
4858 /// Returns selections for remote peers intersecting the given range.
4859 #[allow(clippy::type_complexity)]
4860 pub fn selections_in_range(
4861 &self,
4862 range: Range<Anchor>,
4863 include_local: bool,
4864 ) -> impl Iterator<
4865 Item = (
4866 ReplicaId,
4867 bool,
4868 CursorShape,
4869 impl Iterator<Item = &Selection<Anchor>> + '_,
4870 ),
4871 > + '_ {
4872 self.remote_selections
4873 .iter()
4874 .filter(move |(replica_id, set)| {
4875 (include_local || **replica_id != self.text.replica_id())
4876 && !set.selections.is_empty()
4877 })
4878 .map(move |(replica_id, set)| {
4879 let start_ix = match set.selections.binary_search_by(|probe| {
4880 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4881 }) {
4882 Ok(ix) | Err(ix) => ix,
4883 };
4884 let end_ix = match set.selections.binary_search_by(|probe| {
4885 probe.start.cmp(&range.end, self).then(Ordering::Less)
4886 }) {
4887 Ok(ix) | Err(ix) => ix,
4888 };
4889
4890 (
4891 *replica_id,
4892 set.line_mode,
4893 set.cursor_shape,
4894 set.selections[start_ix..end_ix].iter(),
4895 )
4896 })
4897 }
4898
    /// Returns whether the buffer contains any diagnostics.
4900 pub fn has_diagnostics(&self) -> bool {
4901 !self.diagnostics.is_empty()
4902 }
4903
4904 /// Returns all the diagnostics intersecting the given range.
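    ///
    /// A usage sketch (assumes `snapshot` and a `visible_range` of byte offsets; not run as a doctest):
    ///
    /// ```ignore
    /// // Resolve the entries to offset ranges, in ascending order.
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(visible_range, false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```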
4905 pub fn diagnostics_in_range<'a, T, O>(
4906 &'a self,
4907 search_range: Range<T>,
4908 reversed: bool,
4909 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4910 where
4911 T: 'a + Clone + ToOffset,
4912 O: 'a + FromAnchor,
4913 {
4914 let mut iterators: Vec<_> = self
4915 .diagnostics
4916 .iter()
4917 .map(|(_, collection)| {
4918 collection
4919 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4920 .peekable()
4921 })
4922 .collect();
4923
4924 std::iter::from_fn(move || {
4925 let (next_ix, _) = iterators
4926 .iter_mut()
4927 .enumerate()
4928 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4929 .min_by(|(_, a), (_, b)| {
4930 let cmp = a
4931 .range
4932 .start
4933 .cmp(&b.range.start, self)
4934 // when range is equal, sort by diagnostic severity
4935 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4936 // and stabilize order with group_id
4937 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4938 if reversed { cmp.reverse() } else { cmp }
4939 })?;
4940 iterators[next_ix]
4941 .next()
4942 .map(
4943 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4944 diagnostic,
4945 range: FromAnchor::from_anchor(&range.start, self)
4946 ..FromAnchor::from_anchor(&range.end, self),
4947 },
4948 )
4949 })
4950 }
4951
4952 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4953 /// should be used instead.
4954 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4955 &self.diagnostics
4956 }
4957
4958 /// Returns all the diagnostic groups associated with the given
4959 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
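    ///
    /// A usage sketch (assumes `snapshot`; not run as a doctest):
    ///
    /// ```ignore
    /// // Gather groups from every language server and inspect each group's primary entry.
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{server_id:?}: {}", primary.diagnostic.message);
    /// }
    /// ```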
4961 pub fn diagnostic_groups(
4962 &self,
4963 language_server_id: Option<LanguageServerId>,
4964 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4965 let mut groups = Vec::new();
4966
4967 if let Some(language_server_id) = language_server_id {
4968 if let Ok(ix) = self
4969 .diagnostics
4970 .binary_search_by_key(&language_server_id, |e| e.0)
4971 {
4972 self.diagnostics[ix]
4973 .1
4974 .groups(language_server_id, &mut groups, self);
4975 }
4976 } else {
4977 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4978 diagnostics.groups(*language_server_id, &mut groups, self);
4979 }
4980 }
4981
4982 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4983 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4984 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4985 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4986 });
4987
4988 groups
4989 }
4990
4991 /// Returns an iterator over the diagnostics for the given group.
4992 pub fn diagnostic_group<O>(
4993 &self,
4994 group_id: usize,
4995 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4996 where
4997 O: FromAnchor + 'static,
4998 {
4999 self.diagnostics
5000 .iter()
5001 .flat_map(move |(_, set)| set.group(group_id, self))
5002 }
5003
5004 /// An integer version number that accounts for all updates besides
5005 /// the buffer's text itself (which is versioned via a version vector).
5006 pub fn non_text_state_update_count(&self) -> usize {
5007 self.non_text_state_update_count
5008 }
5009
5010 /// An integer version that changes when the buffer's syntax changes.
5011 pub fn syntax_update_count(&self) -> usize {
5012 self.syntax.update_count()
5013 }
5014
    /// Returns a snapshot of the underlying file.
5016 pub fn file(&self) -> Option<&Arc<dyn File>> {
5017 self.file.as_ref()
5018 }
5019
5020 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5021 if let Some(file) = self.file() {
5022 if file.path().file_name().is_none() || include_root {
5023 Some(file.full_path(cx).to_string_lossy().into_owned())
5024 } else {
5025 Some(file.path().display(file.path_style(cx)).to_string())
5026 }
5027 } else {
5028 None
5029 }
5030 }
5031
5032 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5033 let query_str = query.fuzzy_contents;
5034 if query_str.is_some_and(|query| query.is_empty()) {
5035 return BTreeMap::default();
5036 }
5037
5038 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5039 language,
5040 override_id: None,
5041 }));
5042
5043 let mut query_ix = 0;
5044 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5045 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5046
5047 let mut words = BTreeMap::default();
5048 let mut current_word_start_ix = None;
5049 let mut chunk_ix = query.range.start;
5050 for chunk in self.chunks(query.range, false) {
5051 for (i, c) in chunk.text.char_indices() {
5052 let ix = chunk_ix + i;
5053 if classifier.is_word(c) {
5054 if current_word_start_ix.is_none() {
5055 current_word_start_ix = Some(ix);
5056 }
5057
5058 if let Some(query_chars) = &query_chars
5059 && query_ix < query_len
5060 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5061 {
5062 query_ix += 1;
5063 }
5064 continue;
5065 } else if let Some(word_start) = current_word_start_ix.take()
5066 && query_ix == query_len
5067 {
5068 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5069 let mut word_text = self.text_for_range(word_start..ix).peekable();
5070 let first_char = word_text
5071 .peek()
5072 .and_then(|first_chunk| first_chunk.chars().next());
                    // Skip empty words and "words" starting with digits, as a heuristic to reduce useless completions.
5074 if !query.skip_digits
5075 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5076 {
5077 words.insert(word_text.collect(), word_range);
5078 }
5079 }
5080 query_ix = 0;
5081 }
5082 chunk_ix += chunk.text.len();
5083 }
5084
5085 words
5086 }
5087}
5088
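/// Query parameters for `words_in_range`.
///
/// A usage sketch (assumes `snapshot`; not run as a doctest):
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// for (word, range) in words {
///     println!("{word}: {range:?}");
/// }
/// ```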
5089pub struct WordsQuery<'a> {
    /// Only return words that contain all of the characters from this fuzzy string.
5091 pub fuzzy_contents: Option<&'a str>,
5092 /// Skips words that start with a digit.
5093 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
5095 pub range: Range<usize>,
5096}
5097
5098fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5099 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5100}
5101
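/// Computes the indentation of a line from its leading whitespace characters.
///
/// A quick sketch of the behavior (not run as a doctest):
///
/// ```ignore
/// let indent = indent_size_for_text("    let x = 1;".chars());
/// // Four leading spaces: len == 4, kind == IndentKind::Space.
/// ```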
5102fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5103 let mut result = IndentSize::spaces(0);
5104 for c in text {
5105 let kind = match c {
5106 ' ' => IndentKind::Space,
5107 '\t' => IndentKind::Tab,
5108 _ => break,
5109 };
5110 if result.len == 0 {
5111 result.kind = kind;
5112 }
5113 result.len += 1;
5114 }
5115 result
5116}
5117
5118impl Clone for BufferSnapshot {
5119 fn clone(&self) -> Self {
5120 Self {
5121 text: self.text.clone(),
5122 syntax: self.syntax.clone(),
5123 file: self.file.clone(),
5124 remote_selections: self.remote_selections.clone(),
5125 diagnostics: self.diagnostics.clone(),
5126 language: self.language.clone(),
5127 tree_sitter_data: self.tree_sitter_data.clone(),
5128 non_text_state_update_count: self.non_text_state_update_count,
5129 }
5130 }
5131}
5132
5133impl Deref for BufferSnapshot {
5134 type Target = text::BufferSnapshot;
5135
5136 fn deref(&self) -> &Self::Target {
5137 &self.text
5138 }
5139}
5140
5141unsafe impl Send for BufferChunks<'_> {}
5142
5143impl<'a> BufferChunks<'a> {
5144 pub(crate) fn new(
5145 text: &'a Rope,
5146 range: Range<usize>,
5147 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5148 diagnostics: bool,
5149 buffer_snapshot: Option<&'a BufferSnapshot>,
5150 ) -> Self {
5151 let mut highlights = None;
5152 if let Some((captures, highlight_maps)) = syntax {
5153 highlights = Some(BufferChunkHighlights {
5154 captures,
5155 next_capture: None,
5156 stack: Default::default(),
5157 highlight_maps,
5158 })
5159 }
5160
5161 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5162 let chunks = text.chunks_in_range(range.clone());
5163
5164 let mut this = BufferChunks {
5165 range,
5166 buffer_snapshot,
5167 chunks,
5168 diagnostic_endpoints,
5169 error_depth: 0,
5170 warning_depth: 0,
5171 information_depth: 0,
5172 hint_depth: 0,
5173 unnecessary_depth: 0,
5174 underline: true,
5175 highlights,
5176 };
5177 this.initialize_diagnostic_endpoints();
5178 this
5179 }
5180
    /// Seeks to the given byte range in the buffer.
5182 pub fn seek(&mut self, range: Range<usize>) {
5183 let old_range = std::mem::replace(&mut self.range, range.clone());
5184 self.chunks.set_range(self.range.clone());
5185 if let Some(highlights) = self.highlights.as_mut() {
5186 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5187 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5188 highlights
5189 .stack
5190 .retain(|(end_offset, _)| *end_offset > range.start);
5191 if let Some(capture) = &highlights.next_capture
5192 && range.start >= capture.node.start_byte()
5193 {
5194 let next_capture_end = capture.node.end_byte();
5195 if range.start < next_capture_end {
5196 highlights.stack.push((
5197 next_capture_end,
5198 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5199 ));
5200 }
5201 highlights.next_capture.take();
5202 }
5203 } else if let Some(snapshot) = self.buffer_snapshot {
5204 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5205 *highlights = BufferChunkHighlights {
5206 captures,
5207 next_capture: None,
5208 stack: Default::default(),
5209 highlight_maps,
5210 };
5211 } else {
5212 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5213 // Seeking such BufferChunks is not supported.
5214 debug_assert!(
5215 false,
5216 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5217 );
5218 }
5219
5220 highlights.captures.set_byte_range(self.range.clone());
5221 self.initialize_diagnostic_endpoints();
5222 }
5223 }
5224
5225 fn initialize_diagnostic_endpoints(&mut self) {
5226 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5227 && let Some(buffer) = self.buffer_snapshot
5228 {
5229 let mut diagnostic_endpoints = Vec::new();
5230 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5231 diagnostic_endpoints.push(DiagnosticEndpoint {
5232 offset: entry.range.start,
5233 is_start: true,
5234 severity: entry.diagnostic.severity,
5235 is_unnecessary: entry.diagnostic.is_unnecessary,
5236 underline: entry.diagnostic.underline,
5237 });
5238 diagnostic_endpoints.push(DiagnosticEndpoint {
5239 offset: entry.range.end,
5240 is_start: false,
5241 severity: entry.diagnostic.severity,
5242 is_unnecessary: entry.diagnostic.is_unnecessary,
5243 underline: entry.diagnostic.underline,
5244 });
5245 }
5246 diagnostic_endpoints
5247 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5248 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5249 self.hint_depth = 0;
5250 self.error_depth = 0;
5251 self.warning_depth = 0;
5252 self.information_depth = 0;
5253 }
5254 }
5255
5256 /// The current byte offset in the buffer.
5257 pub fn offset(&self) -> usize {
5258 self.range.start
5259 }
5260
5261 pub fn range(&self) -> Range<usize> {
5262 self.range.clone()
5263 }
5264
5265 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5266 let depth = match endpoint.severity {
5267 DiagnosticSeverity::ERROR => &mut self.error_depth,
5268 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5269 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5270 DiagnosticSeverity::HINT => &mut self.hint_depth,
5271 _ => return,
5272 };
5273 if endpoint.is_start {
5274 *depth += 1;
5275 } else {
5276 *depth -= 1;
5277 }
5278
5279 if endpoint.is_unnecessary {
5280 if endpoint.is_start {
5281 self.unnecessary_depth += 1;
5282 } else {
5283 self.unnecessary_depth -= 1;
5284 }
5285 }
5286 }
5287
5288 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5289 if self.error_depth > 0 {
5290 Some(DiagnosticSeverity::ERROR)
5291 } else if self.warning_depth > 0 {
5292 Some(DiagnosticSeverity::WARNING)
5293 } else if self.information_depth > 0 {
5294 Some(DiagnosticSeverity::INFORMATION)
5295 } else if self.hint_depth > 0 {
5296 Some(DiagnosticSeverity::HINT)
5297 } else {
5298 None
5299 }
5300 }
5301
5302 fn current_code_is_unnecessary(&self) -> bool {
5303 self.unnecessary_depth > 0
5304 }
5305}
5306
5307impl<'a> Iterator for BufferChunks<'a> {
5308 type Item = Chunk<'a>;
5309
5310 fn next(&mut self) -> Option<Self::Item> {
5311 let mut next_capture_start = usize::MAX;
5312 let mut next_diagnostic_endpoint = usize::MAX;
5313
5314 if let Some(highlights) = self.highlights.as_mut() {
5315 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5316 if *parent_capture_end <= self.range.start {
5317 highlights.stack.pop();
5318 } else {
5319 break;
5320 }
5321 }
5322
5323 if highlights.next_capture.is_none() {
5324 highlights.next_capture = highlights.captures.next();
5325 }
5326
5327 while let Some(capture) = highlights.next_capture.as_ref() {
5328 if self.range.start < capture.node.start_byte() {
5329 next_capture_start = capture.node.start_byte();
5330 break;
5331 } else {
5332 let highlight_id =
5333 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5334 highlights
5335 .stack
5336 .push((capture.node.end_byte(), highlight_id));
5337 highlights.next_capture = highlights.captures.next();
5338 }
5339 }
5340 }
5341
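        // Consume diagnostic endpoints at or before the current offset, updating the
        // per-severity depth counters, and note where the next endpoint lies so the
        // emitted chunk can be clipped there.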
5342 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5343 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5344 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5345 if endpoint.offset <= self.range.start {
5346 self.update_diagnostic_depths(endpoint);
5347 diagnostic_endpoints.next();
5348 self.underline = endpoint.underline;
5349 } else {
5350 next_diagnostic_endpoint = endpoint.offset;
5351 break;
5352 }
5353 }
5354 }
5355 self.diagnostic_endpoints = diagnostic_endpoints;
5356
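        // Emit the next chunk of text, clipped to the nearest capture boundary,
        // diagnostic endpoint, or end of the enclosing highlight range.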
5357 if let Some(ChunkBitmaps {
5358 text: chunk,
5359 chars: chars_map,
5360 tabs,
5361 }) = self.chunks.peek_with_bitmaps()
5362 {
5363 let chunk_start = self.range.start;
5364 let mut chunk_end = (self.chunks.offset() + chunk.len())
5365 .min(next_capture_start)
5366 .min(next_diagnostic_endpoint);
5367 let mut highlight_id = None;
5368 if let Some(highlights) = self.highlights.as_ref()
5369 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5370 {
5371 chunk_end = chunk_end.min(*parent_capture_end);
5372 highlight_id = Some(*parent_highlight_id);
5373 }
5374 let bit_start = chunk_start - self.chunks.offset();
5375 let bit_end = chunk_end - self.chunks.offset();
5376
5377 let slice = &chunk[bit_start..bit_end];
5378
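            // `mask` has the lowest `bit_end` bits set; shifting right by `bit_start`
            // re-bases the per-byte tab and char bitmaps so that bit 0 corresponds to
            // the first byte of `slice`.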
5379 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5380 let tabs = (tabs >> bit_start) & mask;
5381 let chars = (chars_map >> bit_start) & mask;
5382
5383 self.range.start = chunk_end;
5384 if self.range.start == self.chunks.offset() + chunk.len() {
5385 self.chunks.next().unwrap();
5386 }
5387
5388 Some(Chunk {
5389 text: slice,
5390 syntax_highlight_id: highlight_id,
5391 underline: self.underline,
5392 diagnostic_severity: self.current_diagnostic_severity(),
5393 is_unnecessary: self.current_code_is_unnecessary(),
5394 tabs,
5395 chars,
5396 ..Chunk::default()
5397 })
5398 } else {
5399 None
5400 }
5401 }
5402}
5403
5404impl operation_queue::Operation for Operation {
5405 fn lamport_timestamp(&self) -> clock::Lamport {
5406 match self {
5407 Operation::Buffer(_) => {
5408 unreachable!("buffer operations should never be deferred at this layer")
5409 }
5410 Operation::UpdateDiagnostics {
5411 lamport_timestamp, ..
5412 }
5413 | Operation::UpdateSelections {
5414 lamport_timestamp, ..
5415 }
5416 | Operation::UpdateCompletionTriggers {
5417 lamport_timestamp, ..
5418 }
5419 | Operation::UpdateLineEnding {
5420 lamport_timestamp, ..
5421 } => *lamport_timestamp,
5422 }
5423 }
5424}
5425
5426impl Default for Diagnostic {
5427 fn default() -> Self {
5428 Self {
5429 source: Default::default(),
5430 source_kind: DiagnosticSourceKind::Other,
5431 code: None,
5432 code_description: None,
5433 severity: DiagnosticSeverity::ERROR,
5434 message: Default::default(),
5435 markdown: None,
5436 group_id: 0,
5437 is_primary: false,
5438 is_disk_based: false,
5439 is_unnecessary: false,
5440 underline: true,
5441 data: None,
5442 registration_id: None,
5443 }
5444 }
5445}
5446
5447impl IndentSize {
5448 /// Returns an [`IndentSize`] representing the given spaces.
5449 pub fn spaces(len: u32) -> Self {
5450 Self {
5451 len,
5452 kind: IndentKind::Space,
5453 }
5454 }
5455
5456 /// Returns an [`IndentSize`] representing a tab.
5457 pub fn tab() -> Self {
5458 Self {
5459 len: 1,
5460 kind: IndentKind::Tab,
5461 }
5462 }
5463
5464 /// An iterator over the characters represented by this [`IndentSize`].
5465 pub fn chars(&self) -> impl Iterator<Item = char> {
5466 iter::repeat(self.char()).take(self.len as usize)
5467 }
5468
5469 /// The character representation of this [`IndentSize`].
5470 pub fn char(&self) -> char {
5471 match self.kind {
5472 IndentKind::Space => ' ',
5473 IndentKind::Tab => '\t',
5474 }
5475 }
5476
5477 /// Consumes the current [`IndentSize`] and returns a new one that has
5478 /// been shrunk or enlarged by the given size along the given direction.
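    ///
    /// A minimal sketch of the expected behavior (illustrative only, not run as a doctest):
    ///
    /// ```ignore
    /// let indent = IndentSize::spaces(4);
    /// // Growing by two spaces of the same kind extends the indent to six columns.
    /// assert_eq!(indent.with_delta(Ordering::Greater, IndentSize::spaces(2)).len, 6);
    /// // Shrinking by a tab leaves the indent unchanged, because the kinds differ.
    /// assert_eq!(indent.with_delta(Ordering::Less, IndentSize::tab()).len, 4);
    /// ```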
5479 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5480 match direction {
5481 Ordering::Less => {
5482 if self.kind == size.kind && self.len >= size.len {
5483 self.len -= size.len;
5484 }
5485 }
5486 Ordering::Equal => {}
5487 Ordering::Greater => {
5488 if self.len == 0 {
5489 self = size;
5490 } else if self.kind == size.kind {
5491 self.len += size.len;
5492 }
5493 }
5494 }
5495 self
5496 }
5497
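    /// The number of columns this indent occupies when tab characters are rendered as
    /// `tab_size` columns wide.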
5498 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5499 match self.kind {
5500 IndentKind::Space => self.len as usize,
5501 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5502 }
5503 }
5504}
5505
5506#[cfg(any(test, feature = "test-support"))]
5507pub struct TestFile {
5508 pub path: Arc<RelPath>,
5509 pub root_name: String,
5510 pub local_root: Option<PathBuf>,
5511}
5512
5513#[cfg(any(test, feature = "test-support"))]
5514impl File for TestFile {
5515 fn path(&self) -> &Arc<RelPath> {
5516 &self.path
5517 }
5518
5519 fn full_path(&self, _: &gpui::App) -> PathBuf {
5520 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5521 }
5522
5523 fn as_local(&self) -> Option<&dyn LocalFile> {
5524 if self.local_root.is_some() {
5525 Some(self)
5526 } else {
5527 None
5528 }
5529 }
5530
5531 fn disk_state(&self) -> DiskState {
5532 unimplemented!()
5533 }
5534
5535 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5536 self.path().file_name().unwrap_or(self.root_name.as_ref())
5537 }
5538
5539 fn worktree_id(&self, _: &App) -> WorktreeId {
5540 WorktreeId::from_usize(0)
5541 }
5542
5543 fn to_proto(&self, _: &App) -> rpc::proto::File {
5544 unimplemented!()
5545 }
5546
5547 fn is_private(&self) -> bool {
5548 false
5549 }
5550
5551 fn path_style(&self, _cx: &App) -> PathStyle {
5552 PathStyle::local()
5553 }
5554}
5555
5556#[cfg(any(test, feature = "test-support"))]
5557impl LocalFile for TestFile {
5558 fn abs_path(&self, _cx: &App) -> PathBuf {
5559 PathBuf::from(self.local_root.as_ref().unwrap())
5560 .join(&self.root_name)
5561 .join(self.path.as_std_path())
5562 }
5563
5564 fn load(&self, _cx: &App) -> Task<Result<String>> {
5565 unimplemented!()
5566 }
5567
5568 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5569 unimplemented!()
5570 }
5571}
5572
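/// Groups consecutive values from an ascending iterator into contiguous half-open
/// ranges, starting a new range whenever a gap appears or the current range reaches
/// `max_len` values.
///
/// A small illustrative sketch (not a doctest, since this helper is crate-private):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```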
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut current_range: Option<Range<u32>> = None;
5579 std::iter::from_fn(move || {
5580 loop {
5581 if let Some(value) = values.next() {
5582 if let Some(range) = &mut current_range
5583 && value == range.end
5584 && range.len() < max_len
5585 {
5586 range.end += 1;
5587 continue;
5588 }
5589
5590 let prev_range = current_range.clone();
5591 current_range = Some(value..(value + 1));
5592 if prev_range.is_some() {
5593 return prev_range;
5594 }
5595 } else {
5596 return current_range.take();
5597 }
5598 }
5599 })
5600}
5601
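/// Classifies characters as word, whitespace, or punctuation characters, optionally
/// honoring the extra word characters declared by a language scope (and, depending on
/// the [`CharScopeContext`], its completion-query or linked-edit characters).
///
/// A minimal usage sketch (illustrative only, assuming no language scope is attached):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('x'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('.'), CharKind::Punctuation);
/// ```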
5602#[derive(Default, Debug)]
5603pub struct CharClassifier {
5604 scope: Option<LanguageScope>,
5605 scope_context: Option<CharScopeContext>,
5606 ignore_punctuation: bool,
5607}
5608
5609impl CharClassifier {
5610 pub fn new(scope: Option<LanguageScope>) -> Self {
5611 Self {
5612 scope,
5613 scope_context: None,
5614 ignore_punctuation: false,
5615 }
5616 }
5617
5618 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5619 Self {
5620 scope_context,
5621 ..self
5622 }
5623 }
5624
5625 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5626 Self {
5627 ignore_punctuation,
5628 ..self
5629 }
5630 }
5631
5632 pub fn is_whitespace(&self, c: char) -> bool {
5633 self.kind(c) == CharKind::Whitespace
5634 }
5635
5636 pub fn is_word(&self, c: char) -> bool {
5637 self.kind(c) == CharKind::Word
5638 }
5639
5640 pub fn is_punctuation(&self, c: char) -> bool {
5641 self.kind(c) == CharKind::Punctuation
5642 }
5643
5644 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5645 if c.is_alphanumeric() || c == '_' {
5646 return CharKind::Word;
5647 }
5648
5649 if let Some(scope) = &self.scope {
5650 let characters = match self.scope_context {
5651 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5652 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5653 None => scope.word_characters(),
5654 };
5655 if let Some(characters) = characters
5656 && characters.contains(&c)
5657 {
5658 return CharKind::Word;
5659 }
5660 }
5661
5662 if c.is_whitespace() {
5663 return CharKind::Whitespace;
5664 }
5665
5666 if ignore_punctuation {
5667 CharKind::Word
5668 } else {
5669 CharKind::Punctuation
5670 }
5671 }
5672
5673 pub fn kind(&self, c: char) -> CharKind {
5674 self.kind_with(c, self.ignore_punctuation)
5675 }
5676}
5677
5678/// Find all of the ranges of whitespace that occur at the ends of lines
5679/// in the given rope.
5680///
5681/// This could also be done with a regex search, but this implementation
5682/// avoids copying text.
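///
/// A small illustrative sketch (marked `ignore`; the returned ranges are byte offsets
/// into the rope):
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\nc");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```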
5683pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5684 let mut ranges = Vec::new();
5685
5686 let mut offset = 0;
5687 let mut prev_chunk_trailing_whitespace_range = 0..0;
5688 for chunk in rope.chunks() {
5689 let mut prev_line_trailing_whitespace_range = 0..0;
5690 for (i, line) in chunk.split('\n').enumerate() {
5691 let line_end_offset = offset + line.len();
5692 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5693 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5694
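            // If the first line segment of this chunk is entirely whitespace, it may
            // continue trailing whitespace that began at the end of the previous chunk,
            // so extend the range backwards to include it.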
5695 if i == 0 && trimmed_line_len == 0 {
5696 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5697 }
5698 if !prev_line_trailing_whitespace_range.is_empty() {
5699 ranges.push(prev_line_trailing_whitespace_range);
5700 }
5701
5702 offset = line_end_offset + 1;
5703 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5704 }
5705
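        // The chunk's last segment has no trailing newline, so undo the extra offset
        // increment and carry its (possibly still-open) trailing whitespace range into
        // the next chunk.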
5706 offset -= 1;
5707 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5708 }
5709
5710 if !prev_chunk_trailing_whitespace_range.is_empty() {
5711 ranges.push(prev_chunk_trailing_whitespace_range);
5712 }
5713
5714 ranges
5715}