1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::channel::oneshot;
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TaskLabel, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::{Arc, LazyLock},
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
79/// A label for the background task spawned by the buffer to compute
80/// a diff against the contents of its file.
81pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
82
/// Indicates whether a [`Buffer`] can be edited.
84#[derive(PartialEq, Clone, Copy, Debug)]
85pub enum Capability {
86 /// The buffer is a mutable replica.
87 ReadWrite,
88 /// The buffer is a read-only replica.
89 ReadOnly,
90}
91
92pub type BufferRow = u32;
93
94/// An in-memory representation of a source code file, including its text,
95/// syntax trees, git status, and diagnostics.
96pub struct Buffer {
97 text: TextBuffer,
98 branch_state: Option<BufferBranchState>,
99 /// Filesystem state, `None` when there is no path.
100 file: Option<Arc<dyn File>>,
101 /// The mtime of the file when this buffer was last loaded from
102 /// or saved to disk.
103 saved_mtime: Option<MTime>,
104 /// The version vector when this buffer was last loaded from
105 /// or saved to disk.
106 saved_version: clock::Global,
107 preview_version: clock::Global,
108 transaction_depth: usize,
109 was_dirty_before_starting_transaction: Option<bool>,
110 reload_task: Option<Task<Result<()>>>,
111 language: Option<Arc<Language>>,
112 autoindent_requests: Vec<Arc<AutoindentRequest>>,
113 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
114 pending_autoindent: Option<Task<()>>,
115 sync_parse_timeout: Duration,
116 syntax_map: Mutex<SyntaxMap>,
117 reparse: Option<Task<()>>,
118 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
119 non_text_state_update_count: usize,
120 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
121 remote_selections: TreeMap<ReplicaId, SelectionSet>,
122 diagnostics_timestamp: clock::Lamport,
123 completion_triggers: BTreeSet<String>,
124 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
125 completion_triggers_timestamp: clock::Lamport,
126 deferred_ops: OperationQueue<Operation>,
127 capability: Capability,
128 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
131 has_unsaved_edits: Cell<(clock::Global, bool)>,
132 change_bits: Vec<rc::Weak<Cell<bool>>>,
133 _subscriptions: Vec<gpui::Subscription>,
134 tree_sitter_data: Arc<TreeSitterData>,
135 encoding: &'static Encoding,
136 has_bom: bool,
137}
138
139#[derive(Debug)]
140pub struct TreeSitterData {
141 chunks: RowChunks,
142 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
143}
144
145const MAX_ROWS_IN_A_CHUNK: u32 = 50;
146
147impl TreeSitterData {
148 fn clear(&mut self, snapshot: text::BufferSnapshot) {
149 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
150 self.brackets_by_chunks.get_mut().clear();
151 self.brackets_by_chunks
152 .get_mut()
153 .resize(self.chunks.len(), None);
154 }
155
156 fn new(snapshot: text::BufferSnapshot) -> Self {
157 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
158 Self {
159 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
160 chunks,
161 }
162 }
163
164 fn version(&self) -> &clock::Global {
165 self.chunks.version()
166 }
167}
168
169#[derive(Copy, Clone, Debug, PartialEq, Eq)]
170pub enum ParseStatus {
171 Idle,
172 Parsing,
173}
174
175struct BufferBranchState {
176 base_buffer: Entity<Buffer>,
177 merged_operations: Vec<Lamport>,
178}
179
180/// An immutable, cheaply cloneable representation of a fixed
181/// state of a buffer.
182pub struct BufferSnapshot {
183 pub text: text::BufferSnapshot,
184 pub syntax: SyntaxSnapshot,
185 file: Option<Arc<dyn File>>,
186 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
187 remote_selections: TreeMap<ReplicaId, SelectionSet>,
188 language: Option<Arc<Language>>,
189 non_text_state_update_count: usize,
190 tree_sitter_data: Arc<TreeSitterData>,
191}
192
193/// The kind and amount of indentation in a particular line. For now,
194/// assumes that indentation is all the same character.
195#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
196pub struct IndentSize {
197 /// The number of bytes that comprise the indentation.
198 pub len: u32,
199 /// The kind of whitespace used for indentation.
200 pub kind: IndentKind,
201}
202
203/// A whitespace character that's used for indentation.
204#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
205pub enum IndentKind {
206 /// An ASCII space character.
207 #[default]
208 Space,
209 /// An ASCII tab character.
210 Tab,
211}
212
213/// The shape of a selection cursor.
214#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
215pub enum CursorShape {
216 /// A vertical bar
217 #[default]
218 Bar,
219 /// A block that surrounds the following character
220 Block,
221 /// An underline that runs along the following character
222 Underline,
223 /// A box drawn around the following character
224 Hollow,
225}
226
227impl From<settings::CursorShape> for CursorShape {
228 fn from(shape: settings::CursorShape) -> Self {
229 match shape {
230 settings::CursorShape::Bar => CursorShape::Bar,
231 settings::CursorShape::Block => CursorShape::Block,
232 settings::CursorShape::Underline => CursorShape::Underline,
233 settings::CursorShape::Hollow => CursorShape::Hollow,
234 }
235 }
236}
237
238#[derive(Clone, Debug)]
239struct SelectionSet {
240 line_mode: bool,
241 cursor_shape: CursorShape,
242 selections: Arc<[Selection<Anchor>]>,
243 lamport_timestamp: clock::Lamport,
244}
245
246/// A diagnostic associated with a certain range of a buffer.
247#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
248pub struct Diagnostic {
249 /// The name of the service that produced this diagnostic.
250 pub source: Option<String>,
251 /// The ID provided by the dynamic registration that produced this diagnostic.
252 pub registration_id: Option<SharedString>,
253 /// A machine-readable code that identifies this diagnostic.
254 pub code: Option<NumberOrString>,
255 pub code_description: Option<lsp::Uri>,
256 /// Whether this diagnostic is a hint, warning, or error.
257 pub severity: DiagnosticSeverity,
258 /// The human-readable message associated with this diagnostic.
259 pub message: String,
    /// The human-readable message, in Markdown format, if available.
261 pub markdown: Option<String>,
262 /// An id that identifies the group to which this diagnostic belongs.
263 ///
264 /// When a language server produces a diagnostic with
265 /// one or more associated diagnostics, those diagnostics are all
266 /// assigned a single group ID.
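    ///
    /// For example, a "mismatched types" error and its associated
    /// "expected due to this" note would typically share one group ID, with
    /// the error acting as the group's primary diagnostic.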
267 pub group_id: usize,
268 /// Whether this diagnostic is the primary diagnostic for its group.
269 ///
270 /// In a given group, the primary diagnostic is the top-level diagnostic
271 /// returned by the language server. The non-primary diagnostics are the
272 /// associated diagnostics.
273 pub is_primary: bool,
274 /// Whether this diagnostic is considered to originate from an analysis of
275 /// files on disk, as opposed to any unsaved buffer contents. This is a
276 /// property of a given diagnostic source, and is configured for a given
277 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
278 /// for the language server.
279 pub is_disk_based: bool,
280 /// Whether this diagnostic marks unnecessary code.
281 pub is_unnecessary: bool,
    /// A quick way to separate diagnostic groups by their source.
283 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the server when we request code actions for this diagnostic.
285 pub data: Option<Value>,
286 /// Whether to underline the corresponding text range in the editor.
287 pub underline: bool,
288}
289
290#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
291pub enum DiagnosticSourceKind {
292 Pulled,
293 Pushed,
294 Other,
295}
296
297/// An operation used to synchronize this buffer with its other replicas.
298#[derive(Clone, Debug, PartialEq)]
299pub enum Operation {
300 /// A text operation.
301 Buffer(text::Operation),
302
303 /// An update to the buffer's diagnostics.
304 UpdateDiagnostics {
305 /// The id of the language server that produced the new diagnostics.
306 server_id: LanguageServerId,
307 /// The diagnostics.
308 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
309 /// The buffer's lamport timestamp.
310 lamport_timestamp: clock::Lamport,
311 },
312
313 /// An update to the most recent selections in this buffer.
314 UpdateSelections {
315 /// The selections.
316 selections: Arc<[Selection<Anchor>]>,
317 /// The buffer's lamport timestamp.
318 lamport_timestamp: clock::Lamport,
319 /// Whether the selections are in 'line mode'.
320 line_mode: bool,
321 /// The [`CursorShape`] associated with these selections.
322 cursor_shape: CursorShape,
323 },
324
325 /// An update to the characters that should trigger autocompletion
326 /// for this buffer.
327 UpdateCompletionTriggers {
328 /// The characters that trigger autocompletion.
329 triggers: Vec<String>,
330 /// The buffer's lamport timestamp.
331 lamport_timestamp: clock::Lamport,
332 /// The language server ID.
333 server_id: LanguageServerId,
334 },
335
336 /// An update to the line ending type of this buffer.
337 UpdateLineEnding {
338 /// The line ending type.
339 line_ending: LineEnding,
340 /// The buffer's lamport timestamp.
341 lamport_timestamp: clock::Lamport,
342 },
343}
344
345/// An event that occurs in a buffer.
346#[derive(Clone, Debug, PartialEq)]
347pub enum BufferEvent {
348 /// The buffer was changed in a way that must be
349 /// propagated to its other replicas.
350 Operation {
351 operation: Operation,
352 is_local: bool,
353 },
354 /// The buffer was edited.
355 Edited,
356 /// The buffer's `dirty` bit changed.
357 DirtyChanged,
358 /// The buffer was saved.
359 Saved,
360 /// The buffer's file was changed on disk.
361 FileHandleChanged,
362 /// The buffer was reloaded.
363 Reloaded,
    /// The buffer needs to be reloaded.
365 ReloadNeeded,
366 /// The buffer's language was changed.
367 /// The boolean indicates whether this buffer did not have a language before, but does now.
368 LanguageChanged(bool),
369 /// The buffer's syntax trees were updated.
370 Reparsed,
371 /// The buffer's diagnostics were updated.
372 DiagnosticsUpdated,
373 /// The buffer gained or lost editing capabilities.
374 CapabilityChanged,
375}
376
377/// The file associated with a buffer.
378pub trait File: Send + Sync + Any {
379 /// Returns the [`LocalFile`] associated with this file, if the
380 /// file is local.
381 fn as_local(&self) -> Option<&dyn LocalFile>;
382
383 /// Returns whether this file is local.
384 fn is_local(&self) -> bool {
385 self.as_local().is_some()
386 }
387
388 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
389 /// only available in some states, such as modification time.
390 fn disk_state(&self) -> DiskState;
391
392 /// Returns the path of this file relative to the worktree's root directory.
393 fn path(&self) -> &Arc<RelPath>;
394
395 /// Returns the path of this file relative to the worktree's parent directory (this means it
396 /// includes the name of the worktree's root folder).
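    ///
    /// For example, for a worktree rooted at a folder named `zed` that
    /// contains `src/main.rs`, `path()` would yield `src/main.rs` while
    /// `full_path()` would yield `zed/src/main.rs` (illustrative paths).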
397 fn full_path(&self, cx: &App) -> PathBuf;
398
399 /// Returns the path style of this file.
400 fn path_style(&self, cx: &App) -> PathStyle;
401
402 /// Returns the last component of this handle's absolute path. If this handle refers to the root
403 /// of its worktree, then this method will return the name of the worktree itself.
404 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
405
406 /// Returns the id of the worktree to which this file belongs.
407 ///
408 /// This is needed for looking up project-specific settings.
409 fn worktree_id(&self, cx: &App) -> WorktreeId;
410
411 /// Converts this file into a protobuf message.
412 fn to_proto(&self, cx: &App) -> rpc::proto::File;
413
    /// Returns whether Zed considers this to be a private file.
415 fn is_private(&self) -> bool;
416}
417
418/// The file's storage status - whether it's stored (`Present`), and if so when it was last
419/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
420/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
421/// indicator for new files.
422#[derive(Copy, Clone, Debug, PartialEq)]
423pub enum DiskState {
424 /// File created in Zed that has not been saved.
425 New,
426 /// File present on the filesystem.
427 Present { mtime: MTime },
428 /// Deleted file that was previously present.
429 Deleted,
430}
431
432impl DiskState {
433 /// Returns the file's last known modification time on disk.
434 pub fn mtime(self) -> Option<MTime> {
435 match self {
436 DiskState::New => None,
437 DiskState::Present { mtime } => Some(mtime),
438 DiskState::Deleted => None,
439 }
440 }
441
442 pub fn exists(&self) -> bool {
443 match self {
444 DiskState::New => false,
445 DiskState::Present { .. } => true,
446 DiskState::Deleted => false,
447 }
448 }
449}
450
451/// The file associated with a buffer, in the case where the file is on the local disk.
452pub trait LocalFile: File {
    /// Returns the absolute path of this file.
454 fn abs_path(&self, cx: &App) -> PathBuf;
455
456 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
457 fn load(&self, cx: &App) -> Task<Result<String>>;
458
459 /// Loads the file's contents from disk.
460 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
461}
462
463/// The auto-indent behavior associated with an editing operation.
464/// For some editing operations, each affected line of text has its
465/// indentation recomputed. For other operations, the entire block
466/// of edited text is adjusted uniformly.
467#[derive(Clone, Debug)]
468pub enum AutoindentMode {
469 /// Indent each line of inserted text.
470 EachLine,
471 /// Apply the same indentation adjustment to all of the lines
472 /// in a given insertion.
473 Block {
474 /// The original indentation column of the first line of each
475 /// insertion, if it has been copied.
476 ///
477 /// Knowing this makes it possible to preserve the relative indentation
478 /// of every line in the insertion from when it was copied.
479 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is then auto-indented to column `b`, then every other
        /// line of the insertion will be shifted by `b - a` columns,
        /// preserving its indentation relative to the first line.
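        ///
        /// A minimal sketch (hypothetical offsets and text) of pasting a block
        /// that was originally copied at indent column 4:
        ///
        /// ```ignore
        /// buffer.edit(
        ///     [(offset..offset, "foo()\n    bar()\n")],
        ///     Some(AutoindentMode::Block {
        ///         original_indent_columns: vec![Some(4)],
        ///     }),
        ///     cx,
        /// );
        /// // If the first line is auto-indented to column 8, the second line
        /// // is shifted by 8 - 4 = 4 columns as well, keeping its indentation
        /// // relative to the first line.
        /// ```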
483 original_indent_columns: Vec<Option<u32>>,
484 },
485}
486
487#[derive(Clone)]
488struct AutoindentRequest {
489 before_edit: BufferSnapshot,
490 entries: Vec<AutoindentRequestEntry>,
491 is_block_mode: bool,
492 ignore_empty_lines: bool,
493}
494
495#[derive(Debug, Clone)]
496struct AutoindentRequestEntry {
497 /// A range of the buffer whose indentation should be adjusted.
498 range: Range<Anchor>,
499 /// Whether or not these lines should be considered brand new, for the
500 /// purpose of auto-indent. When text is not new, its indentation will
501 /// only be adjusted if the suggested indentation level has *changed*
502 /// since the edit was made.
503 first_line_is_new: bool,
504 indent_size: IndentSize,
505 original_indent_column: Option<u32>,
506}
507
508#[derive(Debug)]
509struct IndentSuggestion {
510 basis_row: u32,
511 delta: Ordering,
512 within_error: bool,
513}
514
515struct BufferChunkHighlights<'a> {
516 captures: SyntaxMapCaptures<'a>,
517 next_capture: Option<SyntaxMapCapture<'a>>,
518 stack: Vec<(usize, HighlightId)>,
519 highlight_maps: Vec<HighlightMap>,
520}
521
522/// An iterator that yields chunks of a buffer's text, along with their
523/// syntax highlights and diagnostic status.
524pub struct BufferChunks<'a> {
525 buffer_snapshot: Option<&'a BufferSnapshot>,
526 range: Range<usize>,
527 chunks: text::Chunks<'a>,
528 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
529 error_depth: usize,
530 warning_depth: usize,
531 information_depth: usize,
532 hint_depth: usize,
533 unnecessary_depth: usize,
534 underline: bool,
535 highlights: Option<BufferChunkHighlights<'a>>,
536}
537
538/// A chunk of a buffer's text, along with its syntax highlight and
539/// diagnostic status.
540#[derive(Clone, Debug, Default)]
541pub struct Chunk<'a> {
542 /// The text of the chunk.
543 pub text: &'a str,
544 /// The syntax highlighting style of the chunk.
545 pub syntax_highlight_id: Option<HighlightId>,
546 /// The highlight style that has been applied to this chunk in
547 /// the editor.
548 pub highlight_style: Option<HighlightStyle>,
549 /// The severity of diagnostic associated with this chunk, if any.
550 pub diagnostic_severity: Option<DiagnosticSeverity>,
551 /// A bitset of which characters are tabs in this string.
552 pub tabs: u128,
    /// A bitmap of character indices in this chunk.
554 pub chars: u128,
555 /// Whether this chunk of text is marked as unnecessary.
556 pub is_unnecessary: bool,
557 /// Whether this chunk of text was originally a tab character.
558 pub is_tab: bool,
559 /// Whether this chunk of text was originally an inlay.
560 pub is_inlay: bool,
561 /// Whether to underline the corresponding text range in the editor.
562 pub underline: bool,
563}
564
565/// A set of edits to a given version of a buffer, computed asynchronously.
566#[derive(Debug)]
567pub struct Diff {
568 pub base_version: clock::Global,
569 pub line_ending: LineEnding,
570 pub edits: Vec<(Range<usize>, Arc<str>)>,
571}
572
573#[derive(Debug, Clone, Copy)]
574pub(crate) struct DiagnosticEndpoint {
575 offset: usize,
576 is_start: bool,
577 underline: bool,
578 severity: DiagnosticSeverity,
579 is_unnecessary: bool,
580}
581
582/// A class of characters, used for characterizing a run of text.
583#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
584pub enum CharKind {
585 /// Whitespace.
586 Whitespace,
587 /// Punctuation.
588 Punctuation,
589 /// Word.
590 Word,
591}
592
593/// Context for character classification within a specific scope.
594#[derive(Copy, Clone, Eq, PartialEq, Debug)]
595pub enum CharScopeContext {
596 /// Character classification for completion queries.
597 ///
598 /// This context treats certain characters as word constituents that would
599 /// normally be considered punctuation, such as '-' in Tailwind classes
600 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
601 Completion,
602 /// Character classification for linked edits.
603 ///
604 /// This context handles characters that should be treated as part of
605 /// identifiers during linked editing operations, such as '.' in JSX
606 /// component names like `<Animated.View>`.
607 LinkedEdit,
608}
609
/// A set of data about a region of the buffer that can be resolved into a runnable task.
611pub struct Runnable {
612 pub tags: SmallVec<[RunnableTag; 1]>,
613 pub language: Arc<Language>,
614 pub buffer: BufferId,
615}
616
617#[derive(Default, Clone, Debug)]
618pub struct HighlightedText {
619 pub text: SharedString,
620 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
621}
622
623#[derive(Default, Debug)]
624struct HighlightedTextBuilder {
625 pub text: String,
626 highlights: Vec<(Range<usize>, HighlightStyle)>,
627}
628
629impl HighlightedText {
630 pub fn from_buffer_range<T: ToOffset>(
631 range: Range<T>,
632 snapshot: &text::BufferSnapshot,
633 syntax_snapshot: &SyntaxSnapshot,
634 override_style: Option<HighlightStyle>,
635 syntax_theme: &SyntaxTheme,
636 ) -> Self {
637 let mut highlighted_text = HighlightedTextBuilder::default();
638 highlighted_text.add_text_from_buffer_range(
639 range,
640 snapshot,
641 syntax_snapshot,
642 override_style,
643 syntax_theme,
644 );
645 highlighted_text.build()
646 }
647
648 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
649 gpui::StyledText::new(self.text.clone())
650 .with_default_highlights(default_style, self.highlights.iter().cloned())
651 }
652
    /// Returns the first line, with leading whitespace trimmed unless a
    /// highlight begins within it, and a boolean indicating whether more
    /// lines follow.
655 pub fn first_line_preview(self) -> (Self, bool) {
656 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
657 let first_line = &self.text[..newline_ix];
658
659 // Trim leading whitespace, unless an edit starts prior to it.
660 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
661 if let Some((first_highlight_range, _)) = self.highlights.first() {
662 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
663 }
664
665 let preview_text = &first_line[preview_start_ix..];
666 let preview_highlights = self
667 .highlights
668 .into_iter()
669 .skip_while(|(range, _)| range.end <= preview_start_ix)
670 .take_while(|(range, _)| range.start < newline_ix)
671 .filter_map(|(mut range, highlight)| {
672 range.start = range.start.saturating_sub(preview_start_ix);
673 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
674 if range.is_empty() {
675 None
676 } else {
677 Some((range, highlight))
678 }
679 });
680
681 let preview = Self {
682 text: SharedString::new(preview_text),
683 highlights: preview_highlights.collect(),
684 };
685
686 (preview, self.text.len() > newline_ix)
687 }
688}
689
690impl HighlightedTextBuilder {
691 pub fn build(self) -> HighlightedText {
692 HighlightedText {
693 text: self.text.into(),
694 highlights: self.highlights,
695 }
696 }
697
698 pub fn add_text_from_buffer_range<T: ToOffset>(
699 &mut self,
700 range: Range<T>,
701 snapshot: &text::BufferSnapshot,
702 syntax_snapshot: &SyntaxSnapshot,
703 override_style: Option<HighlightStyle>,
704 syntax_theme: &SyntaxTheme,
705 ) {
706 let range = range.to_offset(snapshot);
707 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
708 let start = self.text.len();
709 self.text.push_str(chunk.text);
710 let end = self.text.len();
711
712 if let Some(highlight_style) = chunk
713 .syntax_highlight_id
714 .and_then(|id| id.style(syntax_theme))
715 {
716 let highlight_style = override_style.map_or(highlight_style, |override_style| {
717 highlight_style.highlight(override_style)
718 });
719 self.highlights.push((start..end, highlight_style));
720 } else if let Some(override_style) = override_style {
721 self.highlights.push((start..end, override_style));
722 }
723 }
724 }
725
726 fn highlighted_chunks<'a>(
727 range: Range<usize>,
728 snapshot: &'a text::BufferSnapshot,
729 syntax_snapshot: &'a SyntaxSnapshot,
730 ) -> BufferChunks<'a> {
731 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
732 grammar
733 .highlights_config
734 .as_ref()
735 .map(|config| &config.query)
736 });
737
738 let highlight_maps = captures
739 .grammars()
740 .iter()
741 .map(|grammar| grammar.highlight_map())
742 .collect();
743
744 BufferChunks::new(
745 snapshot.as_rope(),
746 range,
747 Some((captures, highlight_maps)),
748 false,
749 None,
750 )
751 }
752}
753
754#[derive(Clone)]
755pub struct EditPreview {
756 old_snapshot: text::BufferSnapshot,
757 applied_edits_snapshot: text::BufferSnapshot,
758 syntax_snapshot: SyntaxSnapshot,
759}
760
761impl EditPreview {
762 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
763 let (first, _) = edits.first()?;
764 let (last, _) = edits.last()?;
765
766 let start = first.start.to_point(&self.old_snapshot);
767 let old_end = last.end.to_point(&self.old_snapshot);
768 let new_end = last
769 .end
770 .bias_right(&self.old_snapshot)
771 .to_point(&self.applied_edits_snapshot);
772
773 let start = Point::new(start.row.saturating_sub(3), 0);
774 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
775 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
776
777 Some(unified_diff(
778 &self
779 .old_snapshot
780 .text_for_range(start..old_end)
781 .collect::<String>(),
782 &self
783 .applied_edits_snapshot
784 .text_for_range(start..new_end)
785 .collect::<String>(),
786 ))
787 }
788
789 pub fn highlight_edits(
790 &self,
791 current_snapshot: &BufferSnapshot,
792 edits: &[(Range<Anchor>, impl AsRef<str>)],
793 include_deletions: bool,
794 cx: &App,
795 ) -> HighlightedText {
796 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
797 return HighlightedText::default();
798 };
799
800 let mut highlighted_text = HighlightedTextBuilder::default();
801
802 let visible_range_in_preview_snapshot =
803 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
804 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
805
806 let insertion_highlight_style = HighlightStyle {
807 background_color: Some(cx.theme().status().created_background),
808 ..Default::default()
809 };
810 let deletion_highlight_style = HighlightStyle {
811 background_color: Some(cx.theme().status().deleted_background),
812 ..Default::default()
813 };
814 let syntax_theme = cx.theme().syntax();
815
816 for (range, edit_text) in edits {
817 let edit_new_end_in_preview_snapshot = range
818 .end
819 .bias_right(&self.old_snapshot)
820 .to_offset(&self.applied_edits_snapshot);
821 let edit_start_in_preview_snapshot =
822 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
823
824 let unchanged_range_in_preview_snapshot =
825 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
826 if !unchanged_range_in_preview_snapshot.is_empty() {
827 highlighted_text.add_text_from_buffer_range(
828 unchanged_range_in_preview_snapshot,
829 &self.applied_edits_snapshot,
830 &self.syntax_snapshot,
831 None,
832 syntax_theme,
833 );
834 }
835
836 let range_in_current_snapshot = range.to_offset(current_snapshot);
837 if include_deletions && !range_in_current_snapshot.is_empty() {
838 highlighted_text.add_text_from_buffer_range(
839 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
842 Some(deletion_highlight_style),
843 syntax_theme,
844 );
845 }
846
847 if !edit_text.as_ref().is_empty() {
848 highlighted_text.add_text_from_buffer_range(
849 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
850 &self.applied_edits_snapshot,
851 &self.syntax_snapshot,
852 Some(insertion_highlight_style),
853 syntax_theme,
854 );
855 }
856
857 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
858 }
859
860 highlighted_text.add_text_from_buffer_range(
861 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
862 &self.applied_edits_snapshot,
863 &self.syntax_snapshot,
864 None,
865 syntax_theme,
866 );
867
868 highlighted_text.build()
869 }
870
871 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
872 cx.new(|cx| {
873 let mut buffer = Buffer::local_normalized(
874 self.applied_edits_snapshot.as_rope().clone(),
875 self.applied_edits_snapshot.line_ending(),
876 cx,
877 );
878 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
879 buffer
880 })
881 }
882
883 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
884 let (first, _) = edits.first()?;
885 let (last, _) = edits.last()?;
886
887 let start = first
888 .start
889 .bias_left(&self.old_snapshot)
890 .to_point(&self.applied_edits_snapshot);
891 let end = last
892 .end
893 .bias_right(&self.old_snapshot)
894 .to_point(&self.applied_edits_snapshot);
895
896 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
897 let range = Point::new(start.row, 0)
898 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
899
900 Some(range)
901 }
902}
903
904#[derive(Clone, Debug, PartialEq, Eq)]
905pub struct BracketMatch<T> {
906 pub open_range: Range<T>,
907 pub close_range: Range<T>,
908 pub newline_only: bool,
909 pub syntax_layer_depth: usize,
910 pub color_index: Option<usize>,
911}
912
913impl<T> BracketMatch<T> {
914 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
915 (self.open_range, self.close_range)
916 }
917}
918
919impl Buffer {
920 /// Create a new buffer with the given base text.
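    ///
    /// A minimal sketch of constructing one inside an entity:
    ///
    /// ```ignore
    /// // Illustrative: `cx` is an `&mut App`.
    /// let buffer = cx.new(|cx| Buffer::local("Hello, world!", cx));
    /// ```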
921 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
922 Self::build(
923 TextBuffer::new(
924 ReplicaId::LOCAL,
925 cx.entity_id().as_non_zero_u64().into(),
926 base_text.into(),
927 ),
928 None,
929 Capability::ReadWrite,
930 )
931 }
932
933 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
934 pub fn local_normalized(
935 base_text_normalized: Rope,
936 line_ending: LineEnding,
937 cx: &Context<Self>,
938 ) -> Self {
939 Self::build(
940 TextBuffer::new_normalized(
941 ReplicaId::LOCAL,
942 cx.entity_id().as_non_zero_u64().into(),
943 line_ending,
944 base_text_normalized,
945 ),
946 None,
947 Capability::ReadWrite,
948 )
949 }
950
951 /// Create a new buffer that is a replica of a remote buffer.
952 pub fn remote(
953 remote_id: BufferId,
954 replica_id: ReplicaId,
955 capability: Capability,
956 base_text: impl Into<String>,
957 ) -> Self {
958 Self::build(
959 TextBuffer::new(replica_id, remote_id, base_text.into()),
960 None,
961 capability,
962 )
963 }
964
965 /// Create a new buffer that is a replica of a remote buffer, populating its
966 /// state from the given protobuf message.
967 pub fn from_proto(
968 replica_id: ReplicaId,
969 capability: Capability,
970 message: proto::BufferState,
971 file: Option<Arc<dyn File>>,
972 ) -> Result<Self> {
973 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
974 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
975 let mut this = Self::build(buffer, file, capability);
976 this.text.set_line_ending(proto::deserialize_line_ending(
977 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
978 ));
979 this.saved_version = proto::deserialize_version(&message.saved_version);
980 this.saved_mtime = message.saved_mtime.map(|time| time.into());
981 Ok(this)
982 }
983
984 /// Serialize the buffer's state to a protobuf message.
985 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
986 proto::BufferState {
987 id: self.remote_id().into(),
988 file: self.file.as_ref().map(|f| f.to_proto(cx)),
989 base_text: self.base_text().to_string(),
990 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
991 saved_version: proto::serialize_version(&self.saved_version),
992 saved_mtime: self.saved_mtime.map(|time| time.into()),
993 }
994 }
995
996 /// Serialize as protobufs all of the changes to the buffer since the given version.
997 pub fn serialize_ops(
998 &self,
999 since: Option<clock::Global>,
1000 cx: &App,
1001 ) -> Task<Vec<proto::Operation>> {
1002 let mut operations = Vec::new();
1003 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1004
1005 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1006 proto::serialize_operation(&Operation::UpdateSelections {
1007 selections: set.selections.clone(),
1008 lamport_timestamp: set.lamport_timestamp,
1009 line_mode: set.line_mode,
1010 cursor_shape: set.cursor_shape,
1011 })
1012 }));
1013
1014 for (server_id, diagnostics) in &self.diagnostics {
1015 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1016 lamport_timestamp: self.diagnostics_timestamp,
1017 server_id: *server_id,
1018 diagnostics: diagnostics.iter().cloned().collect(),
1019 }));
1020 }
1021
1022 for (server_id, completions) in &self.completion_triggers_per_language_server {
1023 operations.push(proto::serialize_operation(
1024 &Operation::UpdateCompletionTriggers {
1025 triggers: completions.iter().cloned().collect(),
1026 lamport_timestamp: self.completion_triggers_timestamp,
1027 server_id: *server_id,
1028 },
1029 ));
1030 }
1031
1032 let text_operations = self.text.operations().clone();
1033 cx.background_spawn(async move {
1034 let since = since.unwrap_or_default();
1035 operations.extend(
1036 text_operations
1037 .iter()
1038 .filter(|(_, op)| !since.observed(op.timestamp()))
1039 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1040 );
1041 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1042 operations
1043 })
1044 }
1045
1046 /// Assign a language to the buffer, returning the buffer.
1047 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1048 self.set_language_async(Some(language), cx);
1049 self
1050 }
1051
1052 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
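    ///
    /// A minimal sketch, assuming `rust_language` is an `Arc<Language>` that
    /// has already been loaded:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| {
    ///     Buffer::local("fn main() {}", cx).with_language(rust_language, cx)
    /// });
    /// ```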
1053 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1054 self.set_language(Some(language), cx);
1055 self
1056 }
1057
1058 /// Returns the [`Capability`] of this buffer.
1059 pub fn capability(&self) -> Capability {
1060 self.capability
1061 }
1062
1063 /// Whether this buffer can only be read.
1064 pub fn read_only(&self) -> bool {
1065 self.capability == Capability::ReadOnly
1066 }
1067
1068 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
1069 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1070 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1071 let snapshot = buffer.snapshot();
1072 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1073 let tree_sitter_data = TreeSitterData::new(snapshot);
1074 Self {
1075 saved_mtime,
1076 tree_sitter_data: Arc::new(tree_sitter_data),
1077 saved_version: buffer.version(),
1078 preview_version: buffer.version(),
1079 reload_task: None,
1080 transaction_depth: 0,
1081 was_dirty_before_starting_transaction: None,
1082 has_unsaved_edits: Cell::new((buffer.version(), false)),
1083 text: buffer,
1084 branch_state: None,
1085 file,
1086 capability,
1087 syntax_map,
1088 reparse: None,
1089 non_text_state_update_count: 0,
1090 sync_parse_timeout: Duration::from_millis(1),
1091 parse_status: watch::channel(ParseStatus::Idle),
1092 autoindent_requests: Default::default(),
1093 wait_for_autoindent_txs: Default::default(),
1094 pending_autoindent: Default::default(),
1095 language: None,
1096 remote_selections: Default::default(),
1097 diagnostics: Default::default(),
1098 diagnostics_timestamp: Lamport::MIN,
1099 completion_triggers: Default::default(),
1100 completion_triggers_per_language_server: Default::default(),
1101 completion_triggers_timestamp: Lamport::MIN,
1102 deferred_ops: OperationQueue::new(),
1103 has_conflict: false,
1104 change_bits: Default::default(),
1105 _subscriptions: Vec::new(),
1106 encoding: encoding_rs::UTF_8,
1107 has_bom: false,
1108 }
1109 }
1110
1111 pub fn build_snapshot(
1112 text: Rope,
1113 language: Option<Arc<Language>>,
1114 language_registry: Option<Arc<LanguageRegistry>>,
1115 cx: &mut App,
1116 ) -> impl Future<Output = BufferSnapshot> + use<> {
1117 let entity_id = cx.reserve_entity::<Self>().entity_id();
1118 let buffer_id = entity_id.as_non_zero_u64().into();
1119 async move {
1120 let text =
1121 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1122 .snapshot();
1123 let mut syntax = SyntaxMap::new(&text).snapshot();
1124 if let Some(language) = language.clone() {
1125 let language_registry = language_registry.clone();
1126 syntax.reparse(&text, language_registry, language);
1127 }
1128 let tree_sitter_data = TreeSitterData::new(text.clone());
1129 BufferSnapshot {
1130 text,
1131 syntax,
1132 file: None,
1133 diagnostics: Default::default(),
1134 remote_selections: Default::default(),
1135 tree_sitter_data: Arc::new(tree_sitter_data),
1136 language,
1137 non_text_state_update_count: 0,
1138 }
1139 }
1140 }
1141
1142 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1143 let entity_id = cx.reserve_entity::<Self>().entity_id();
1144 let buffer_id = entity_id.as_non_zero_u64().into();
1145 let text = TextBuffer::new_normalized(
1146 ReplicaId::LOCAL,
1147 buffer_id,
1148 Default::default(),
1149 Rope::new(),
1150 )
1151 .snapshot();
1152 let syntax = SyntaxMap::new(&text).snapshot();
1153 let tree_sitter_data = TreeSitterData::new(text.clone());
1154 BufferSnapshot {
1155 text,
1156 syntax,
1157 tree_sitter_data: Arc::new(tree_sitter_data),
1158 file: None,
1159 diagnostics: Default::default(),
1160 remote_selections: Default::default(),
1161 language: None,
1162 non_text_state_update_count: 0,
1163 }
1164 }
1165
1166 #[cfg(any(test, feature = "test-support"))]
1167 pub fn build_snapshot_sync(
1168 text: Rope,
1169 language: Option<Arc<Language>>,
1170 language_registry: Option<Arc<LanguageRegistry>>,
1171 cx: &mut App,
1172 ) -> BufferSnapshot {
1173 let entity_id = cx.reserve_entity::<Self>().entity_id();
1174 let buffer_id = entity_id.as_non_zero_u64().into();
1175 let text =
1176 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1177 .snapshot();
1178 let mut syntax = SyntaxMap::new(&text).snapshot();
1179 if let Some(language) = language.clone() {
1180 syntax.reparse(&text, language_registry, language);
1181 }
1182 let tree_sitter_data = TreeSitterData::new(text.clone());
1183 BufferSnapshot {
1184 text,
1185 syntax,
1186 tree_sitter_data: Arc::new(tree_sitter_data),
1187 file: None,
1188 diagnostics: Default::default(),
1189 remote_selections: Default::default(),
1190 language,
1191 non_text_state_update_count: 0,
1192 }
1193 }
1194
1195 /// Retrieve a snapshot of the buffer's current state. This is computationally
1196 /// cheap, and allows reading from the buffer on a background thread.
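    ///
    /// A minimal sketch of reading from a snapshot off the main thread
    /// (`buffer` is an `Entity<Buffer>` and `cx` an `&mut App`; illustrative):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // Reading the snapshot does not block further edits to the buffer.
    ///     let line_count = snapshot.text.max_point().row + 1;
    ///     dbg!(line_count);
    /// })
    /// .detach();
    /// ```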
1197 pub fn snapshot(&self) -> BufferSnapshot {
1198 let text = self.text.snapshot();
1199 let mut syntax_map = self.syntax_map.lock();
1200 syntax_map.interpolate(&text);
1201 let syntax = syntax_map.snapshot();
1202
1203 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1204 Arc::new(TreeSitterData::new(text.clone()))
1205 } else {
1206 self.tree_sitter_data.clone()
1207 };
1208
1209 BufferSnapshot {
1210 text,
1211 syntax,
1212 tree_sitter_data,
1213 file: self.file.clone(),
1214 remote_selections: self.remote_selections.clone(),
1215 diagnostics: self.diagnostics.clone(),
1216 language: self.language.clone(),
1217 non_text_state_update_count: self.non_text_state_update_count,
1218 }
1219 }
1220
1221 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1222 let this = cx.entity();
1223 cx.new(|cx| {
1224 let mut branch = Self {
1225 branch_state: Some(BufferBranchState {
1226 base_buffer: this.clone(),
1227 merged_operations: Default::default(),
1228 }),
1229 language: self.language.clone(),
1230 has_conflict: self.has_conflict,
1231 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1232 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1233 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1234 };
1235 if let Some(language_registry) = self.language_registry() {
1236 branch.set_language_registry(language_registry);
1237 }
1238
1239 // Reparse the branch buffer so that we get syntax highlighting immediately.
1240 branch.reparse(cx, true);
1241
1242 branch
1243 })
1244 }
1245
1246 pub fn preview_edits(
1247 &self,
1248 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1249 cx: &App,
1250 ) -> Task<EditPreview> {
1251 let registry = self.language_registry();
1252 let language = self.language().cloned();
1253 let old_snapshot = self.text.snapshot();
1254 let mut branch_buffer = self.text.branch();
1255 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1256 cx.background_spawn(async move {
1257 if !edits.is_empty() {
1258 if let Some(language) = language.clone() {
1259 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1260 }
1261
1262 branch_buffer.edit(edits.iter().cloned());
1263 let snapshot = branch_buffer.snapshot();
1264 syntax_snapshot.interpolate(&snapshot);
1265
1266 if let Some(language) = language {
1267 syntax_snapshot.reparse(&snapshot, registry, language);
1268 }
1269 }
1270 EditPreview {
1271 old_snapshot,
1272 applied_edits_snapshot: branch_buffer.snapshot(),
1273 syntax_snapshot,
1274 }
1275 })
1276 }
1277
1278 /// Applies all of the changes in this buffer that intersect any of the
1279 /// given `ranges` to its base buffer.
1280 ///
1281 /// If `ranges` is empty, then all changes will be applied. This buffer must
1282 /// be a branch buffer to call this method.
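    ///
    /// A minimal sketch (illustrative), applying every change in a branch
    /// back to its base buffer:
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| branch.merge_into_base(Vec::new(), cx));
    /// ```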
1283 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1284 let Some(base_buffer) = self.base_buffer() else {
1285 debug_panic!("not a branch buffer");
1286 return;
1287 };
1288
1289 let mut ranges = if ranges.is_empty() {
1290 &[0..usize::MAX]
1291 } else {
1292 ranges.as_slice()
1293 }
1294 .iter()
1295 .peekable();
1296
1297 let mut edits = Vec::new();
1298 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1299 let mut is_included = false;
1300 while let Some(range) = ranges.peek() {
1301 if range.end < edit.new.start {
1302 ranges.next().unwrap();
1303 } else {
1304 if range.start <= edit.new.end {
1305 is_included = true;
1306 }
1307 break;
1308 }
1309 }
1310
1311 if is_included {
1312 edits.push((
1313 edit.old.clone(),
1314 self.text_for_range(edit.new.clone()).collect::<String>(),
1315 ));
1316 }
1317 }
1318
1319 let operation = base_buffer.update(cx, |base_buffer, cx| {
1320 // cx.emit(BufferEvent::DiffBaseChanged);
1321 base_buffer.edit(edits, None, cx)
1322 });
1323
1324 if let Some(operation) = operation
1325 && let Some(BufferBranchState {
1326 merged_operations, ..
1327 }) = &mut self.branch_state
1328 {
1329 merged_operations.push(operation);
1330 }
1331 }
1332
1333 fn on_base_buffer_event(
1334 &mut self,
1335 _: Entity<Buffer>,
1336 event: &BufferEvent,
1337 cx: &mut Context<Self>,
1338 ) {
1339 let BufferEvent::Operation { operation, .. } = event else {
1340 return;
1341 };
1342 let Some(BufferBranchState {
1343 merged_operations, ..
1344 }) = &mut self.branch_state
1345 else {
1346 return;
1347 };
1348
1349 let mut operation_to_undo = None;
1350 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1351 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1352 {
1353 merged_operations.remove(ix);
1354 operation_to_undo = Some(operation.timestamp);
1355 }
1356
1357 self.apply_ops([operation.clone()], cx);
1358
1359 if let Some(timestamp) = operation_to_undo {
1360 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1361 self.undo_operations(counts, cx);
1362 }
1363 }
1364
1365 #[cfg(test)]
1366 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1367 &self.text
1368 }
1369
1370 /// Retrieve a snapshot of the buffer's raw text, without any
1371 /// language-related state like the syntax tree or diagnostics.
1372 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1373 self.text.snapshot()
1374 }
1375
1376 /// The file associated with the buffer, if any.
1377 pub fn file(&self) -> Option<&Arc<dyn File>> {
1378 self.file.as_ref()
1379 }
1380
1381 /// The version of the buffer that was last saved or reloaded from disk.
1382 pub fn saved_version(&self) -> &clock::Global {
1383 &self.saved_version
1384 }
1385
1386 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1387 pub fn saved_mtime(&self) -> Option<MTime> {
1388 self.saved_mtime
1389 }
1390
1391 /// Returns the character encoding of the buffer's file.
1392 pub fn encoding(&self) -> &'static Encoding {
1393 self.encoding
1394 }
1395
1396 /// Sets the character encoding of the buffer.
1397 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1398 self.encoding = encoding;
1399 }
1400
1401 /// Returns whether the buffer has a Byte Order Mark.
1402 pub fn has_bom(&self) -> bool {
1403 self.has_bom
1404 }
1405
1406 /// Sets whether the buffer has a Byte Order Mark.
1407 pub fn set_has_bom(&mut self, has_bom: bool) {
1408 self.has_bom = has_bom;
1409 }
1410
1411 /// Assign a language to the buffer.
1412 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1413 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1414 }
1415
1416 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1417 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1418 self.set_language_(language, true, cx);
1419 }
1420
1421 fn set_language_(
1422 &mut self,
1423 language: Option<Arc<Language>>,
1424 may_block: bool,
1425 cx: &mut Context<Self>,
1426 ) {
1427 self.non_text_state_update_count += 1;
1428 self.syntax_map.lock().clear(&self.text);
1429 let old_language = std::mem::replace(&mut self.language, language);
1430 self.was_changed();
1431 self.reparse(cx, may_block);
1432 let has_fresh_language =
1433 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1434 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1435 }
1436
1437 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1438 /// other languages if parts of the buffer are written in different languages.
1439 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1440 self.syntax_map
1441 .lock()
1442 .set_language_registry(language_registry);
1443 }
1444
1445 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1446 self.syntax_map.lock().language_registry()
1447 }
1448
1449 /// Assign the line ending type to the buffer.
1450 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1451 self.text.set_line_ending(line_ending);
1452
1453 let lamport_timestamp = self.text.lamport_clock.tick();
1454 self.send_operation(
1455 Operation::UpdateLineEnding {
1456 line_ending,
1457 lamport_timestamp,
1458 },
1459 true,
1460 cx,
1461 );
1462 }
1463
1464 /// Assign the buffer a new [`Capability`].
1465 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1466 if self.capability != capability {
1467 self.capability = capability;
1468 cx.emit(BufferEvent::CapabilityChanged)
1469 }
1470 }
1471
1472 /// This method is called to signal that the buffer has been saved.
1473 pub fn did_save(
1474 &mut self,
1475 version: clock::Global,
1476 mtime: Option<MTime>,
1477 cx: &mut Context<Self>,
1478 ) {
1479 self.saved_version = version.clone();
1480 self.has_unsaved_edits.set((version, false));
1481 self.has_conflict = false;
1482 self.saved_mtime = mtime;
1483 self.was_changed();
1484 cx.emit(BufferEvent::Saved);
1485 cx.notify();
1486 }
1487
1488 /// Reloads the contents of the buffer from disk.
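    ///
    /// A minimal sketch (illustrative), awaiting the transaction produced by
    /// the reload, if any:
    ///
    /// ```ignore
    /// let reloaded = buffer.update(cx, |buffer, cx| buffer.reload(cx));
    /// let _transaction: Option<Transaction> = reloaded.await.ok().flatten();
    /// ```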
1489 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1490 let (tx, rx) = futures::channel::oneshot::channel();
1491 let prev_version = self.text.version();
1492 self.reload_task = Some(cx.spawn(async move |this, cx| {
1493 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1494 let file = this.file.as_ref()?.as_local()?;
1495
1496 Some((file.disk_state().mtime(), file.load(cx)))
1497 })?
1498 else {
1499 return Ok(());
1500 };
1501
1502 let new_text = new_text.await?;
1503 let diff = this
1504 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1505 .await;
1506 this.update(cx, |this, cx| {
1507 if this.version() == diff.base_version {
1508 this.finalize_last_transaction();
1509 this.apply_diff(diff, cx);
1510 tx.send(this.finalize_last_transaction().cloned()).ok();
1511 this.has_conflict = false;
1512 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1513 } else {
1514 if !diff.edits.is_empty()
1515 || this
1516 .edits_since::<usize>(&diff.base_version)
1517 .next()
1518 .is_some()
1519 {
1520 this.has_conflict = true;
1521 }
1522
1523 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1524 }
1525
1526 this.reload_task.take();
1527 })
1528 }));
1529 rx
1530 }
1531
1532 /// This method is called to signal that the buffer has been reloaded.
1533 pub fn did_reload(
1534 &mut self,
1535 version: clock::Global,
1536 line_ending: LineEnding,
1537 mtime: Option<MTime>,
1538 cx: &mut Context<Self>,
1539 ) {
1540 self.saved_version = version;
1541 self.has_unsaved_edits
1542 .set((self.saved_version.clone(), false));
1543 self.text.set_line_ending(line_ending);
1544 self.saved_mtime = mtime;
1545 cx.emit(BufferEvent::Reloaded);
1546 cx.notify();
1547 }
1548
1549 /// Updates the [`File`] backing this buffer. This should be called when
1550 /// the file has changed or has been deleted.
1551 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1552 let was_dirty = self.is_dirty();
1553 let mut file_changed = false;
1554
1555 if let Some(old_file) = self.file.as_ref() {
1556 if new_file.path() != old_file.path() {
1557 file_changed = true;
1558 }
1559
1560 let old_state = old_file.disk_state();
1561 let new_state = new_file.disk_state();
1562 if old_state != new_state {
1563 file_changed = true;
1564 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1565 cx.emit(BufferEvent::ReloadNeeded)
1566 }
1567 }
1568 } else {
1569 file_changed = true;
1570 };
1571
1572 self.file = Some(new_file);
1573 if file_changed {
1574 self.was_changed();
1575 self.non_text_state_update_count += 1;
1576 if was_dirty != self.is_dirty() {
1577 cx.emit(BufferEvent::DirtyChanged);
1578 }
1579 cx.emit(BufferEvent::FileHandleChanged);
1580 cx.notify();
1581 }
1582 }
1583
1584 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1585 Some(self.branch_state.as_ref()?.base_buffer.clone())
1586 }
1587
1588 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1589 pub fn language(&self) -> Option<&Arc<Language>> {
1590 self.language.as_ref()
1591 }
1592
1593 /// Returns the [`Language`] at the given location.
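    ///
    /// For a Markdown buffer containing a fenced Rust code block, for
    /// example, an offset inside the fence would resolve to the Rust
    /// language, while offsets elsewhere resolve to Markdown (assuming both
    /// grammars are available).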
1594 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1595 let offset = position.to_offset(self);
1596 let mut is_first = true;
1597 let start_anchor = self.anchor_before(offset);
1598 let end_anchor = self.anchor_after(offset);
1599 self.syntax_map
1600 .lock()
1601 .layers_for_range(offset..offset, &self.text, false)
1602 .filter(|layer| {
1603 if is_first {
1604 is_first = false;
1605 return true;
1606 }
1607
1608 layer
1609 .included_sub_ranges
1610 .map(|sub_ranges| {
1611 sub_ranges.iter().any(|sub_range| {
1612 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1613 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1614 !is_before_start && !is_after_end
1615 })
1616 })
1617 .unwrap_or(true)
1618 })
1619 .last()
1620 .map(|info| info.language.clone())
1621 .or_else(|| self.language.clone())
1622 }
1623
1624 /// Returns each [`Language`] for the active syntax layers at the given location.
1625 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1626 let offset = position.to_offset(self);
1627 let mut languages: Vec<Arc<Language>> = self
1628 .syntax_map
1629 .lock()
1630 .layers_for_range(offset..offset, &self.text, false)
1631 .map(|info| info.language.clone())
1632 .collect();
1633
1634 if languages.is_empty()
1635 && let Some(buffer_language) = self.language()
1636 {
1637 languages.push(buffer_language.clone());
1638 }
1639
1640 languages
1641 }
1642
1643 /// An integer version number that accounts for all updates besides
1644 /// the buffer's text itself (which is versioned via a version vector).
1645 pub fn non_text_state_update_count(&self) -> usize {
1646 self.non_text_state_update_count
1647 }
1648
1649 /// Whether the buffer is being parsed in the background.
1650 #[cfg(any(test, feature = "test-support"))]
1651 pub fn is_parsing(&self) -> bool {
1652 self.reparse.is_some()
1653 }
1654
1655 /// Indicates whether the buffer contains any regions that may be
1656 /// written in a language that hasn't been loaded yet.
1657 pub fn contains_unknown_injections(&self) -> bool {
1658 self.syntax_map.lock().contains_unknown_injections()
1659 }
1660
1661 #[cfg(any(test, feature = "test-support"))]
1662 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1663 self.sync_parse_timeout = timeout;
1664 }
1665
1666 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1667 match Arc::get_mut(&mut self.tree_sitter_data) {
1668 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1669 None => {
1670 let tree_sitter_data = TreeSitterData::new(snapshot);
1671 self.tree_sitter_data = Arc::new(tree_sitter_data)
1672 }
1673 }
1674 }
1675
1676 /// Called after an edit to synchronize the buffer's main parse tree with
1677 /// the buffer's new underlying state.
1678 ///
1679 /// Locks the syntax map and interpolates the edits since the last reparse
1680 /// into the foreground syntax tree.
1681 ///
1682 /// Then takes a stable snapshot of the syntax map before unlocking it.
1683 /// The snapshot with the interpolated edits is sent to a background thread,
1684 /// where we ask Tree-sitter to perform an incremental parse.
1685 ///
    /// Meanwhile, in the foreground, if `may_block` is true we block the main
    /// thread for up to 1ms (the sync parse timeout) waiting for the parse to
    /// complete, and proceed synchronously if it finishes in time.
    ///
    /// If we time out waiting for the parse, we spawn a second task that waits
    /// until the parse does complete, returning with the interpolated tree
    /// still in the foreground. When the background parse completes, it calls
    /// back into the main thread and assigns the new parse state.
1694 ///
1695 /// If the buffer or grammar changed since the start of the background parse,
1696 /// initiate an additional reparse recursively. To avoid concurrent parses
1697 /// for the same buffer, we only initiate a new parse if we are not already
1698 /// parsing in the background.
1699 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1700 if self.reparse.is_some() {
1701 return;
1702 }
1703 let language = if let Some(language) = self.language.clone() {
1704 language
1705 } else {
1706 return;
1707 };
1708
1709 let text = self.text_snapshot();
1710 let parsed_version = self.version();
1711
1712 let mut syntax_map = self.syntax_map.lock();
1713 syntax_map.interpolate(&text);
1714 let language_registry = syntax_map.language_registry();
1715 let mut syntax_snapshot = syntax_map.snapshot();
1716 drop(syntax_map);
1717
1718 let parse_task = cx.background_spawn({
1719 let language = language.clone();
1720 let language_registry = language_registry.clone();
1721 async move {
1722 syntax_snapshot.reparse(&text, language_registry, language);
1723 syntax_snapshot
1724 }
1725 });
1726
1727 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1728 if may_block {
1729 match cx
1730 .background_executor()
1731 .block_with_timeout(self.sync_parse_timeout, parse_task)
1732 {
1733 Ok(new_syntax_snapshot) => {
1734 self.did_finish_parsing(new_syntax_snapshot, cx);
1735 self.reparse = None;
1736 }
1737 Err(parse_task) => {
1738 self.reparse = Some(cx.spawn(async move |this, cx| {
1739 let new_syntax_map = cx.background_spawn(parse_task).await;
1740 this.update(cx, move |this, cx| {
1741 let grammar_changed = || {
1742 this.language.as_ref().is_none_or(|current_language| {
1743 !Arc::ptr_eq(&language, current_language)
1744 })
1745 };
1746 let language_registry_changed = || {
1747 new_syntax_map.contains_unknown_injections()
1748 && language_registry.is_some_and(|registry| {
1749 registry.version()
1750 != new_syntax_map.language_registry_version()
1751 })
1752 };
1753 let parse_again = this.version.changed_since(&parsed_version)
1754 || language_registry_changed()
1755 || grammar_changed();
1756 this.did_finish_parsing(new_syntax_map, cx);
1757 this.reparse = None;
1758 if parse_again {
1759 this.reparse(cx, false);
1760 }
1761 })
1762 .ok();
1763 }));
1764 }
1765 }
1766 } else {
1767 self.reparse = Some(cx.spawn(async move |this, cx| {
1768 let new_syntax_map = cx.background_spawn(parse_task).await;
1769 this.update(cx, move |this, cx| {
1770 let grammar_changed = || {
1771 this.language.as_ref().is_none_or(|current_language| {
1772 !Arc::ptr_eq(&language, current_language)
1773 })
1774 };
1775 let language_registry_changed = || {
1776 new_syntax_map.contains_unknown_injections()
1777 && language_registry.is_some_and(|registry| {
1778 registry.version() != new_syntax_map.language_registry_version()
1779 })
1780 };
1781 let parse_again = this.version.changed_since(&parsed_version)
1782 || language_registry_changed()
1783 || grammar_changed();
1784 this.did_finish_parsing(new_syntax_map, cx);
1785 this.reparse = None;
1786 if parse_again {
1787 this.reparse(cx, false);
1788 }
1789 })
1790 .ok();
1791 }));
1792 }
1793 }
1794
1795 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1796 self.was_changed();
1797 self.non_text_state_update_count += 1;
1798 self.syntax_map.lock().did_parse(syntax_snapshot);
1799 self.request_autoindent(cx);
1800 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1801 self.invalidate_tree_sitter_data(self.text.snapshot());
1802 cx.emit(BufferEvent::Reparsed);
1803 cx.notify();
1804 }
1805
1806 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1807 self.parse_status.1.clone()
1808 }
1809
    /// Wait until the buffer is no longer parsing.
1811 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1812 let mut parse_status = self.parse_status();
1813 async move {
1814 while *parse_status.borrow() != ParseStatus::Idle {
1815 if parse_status.changed().await.is_err() {
1816 break;
1817 }
1818 }
1819 }
1820 }
1821
1822 /// Assign to the buffer a set of diagnostics created by a given language server.
1823 pub fn update_diagnostics(
1824 &mut self,
1825 server_id: LanguageServerId,
1826 diagnostics: DiagnosticSet,
1827 cx: &mut Context<Self>,
1828 ) {
1829 let lamport_timestamp = self.text.lamport_clock.tick();
1830 let op = Operation::UpdateDiagnostics {
1831 server_id,
1832 diagnostics: diagnostics.iter().cloned().collect(),
1833 lamport_timestamp,
1834 };
1835
1836 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1837 self.send_operation(op, true, cx);
1838 }
1839
1840 pub fn buffer_diagnostics(
1841 &self,
1842 for_server: Option<LanguageServerId>,
1843 ) -> Vec<&DiagnosticEntry<Anchor>> {
1844 match for_server {
1845 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1846 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1847 Err(_) => Vec::new(),
1848 },
1849 None => self
1850 .diagnostics
1851 .iter()
1852 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1853 .collect(),
1854 }
1855 }
1856
1857 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1858 if let Some(indent_sizes) = self.compute_autoindents() {
1859 let indent_sizes = cx.background_spawn(indent_sizes);
1860 match cx
1861 .background_executor()
1862 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1863 {
1864 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1865 Err(indent_sizes) => {
1866 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1867 let indent_sizes = indent_sizes.await;
1868 this.update(cx, |this, cx| {
1869 this.apply_autoindents(indent_sizes, cx);
1870 })
1871 .ok();
1872 }));
1873 }
1874 }
1875 } else {
1876 self.autoindent_requests.clear();
1877 for tx in self.wait_for_autoindent_txs.drain(..) {
1878 tx.send(()).ok();
1879 }
1880 }
1881 }
1882
1883 fn compute_autoindents(
1884 &self,
1885 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1886 let max_rows_between_yields = 100;
1887 let snapshot = self.snapshot();
1888 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1889 return None;
1890 }
1891
1892 let autoindent_requests = self.autoindent_requests.clone();
1893 Some(async move {
1894 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1895 for request in autoindent_requests {
1896 // Resolve each edited range to its row in the current buffer and in the
1897 // buffer before this batch of edits.
1898 let mut row_ranges = Vec::new();
1899 let mut old_to_new_rows = BTreeMap::new();
1900 let mut language_indent_sizes_by_new_row = Vec::new();
1901 for entry in &request.entries {
1902 let position = entry.range.start;
1903 let new_row = position.to_point(&snapshot).row;
1904 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1905 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1906
1907 if !entry.first_line_is_new {
1908 let old_row = position.to_point(&request.before_edit).row;
1909 old_to_new_rows.insert(old_row, new_row);
1910 }
1911 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1912 }
1913
1914 // Build a map containing the suggested indentation for each of the edited lines
1915 // with respect to the state of the buffer before these edits. This map is keyed
1916 // by the rows for these lines in the current state of the buffer.
1917 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1918 let old_edited_ranges =
1919 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1920 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1921 let mut language_indent_size = IndentSize::default();
1922 for old_edited_range in old_edited_ranges {
1923 let suggestions = request
1924 .before_edit
1925 .suggest_autoindents(old_edited_range.clone())
1926 .into_iter()
1927 .flatten();
1928 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1929 if let Some(suggestion) = suggestion {
1930 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1931
1932 // Find the indent size based on the language for this row.
1933 while let Some((row, size)) = language_indent_sizes.peek() {
1934 if *row > new_row {
1935 break;
1936 }
1937 language_indent_size = *size;
1938 language_indent_sizes.next();
1939 }
1940
1941 let suggested_indent = old_to_new_rows
1942 .get(&suggestion.basis_row)
1943 .and_then(|from_row| {
1944 Some(old_suggestions.get(from_row).copied()?.0)
1945 })
1946 .unwrap_or_else(|| {
1947 request
1948 .before_edit
1949 .indent_size_for_line(suggestion.basis_row)
1950 })
1951 .with_delta(suggestion.delta, language_indent_size);
1952 old_suggestions
1953 .insert(new_row, (suggested_indent, suggestion.within_error));
1954 }
1955 }
1956 yield_now().await;
1957 }
1958
1959 // Compute new suggestions for each line, but only include them in the result
1960 // if they differ from the old suggestion for that line.
1961 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1962 let mut language_indent_size = IndentSize::default();
1963 for (row_range, original_indent_column) in row_ranges {
1964 let new_edited_row_range = if request.is_block_mode {
1965 row_range.start..row_range.start + 1
1966 } else {
1967 row_range.clone()
1968 };
1969
1970 let suggestions = snapshot
1971 .suggest_autoindents(new_edited_row_range.clone())
1972 .into_iter()
1973 .flatten();
1974 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1975 if let Some(suggestion) = suggestion {
1976 // Find the indent size based on the language for this row.
1977 while let Some((row, size)) = language_indent_sizes.peek() {
1978 if *row > new_row {
1979 break;
1980 }
1981 language_indent_size = *size;
1982 language_indent_sizes.next();
1983 }
1984
1985 let suggested_indent = indent_sizes
1986 .get(&suggestion.basis_row)
1987 .copied()
1988 .map(|e| e.0)
1989 .unwrap_or_else(|| {
1990 snapshot.indent_size_for_line(suggestion.basis_row)
1991 })
1992 .with_delta(suggestion.delta, language_indent_size);
1993
1994 if old_suggestions.get(&new_row).is_none_or(
1995 |(old_indentation, was_within_error)| {
1996 suggested_indent != *old_indentation
1997 && (!suggestion.within_error || *was_within_error)
1998 },
1999 ) {
2000 indent_sizes.insert(
2001 new_row,
2002 (suggested_indent, request.ignore_empty_lines),
2003 );
2004 }
2005 }
2006 }
2007
2008 if let (true, Some(original_indent_column)) =
2009 (request.is_block_mode, original_indent_column)
2010 {
2011 let new_indent =
2012 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2013 *indent
2014 } else {
2015 snapshot.indent_size_for_line(row_range.start)
2016 };
2017 let delta = new_indent.len as i64 - original_indent_column as i64;
2018 if delta != 0 {
2019 for row in row_range.skip(1) {
2020 indent_sizes.entry(row).or_insert_with(|| {
2021 let mut size = snapshot.indent_size_for_line(row);
2022 if size.kind == new_indent.kind {
2023 match delta.cmp(&0) {
2024 Ordering::Greater => size.len += delta as u32,
2025 Ordering::Less => {
2026 size.len = size.len.saturating_sub(-delta as u32)
2027 }
2028 Ordering::Equal => {}
2029 }
2030 }
2031 (size, request.ignore_empty_lines)
2032 });
2033 }
2034 }
2035 }
2036
2037 yield_now().await;
2038 }
2039 }
2040
2041 indent_sizes
2042 .into_iter()
2043 .filter_map(|(row, (indent, ignore_empty_lines))| {
2044 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2045 None
2046 } else {
2047 Some((row, indent))
2048 }
2049 })
2050 .collect()
2051 })
2052 }
2053
2054 fn apply_autoindents(
2055 &mut self,
2056 indent_sizes: BTreeMap<u32, IndentSize>,
2057 cx: &mut Context<Self>,
2058 ) {
2059 self.autoindent_requests.clear();
2060 for tx in self.wait_for_autoindent_txs.drain(..) {
2061 tx.send(()).ok();
2062 }
2063
2064 let edits: Vec<_> = indent_sizes
2065 .into_iter()
2066 .filter_map(|(row, indent_size)| {
2067 let current_size = indent_size_for_line(self, row);
2068 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2069 })
2070 .collect();
2071
2072 let preserve_preview = self.preserve_preview();
2073 self.edit(edits, None, cx);
2074 if preserve_preview {
2075 self.refresh_preview();
2076 }
2077 }
2078
2079 /// Create a minimal edit that will cause the given row to be indented
2080 /// with the given size. After applying this edit, the length of the line
2081 /// will always be at least `new_size.len`.
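    ///
    /// For illustration, a hypothetical call (not a doctest in this crate) that
    /// grows a two-space indent on row 3 to four spaces produces an insertion of
    /// the two missing spaces at the start of that row:
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```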
2082 pub fn edit_for_indent_size_adjustment(
2083 row: u32,
2084 current_size: IndentSize,
2085 new_size: IndentSize,
2086 ) -> Option<(Range<Point>, String)> {
2087 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
2089 Ordering::Greater => {
2090 let point = Point::new(row, 0);
2091 Some((
2092 point..point,
2093 iter::repeat(new_size.char())
2094 .take((new_size.len - current_size.len) as usize)
2095 .collect::<String>(),
2096 ))
2097 }
2098
2099 Ordering::Less => Some((
2100 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2101 String::new(),
2102 )),
2103
2104 Ordering::Equal => None,
2105 }
2106 } else {
2107 Some((
2108 Point::new(row, 0)..Point::new(row, current_size.len),
2109 iter::repeat(new_size.char())
2110 .take(new_size.len as usize)
2111 .collect::<String>(),
2112 ))
2113 }
2114 }
2115
2116 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2117 /// and the given new text.
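    ///
    /// A minimal sketch of the intended flow, assuming an `Entity<Buffer>` named
    /// `buffer` (error handling omitted):
    ///
    /// ```ignore
    /// let diff = buffer.read(cx).diff(new_text, cx);
    /// cx.spawn(async move |cx| {
    ///     let diff = diff.await;
    ///     buffer
    ///         .update(cx, |buffer, cx| buffer.apply_diff(diff, cx))
    ///         .ok();
    /// })
    /// .detach();
    /// ```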
2118 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2119 let old_text = self.as_rope().clone();
2120 let base_version = self.version();
2121 cx.background_executor()
2122 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2123 let old_text = old_text.to_string();
2124 let line_ending = LineEnding::detect(&new_text);
2125 LineEnding::normalize(&mut new_text);
2126 let edits = text_diff(&old_text, &new_text);
2127 Diff {
2128 base_version,
2129 line_ending,
2130 edits,
2131 }
2132 })
2133 }
2134
    /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2137 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2138 let old_text = self.as_rope().clone();
2139 let line_ending = self.line_ending();
2140 let base_version = self.version();
2141 cx.background_spawn(async move {
2142 let ranges = trailing_whitespace_ranges(&old_text);
2143 let empty = Arc::<str>::from("");
2144 Diff {
2145 base_version,
2146 line_ending,
2147 edits: ranges
2148 .into_iter()
2149 .map(|range| (range, empty.clone()))
2150 .collect(),
2151 }
2152 })
2153 }
2154
2155 /// Ensures that the buffer ends with a single newline character, and
2156 /// no other whitespace. Skips if the buffer is empty.
2157 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2158 let len = self.len();
2159 if len == 0 {
2160 return;
2161 }
2162 let mut offset = len;
2163 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2164 let non_whitespace_len = chunk
2165 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2166 .len();
2167 offset -= chunk.len();
2168 offset += non_whitespace_len;
2169 if non_whitespace_len != 0 {
2170 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2171 return;
2172 }
2173 break;
2174 }
2175 }
2176 self.edit([(offset..len, "\n")], None, cx);
2177 }
2178
2179 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2180 /// calculated, then adjust the diff to account for those changes, and discard any
2181 /// parts of the diff that conflict with those changes.
2182 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2183 let snapshot = self.snapshot();
2184 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2185 let mut delta = 0;
2186 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2187 while let Some(edit_since) = edits_since.peek() {
2188 // If the edit occurs after a diff hunk, then it does not
2189 // affect that hunk.
2190 if edit_since.old.start > range.end {
2191 break;
2192 }
2193 // If the edit precedes the diff hunk, then adjust the hunk
2194 // to reflect the edit.
2195 else if edit_since.old.end < range.start {
2196 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2197 edits_since.next();
2198 }
2199 // If the edit intersects a diff hunk, then discard that hunk.
2200 else {
2201 return None;
2202 }
2203 }
2204
2205 let start = (range.start as i64 + delta) as usize;
2206 let end = (range.end as i64 + delta) as usize;
2207 Some((start..end, new_text))
2208 });
2209
2210 self.start_transaction();
2211 self.text.set_line_ending(diff.line_ending);
2212 self.edit(adjusted_edits, None, cx);
2213 self.end_transaction(cx)
2214 }
2215
2216 pub fn has_unsaved_edits(&self) -> bool {
2217 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2218
2219 if last_version == self.version {
2220 self.has_unsaved_edits
2221 .set((last_version, has_unsaved_edits));
2222 return has_unsaved_edits;
2223 }
2224
2225 let has_edits = self.has_edits_since(&self.saved_version);
2226 self.has_unsaved_edits
2227 .set((self.version.clone(), has_edits));
2228 has_edits
2229 }
2230
2231 /// Checks if the buffer has unsaved changes.
2232 pub fn is_dirty(&self) -> bool {
2233 if self.capability == Capability::ReadOnly {
2234 return false;
2235 }
2236 if self.has_conflict {
2237 return true;
2238 }
2239 match self.file.as_ref().map(|f| f.disk_state()) {
2240 Some(DiskState::New) | Some(DiskState::Deleted) => {
2241 !self.is_empty() && self.has_unsaved_edits()
2242 }
2243 _ => self.has_unsaved_edits(),
2244 }
2245 }
2246
2247 /// Marks the buffer as having a conflict regardless of current buffer state.
2248 pub fn set_conflict(&mut self) {
2249 self.has_conflict = true;
2250 }
2251
2252 /// Checks if the buffer and its file have both changed since the buffer
2253 /// was last saved or reloaded.
2254 pub fn has_conflict(&self) -> bool {
2255 if self.has_conflict {
2256 return true;
2257 }
2258 let Some(file) = self.file.as_ref() else {
2259 return false;
2260 };
2261 match file.disk_state() {
2262 DiskState::New => false,
2263 DiskState::Present { mtime } => match self.saved_mtime {
2264 Some(saved_mtime) => {
2265 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2266 }
2267 None => true,
2268 },
2269 DiskState::Deleted => false,
2270 }
2271 }
2272
2273 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2274 pub fn subscribe(&mut self) -> Subscription<usize> {
2275 self.text.subscribe()
2276 }
2277
2278 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2279 ///
2280 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
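    ///
    /// A small sketch of the intended pattern (the surrounding setup is
    /// illustrative):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // Later, poll the bit directly instead of waiting for an event:
    /// if changed.get() {
    ///     changed.set(false);
    ///     // ...react to the text change...
    /// }
    /// ```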
2282 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2283 if let Err(ix) = self
2284 .change_bits
2285 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2286 {
2287 self.change_bits.insert(ix, bit);
2288 }
2289 }
2290
2291 /// Set the change bit for all "listeners".
2292 fn was_changed(&mut self) {
2293 self.change_bits.retain(|change_bit| {
2294 change_bit
2295 .upgrade()
2296 .inspect(|bit| {
2297 _ = bit.replace(true);
2298 })
2299 .is_some()
2300 });
2301 }
2302
2303 /// Starts a transaction, if one is not already in-progress. When undoing or
2304 /// redoing edits, all of the edits performed within a transaction are undone
2305 /// or redone together.
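    ///
    /// A sketch of grouping two edits into a single undo step (the offsets and
    /// text are illustrative):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(12..12, "}\n")], None, cx);
    /// buffer.end_transaction(cx); // one undo now reverts both edits
    /// ```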
2306 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2307 self.start_transaction_at(Instant::now())
2308 }
2309
2310 /// Starts a transaction, providing the current time. Subsequent transactions
2311 /// that occur within a short period of time will be grouped together. This
2312 /// is controlled by the buffer's undo grouping duration.
2313 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2314 self.transaction_depth += 1;
2315 if self.was_dirty_before_starting_transaction.is_none() {
2316 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2317 }
2318 self.text.start_transaction_at(now)
2319 }
2320
2321 /// Terminates the current transaction, if this is the outermost transaction.
2322 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2323 self.end_transaction_at(Instant::now(), cx)
2324 }
2325
2326 /// Terminates the current transaction, providing the current time. Subsequent transactions
2327 /// that occur within a short period of time will be grouped together. This
2328 /// is controlled by the buffer's undo grouping duration.
2329 pub fn end_transaction_at(
2330 &mut self,
2331 now: Instant,
2332 cx: &mut Context<Self>,
2333 ) -> Option<TransactionId> {
2334 assert!(self.transaction_depth > 0);
2335 self.transaction_depth -= 1;
2336 let was_dirty = if self.transaction_depth == 0 {
2337 self.was_dirty_before_starting_transaction.take().unwrap()
2338 } else {
2339 false
2340 };
2341 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2342 self.did_edit(&start_version, was_dirty, cx);
2343 Some(transaction_id)
2344 } else {
2345 None
2346 }
2347 }
2348
2349 /// Manually add a transaction to the buffer's undo history.
2350 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2351 self.text.push_transaction(transaction, now);
2352 }
2353
2354 /// Differs from `push_transaction` in that it does not clear the redo
2355 /// stack. Intended to be used to create a parent transaction to merge
2356 /// potential child transactions into.
2357 ///
2358 /// The caller is responsible for removing it from the undo history using
2359 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2360 /// are merged into this transaction, the caller is responsible for ensuring
2361 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2362 /// cleared is to create transactions with the usual `start_transaction` and
2363 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
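    ///
    /// A sketch of the intended flow (hypothetical; error handling omitted):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// // ...perform edits via `start_transaction` / `end_transaction`...
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     // Nothing was merged in, so drop the placeholder from the history.
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```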
2365 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2366 self.text.push_empty_transaction(now)
2367 }
2368
2369 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2371 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2372 self.text.finalize_last_transaction()
2373 }
2374
2375 /// Manually group all changes since a given transaction.
2376 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2377 self.text.group_until_transaction(transaction_id);
2378 }
2379
2380 /// Manually remove a transaction from the buffer's undo history
2381 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2382 self.text.forget_transaction(transaction_id)
2383 }
2384
2385 /// Retrieve a transaction from the buffer's undo history
2386 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2387 self.text.get_transaction(transaction_id)
2388 }
2389
2390 /// Manually merge two transactions in the buffer's undo history.
2391 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2392 self.text.merge_transactions(transaction, destination);
2393 }
2394
2395 /// Waits for the buffer to receive operations with the given timestamps.
2396 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2397 &mut self,
2398 edit_ids: It,
2399 ) -> impl Future<Output = Result<()>> + use<It> {
2400 self.text.wait_for_edits(edit_ids)
2401 }
2402
2403 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2404 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2405 &mut self,
2406 anchors: It,
2407 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2408 self.text.wait_for_anchors(anchors)
2409 }
2410
2411 /// Waits for the buffer to receive operations up to the given version.
2412 pub fn wait_for_version(
2413 &mut self,
2414 version: clock::Global,
2415 ) -> impl Future<Output = Result<()>> + use<> {
2416 self.text.wait_for_version(version)
2417 }
2418
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2421 pub fn give_up_waiting(&mut self) {
2422 self.text.give_up_waiting();
2423 }
2424
2425 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2426 let mut rx = None;
2427 if !self.autoindent_requests.is_empty() {
2428 let channel = oneshot::channel();
2429 self.wait_for_autoindent_txs.push(channel.0);
2430 rx = Some(channel.1);
2431 }
2432 rx
2433 }
2434
2435 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2436 pub fn set_active_selections(
2437 &mut self,
2438 selections: Arc<[Selection<Anchor>]>,
2439 line_mode: bool,
2440 cursor_shape: CursorShape,
2441 cx: &mut Context<Self>,
2442 ) {
2443 let lamport_timestamp = self.text.lamport_clock.tick();
2444 self.remote_selections.insert(
2445 self.text.replica_id(),
2446 SelectionSet {
2447 selections: selections.clone(),
2448 lamport_timestamp,
2449 line_mode,
2450 cursor_shape,
2451 },
2452 );
2453 self.send_operation(
2454 Operation::UpdateSelections {
2455 selections,
2456 line_mode,
2457 lamport_timestamp,
2458 cursor_shape,
2459 },
2460 true,
2461 cx,
2462 );
2463 self.non_text_state_update_count += 1;
2464 cx.notify();
2465 }
2466
2467 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2468 /// this replica.
2469 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2470 if self
2471 .remote_selections
2472 .get(&self.text.replica_id())
2473 .is_none_or(|set| !set.selections.is_empty())
2474 {
2475 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2476 }
2477 }
2478
2479 pub fn set_agent_selections(
2480 &mut self,
2481 selections: Arc<[Selection<Anchor>]>,
2482 line_mode: bool,
2483 cursor_shape: CursorShape,
2484 cx: &mut Context<Self>,
2485 ) {
2486 let lamport_timestamp = self.text.lamport_clock.tick();
2487 self.remote_selections.insert(
2488 ReplicaId::AGENT,
2489 SelectionSet {
2490 selections,
2491 lamport_timestamp,
2492 line_mode,
2493 cursor_shape,
2494 },
2495 );
2496 self.non_text_state_update_count += 1;
2497 cx.notify();
2498 }
2499
2500 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2501 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2502 }
2503
2504 /// Replaces the buffer's entire text.
2505 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2506 where
2507 T: Into<Arc<str>>,
2508 {
2509 self.autoindent_requests.clear();
2510 self.edit([(0..self.len(), text)], None, cx)
2511 }
2512
2513 /// Appends the given text to the end of the buffer.
2514 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2515 where
2516 T: Into<Arc<str>>,
2517 {
2518 self.edit([(self.len()..self.len(), text)], None, cx)
2519 }
2520
2521 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2522 /// delete, and a string of text to insert at that location.
2523 ///
2524 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2525 /// request for the edited ranges, which will be processed when the buffer finishes
2526 /// parsing.
2527 ///
2528 /// Parsing takes place at the end of a transaction, and may compute synchronously
2529 /// or asynchronously, depending on the changes.
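    ///
    /// A sketch of an auto-indented insertion (`cursor_offset` and the inserted
    /// text are illustrative):
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [(cursor_offset..cursor_offset, "\nlet value = compute();")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```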
2530 pub fn edit<I, S, T>(
2531 &mut self,
2532 edits_iter: I,
2533 autoindent_mode: Option<AutoindentMode>,
2534 cx: &mut Context<Self>,
2535 ) -> Option<clock::Lamport>
2536 where
2537 I: IntoIterator<Item = (Range<S>, T)>,
2538 S: ToOffset,
2539 T: Into<Arc<str>>,
2540 {
2541 // Skip invalid edits and coalesce contiguous ones.
2542 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2543
2544 for (range, new_text) in edits_iter {
2545 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2546
2547 if range.start > range.end {
2548 mem::swap(&mut range.start, &mut range.end);
2549 }
2550 let new_text = new_text.into();
2551 if !new_text.is_empty() || !range.is_empty() {
2552 if let Some((prev_range, prev_text)) = edits.last_mut()
2553 && prev_range.end >= range.start
2554 {
2555 prev_range.end = cmp::max(prev_range.end, range.end);
2556 *prev_text = format!("{prev_text}{new_text}").into();
2557 } else {
2558 edits.push((range, new_text));
2559 }
2560 }
2561 }
2562 if edits.is_empty() {
2563 return None;
2564 }
2565
2566 self.start_transaction();
2567 self.pending_autoindent.take();
2568 let autoindent_request = autoindent_mode
2569 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2570
2571 let edit_operation = self.text.edit(edits.iter().cloned());
2572 let edit_id = edit_operation.timestamp();
2573
2574 if let Some((before_edit, mode)) = autoindent_request {
2575 let mut delta = 0isize;
2576 let mut previous_setting = None;
2577 let entries: Vec<_> = edits
2578 .into_iter()
2579 .enumerate()
2580 .zip(&edit_operation.as_edit().unwrap().new_text)
2581 .filter(|((_, (range, _)), _)| {
2582 let language = before_edit.language_at(range.start);
2583 let language_id = language.map(|l| l.id());
2584 if let Some((cached_language_id, auto_indent)) = previous_setting
2585 && cached_language_id == language_id
2586 {
2587 auto_indent
2588 } else {
2589 // The auto-indent setting is not present in editorconfigs, hence
2590 // we can avoid passing the file here.
2591 let auto_indent =
2592 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2593 previous_setting = Some((language_id, auto_indent));
2594 auto_indent
2595 }
2596 })
2597 .map(|((ix, (range, _)), new_text)| {
2598 let new_text_length = new_text.len();
2599 let old_start = range.start.to_point(&before_edit);
2600 let new_start = (delta + range.start as isize) as usize;
2601 let range_len = range.end - range.start;
2602 delta += new_text_length as isize - range_len as isize;
2603
2604 // Decide what range of the insertion to auto-indent, and whether
2605 // the first line of the insertion should be considered a newly-inserted line
2606 // or an edit to an existing line.
2607 let mut range_of_insertion_to_indent = 0..new_text_length;
2608 let mut first_line_is_new = true;
2609
2610 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2611 let old_line_end = before_edit.line_len(old_start.row);
2612
2613 if old_start.column > old_line_start {
2614 first_line_is_new = false;
2615 }
2616
2617 if !new_text.contains('\n')
2618 && (old_start.column + (range_len as u32) < old_line_end
2619 || old_line_end == old_line_start)
2620 {
2621 first_line_is_new = false;
2622 }
2623
2624 // When inserting text starting with a newline, avoid auto-indenting the
2625 // previous line.
2626 if new_text.starts_with('\n') {
2627 range_of_insertion_to_indent.start += 1;
2628 first_line_is_new = true;
2629 }
2630
2631 let mut original_indent_column = None;
2632 if let AutoindentMode::Block {
2633 original_indent_columns,
2634 } = &mode
2635 {
2636 original_indent_column = Some(if new_text.starts_with('\n') {
2637 indent_size_for_text(
2638 new_text[range_of_insertion_to_indent.clone()].chars(),
2639 )
2640 .len
2641 } else {
2642 original_indent_columns
2643 .get(ix)
2644 .copied()
2645 .flatten()
2646 .unwrap_or_else(|| {
2647 indent_size_for_text(
2648 new_text[range_of_insertion_to_indent.clone()].chars(),
2649 )
2650 .len
2651 })
2652 });
2653
2654 // Avoid auto-indenting the line after the edit.
2655 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2656 range_of_insertion_to_indent.end -= 1;
2657 }
2658 }
2659
2660 AutoindentRequestEntry {
2661 first_line_is_new,
2662 original_indent_column,
2663 indent_size: before_edit.language_indent_size_at(range.start, cx),
2664 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2665 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2666 }
2667 })
2668 .collect();
2669
2670 if !entries.is_empty() {
2671 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2672 before_edit,
2673 entries,
2674 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2675 ignore_empty_lines: false,
2676 }));
2677 }
2678 }
2679
2680 self.end_transaction(cx);
2681 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2682 Some(edit_id)
2683 }
2684
2685 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2686 self.was_changed();
2687
2688 if self.edits_since::<usize>(old_version).next().is_none() {
2689 return;
2690 }
2691
2692 self.reparse(cx, true);
2693 cx.emit(BufferEvent::Edited);
2694 if was_dirty != self.is_dirty() {
2695 cx.emit(BufferEvent::DirtyChanged);
2696 }
2697 cx.notify();
2698 }
2699
2700 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2701 where
2702 I: IntoIterator<Item = Range<T>>,
2703 T: ToOffset + Copy,
2704 {
2705 let before_edit = self.snapshot();
2706 let entries = ranges
2707 .into_iter()
2708 .map(|range| AutoindentRequestEntry {
2709 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2710 first_line_is_new: true,
2711 indent_size: before_edit.language_indent_size_at(range.start, cx),
2712 original_indent_column: None,
2713 })
2714 .collect();
2715 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2716 before_edit,
2717 entries,
2718 is_block_mode: false,
2719 ignore_empty_lines: true,
2720 }));
2721 self.request_autoindent(cx);
2722 }
2723
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
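    ///
    /// For example (a sketch; `cursor` is an illustrative [`Point`]):
    ///
    /// ```ignore
    /// let start = buffer.insert_empty_line(cursor, true, true, cx);
    /// // `start` is the beginning of the freshly inserted blank line.
    /// ```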
2726 pub fn insert_empty_line(
2727 &mut self,
2728 position: impl ToPoint,
2729 space_above: bool,
2730 space_below: bool,
2731 cx: &mut Context<Self>,
2732 ) -> Point {
2733 let mut position = position.to_point(self);
2734
2735 self.start_transaction();
2736
2737 self.edit(
2738 [(position..position, "\n")],
2739 Some(AutoindentMode::EachLine),
2740 cx,
2741 );
2742
2743 if position.column > 0 {
2744 position += Point::new(1, 0);
2745 }
2746
2747 if !self.is_line_blank(position.row) {
2748 self.edit(
2749 [(position..position, "\n")],
2750 Some(AutoindentMode::EachLine),
2751 cx,
2752 );
2753 }
2754
2755 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2756 self.edit(
2757 [(position..position, "\n")],
2758 Some(AutoindentMode::EachLine),
2759 cx,
2760 );
2761 position.row += 1;
2762 }
2763
2764 if space_below
2765 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2766 {
2767 self.edit(
2768 [(position..position, "\n")],
2769 Some(AutoindentMode::EachLine),
2770 cx,
2771 );
2772 }
2773
2774 self.end_transaction(cx);
2775
2776 position
2777 }
2778
2779 /// Applies the given remote operations to the buffer.
2780 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2781 self.pending_autoindent.take();
2782 let was_dirty = self.is_dirty();
2783 let old_version = self.version.clone();
2784 let mut deferred_ops = Vec::new();
2785 let buffer_ops = ops
2786 .into_iter()
2787 .filter_map(|op| match op {
2788 Operation::Buffer(op) => Some(op),
2789 _ => {
2790 if self.can_apply_op(&op) {
2791 self.apply_op(op, cx);
2792 } else {
2793 deferred_ops.push(op);
2794 }
2795 None
2796 }
2797 })
2798 .collect::<Vec<_>>();
2799 for operation in buffer_ops.iter() {
2800 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2801 }
2802 self.text.apply_ops(buffer_ops);
2803 self.deferred_ops.insert(deferred_ops);
2804 self.flush_deferred_ops(cx);
2805 self.did_edit(&old_version, was_dirty, cx);
2806 // Notify independently of whether the buffer was edited as the operations could include a
2807 // selection update.
2808 cx.notify();
2809 }
2810
2811 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2812 let mut deferred_ops = Vec::new();
2813 for op in self.deferred_ops.drain().iter().cloned() {
2814 if self.can_apply_op(&op) {
2815 self.apply_op(op, cx);
2816 } else {
2817 deferred_ops.push(op);
2818 }
2819 }
2820 self.deferred_ops.insert(deferred_ops);
2821 }
2822
2823 pub fn has_deferred_ops(&self) -> bool {
2824 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2825 }
2826
2827 fn can_apply_op(&self, operation: &Operation) -> bool {
2828 match operation {
2829 Operation::Buffer(_) => {
2830 unreachable!("buffer operations should never be applied at this layer")
2831 }
2832 Operation::UpdateDiagnostics {
2833 diagnostics: diagnostic_set,
2834 ..
2835 } => diagnostic_set.iter().all(|diagnostic| {
2836 self.text.can_resolve(&diagnostic.range.start)
2837 && self.text.can_resolve(&diagnostic.range.end)
2838 }),
2839 Operation::UpdateSelections { selections, .. } => selections
2840 .iter()
2841 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2842 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2843 }
2844 }
2845
2846 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2847 match operation {
2848 Operation::Buffer(_) => {
2849 unreachable!("buffer operations should never be applied at this layer")
2850 }
2851 Operation::UpdateDiagnostics {
2852 server_id,
2853 diagnostics: diagnostic_set,
2854 lamport_timestamp,
2855 } => {
2856 let snapshot = self.snapshot();
2857 self.apply_diagnostic_update(
2858 server_id,
2859 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2860 lamport_timestamp,
2861 cx,
2862 );
2863 }
2864 Operation::UpdateSelections {
2865 selections,
2866 lamport_timestamp,
2867 line_mode,
2868 cursor_shape,
2869 } => {
2870 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2871 && set.lamport_timestamp > lamport_timestamp
2872 {
2873 return;
2874 }
2875
2876 self.remote_selections.insert(
2877 lamport_timestamp.replica_id,
2878 SelectionSet {
2879 selections,
2880 lamport_timestamp,
2881 line_mode,
2882 cursor_shape,
2883 },
2884 );
2885 self.text.lamport_clock.observe(lamport_timestamp);
2886 self.non_text_state_update_count += 1;
2887 }
2888 Operation::UpdateCompletionTriggers {
2889 triggers,
2890 lamport_timestamp,
2891 server_id,
2892 } => {
2893 if triggers.is_empty() {
2894 self.completion_triggers_per_language_server
2895 .remove(&server_id);
2896 self.completion_triggers = self
2897 .completion_triggers_per_language_server
2898 .values()
2899 .flat_map(|triggers| triggers.iter().cloned())
2900 .collect();
2901 } else {
2902 self.completion_triggers_per_language_server
2903 .insert(server_id, triggers.iter().cloned().collect());
2904 self.completion_triggers.extend(triggers);
2905 }
2906 self.text.lamport_clock.observe(lamport_timestamp);
2907 }
2908 Operation::UpdateLineEnding {
2909 line_ending,
2910 lamport_timestamp,
2911 } => {
2912 self.text.set_line_ending(line_ending);
2913 self.text.lamport_clock.observe(lamport_timestamp);
2914 }
2915 }
2916 }
2917
2918 fn apply_diagnostic_update(
2919 &mut self,
2920 server_id: LanguageServerId,
2921 diagnostics: DiagnosticSet,
2922 lamport_timestamp: clock::Lamport,
2923 cx: &mut Context<Self>,
2924 ) {
2925 if lamport_timestamp > self.diagnostics_timestamp {
2926 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2927 if diagnostics.is_empty() {
2928 if let Ok(ix) = ix {
2929 self.diagnostics.remove(ix);
2930 }
2931 } else {
2932 match ix {
2933 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2934 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2935 };
2936 }
2937 self.diagnostics_timestamp = lamport_timestamp;
2938 self.non_text_state_update_count += 1;
2939 self.text.lamport_clock.observe(lamport_timestamp);
2940 cx.notify();
2941 cx.emit(BufferEvent::DiagnosticsUpdated);
2942 }
2943 }
2944
2945 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2946 self.was_changed();
2947 cx.emit(BufferEvent::Operation {
2948 operation,
2949 is_local,
2950 });
2951 }
2952
2953 /// Removes the selections for a given peer.
2954 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2955 self.remote_selections.remove(&replica_id);
2956 cx.notify();
2957 }
2958
2959 /// Undoes the most recent transaction.
2960 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2961 let was_dirty = self.is_dirty();
2962 let old_version = self.version.clone();
2963
2964 if let Some((transaction_id, operation)) = self.text.undo() {
2965 self.send_operation(Operation::Buffer(operation), true, cx);
2966 self.did_edit(&old_version, was_dirty, cx);
2967 Some(transaction_id)
2968 } else {
2969 None
2970 }
2971 }
2972
2973 /// Manually undoes a specific transaction in the buffer's undo history.
2974 pub fn undo_transaction(
2975 &mut self,
2976 transaction_id: TransactionId,
2977 cx: &mut Context<Self>,
2978 ) -> bool {
2979 let was_dirty = self.is_dirty();
2980 let old_version = self.version.clone();
2981 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2982 self.send_operation(Operation::Buffer(operation), true, cx);
2983 self.did_edit(&old_version, was_dirty, cx);
2984 true
2985 } else {
2986 false
2987 }
2988 }
2989
2990 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2991 pub fn undo_to_transaction(
2992 &mut self,
2993 transaction_id: TransactionId,
2994 cx: &mut Context<Self>,
2995 ) -> bool {
2996 let was_dirty = self.is_dirty();
2997 let old_version = self.version.clone();
2998
2999 let operations = self.text.undo_to_transaction(transaction_id);
3000 let undone = !operations.is_empty();
3001 for operation in operations {
3002 self.send_operation(Operation::Buffer(operation), true, cx);
3003 }
3004 if undone {
3005 self.did_edit(&old_version, was_dirty, cx)
3006 }
3007 undone
3008 }
3009
3010 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3011 let was_dirty = self.is_dirty();
3012 let operation = self.text.undo_operations(counts);
3013 let old_version = self.version.clone();
3014 self.send_operation(Operation::Buffer(operation), true, cx);
3015 self.did_edit(&old_version, was_dirty, cx);
3016 }
3017
    /// Redoes the most recently undone transaction.
3019 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3020 let was_dirty = self.is_dirty();
3021 let old_version = self.version.clone();
3022
3023 if let Some((transaction_id, operation)) = self.text.redo() {
3024 self.send_operation(Operation::Buffer(operation), true, cx);
3025 self.did_edit(&old_version, was_dirty, cx);
3026 Some(transaction_id)
3027 } else {
3028 None
3029 }
3030 }
3031
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3033 pub fn redo_to_transaction(
3034 &mut self,
3035 transaction_id: TransactionId,
3036 cx: &mut Context<Self>,
3037 ) -> bool {
3038 let was_dirty = self.is_dirty();
3039 let old_version = self.version.clone();
3040
3041 let operations = self.text.redo_to_transaction(transaction_id);
3042 let redone = !operations.is_empty();
3043 for operation in operations {
3044 self.send_operation(Operation::Buffer(operation), true, cx);
3045 }
3046 if redone {
3047 self.did_edit(&old_version, was_dirty, cx)
3048 }
3049 redone
3050 }
3051
3052 /// Override current completion triggers with the user-provided completion triggers.
3053 pub fn set_completion_triggers(
3054 &mut self,
3055 server_id: LanguageServerId,
3056 triggers: BTreeSet<String>,
3057 cx: &mut Context<Self>,
3058 ) {
3059 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3060 if triggers.is_empty() {
3061 self.completion_triggers_per_language_server
3062 .remove(&server_id);
3063 self.completion_triggers = self
3064 .completion_triggers_per_language_server
3065 .values()
3066 .flat_map(|triggers| triggers.iter().cloned())
3067 .collect();
3068 } else {
3069 self.completion_triggers_per_language_server
3070 .insert(server_id, triggers.clone());
3071 self.completion_triggers.extend(triggers.iter().cloned());
3072 }
3073 self.send_operation(
3074 Operation::UpdateCompletionTriggers {
3075 triggers: triggers.into_iter().collect(),
3076 lamport_timestamp: self.completion_triggers_timestamp,
3077 server_id,
3078 },
3079 true,
3080 cx,
3081 );
3082 cx.notify();
3083 }
3084
3085 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
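    ///
    /// For example, a server advertising `.` and `::` as trigger characters
    /// might be registered like this (a sketch; `server_id` is illustrative):
    ///
    /// ```ignore
    /// buffer.set_completion_triggers(
    ///     server_id,
    ///     BTreeSet::from([".".to_string(), "::".to_string()]),
    ///     cx,
    /// );
    /// assert!(buffer.completion_triggers().contains("."));
    /// ```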
3087 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3088 &self.completion_triggers
3089 }
3090
3091 /// Call this directly after performing edits to prevent the preview tab
3092 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3093 /// to return false until there are additional edits.
3094 pub fn refresh_preview(&mut self) {
3095 self.preview_version = self.version.clone();
3096 }
3097
3098 /// Whether we should preserve the preview status of a tab containing this buffer.
3099 pub fn preserve_preview(&self) -> bool {
3100 !self.has_edits_since(&self.preview_version)
3101 }
3102}
3103
3104#[doc(hidden)]
3105#[cfg(any(test, feature = "test-support"))]
3106impl Buffer {
3107 pub fn edit_via_marked_text(
3108 &mut self,
3109 marked_string: &str,
3110 autoindent_mode: Option<AutoindentMode>,
3111 cx: &mut Context<Self>,
3112 ) {
3113 let edits = self.edits_for_marked_text(marked_string);
3114 self.edit(edits, autoindent_mode, cx);
3115 }
3116
3117 pub fn set_group_interval(&mut self, group_interval: Duration) {
3118 self.text.set_group_interval(group_interval);
3119 }
3120
3121 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3122 where
3123 T: rand::Rng,
3124 {
3125 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3126 let mut last_end = None;
3127 for _ in 0..old_range_count {
3128 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3129 break;
3130 }
3131
3132 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3133 let mut range = self.random_byte_range(new_start, rng);
3134 if rng.random_bool(0.2) {
3135 mem::swap(&mut range.start, &mut range.end);
3136 }
3137 last_end = Some(range.end);
3138
3139 let new_text_len = rng.random_range(0..10);
3140 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3141 new_text = new_text.to_uppercase();
3142
3143 edits.push((range, new_text));
3144 }
3145 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3146 self.edit(edits, None, cx);
3147 }
3148
3149 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3150 let was_dirty = self.is_dirty();
3151 let old_version = self.version.clone();
3152
3153 let ops = self.text.randomly_undo_redo(rng);
3154 if !ops.is_empty() {
3155 for op in ops {
3156 self.send_operation(Operation::Buffer(op), true, cx);
3157 self.did_edit(&old_version, was_dirty, cx);
3158 }
3159 }
3160 }
3161}
3162
3163impl EventEmitter<BufferEvent> for Buffer {}
3164
3165impl Deref for Buffer {
3166 type Target = TextBuffer;
3167
3168 fn deref(&self) -> &Self::Target {
3169 &self.text
3170 }
3171}
3172
3173impl BufferSnapshot {
    /// Returns the [`IndentSize`] of the existing indentation at the start of
    /// the given line.
3176 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3177 indent_size_for_line(self, row)
3178 }
3179
3180 /// Returns [`IndentSize`] for a given position that respects user settings
3181 /// and language preferences.
3182 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3183 let settings = language_settings(
3184 self.language_at(position).map(|l| l.name()),
3185 self.file(),
3186 cx,
3187 );
3188 if settings.hard_tabs {
3189 IndentSize::tab()
3190 } else {
3191 IndentSize::spaces(settings.tab_size.get())
3192 }
3193 }
3194
3195 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3196 /// is passed in as `single_indent_size`.
3197 pub fn suggested_indents(
3198 &self,
3199 rows: impl Iterator<Item = u32>,
3200 single_indent_size: IndentSize,
3201 ) -> BTreeMap<u32, IndentSize> {
3202 let mut result = BTreeMap::new();
3203
3204 for row_range in contiguous_ranges(rows, 10) {
3205 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3206 Some(suggestions) => suggestions,
3207 _ => break,
3208 };
3209
3210 for (row, suggestion) in row_range.zip(suggestions) {
3211 let indent_size = if let Some(suggestion) = suggestion {
3212 result
3213 .get(&suggestion.basis_row)
3214 .copied()
3215 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3216 .with_delta(suggestion.delta, single_indent_size)
3217 } else {
3218 self.indent_size_for_line(row)
3219 };
3220
3221 result.insert(row, indent_size);
3222 }
3223 }
3224
3225 result
3226 }
3227
3228 fn suggest_autoindents(
3229 &self,
3230 row_range: Range<u32>,
3231 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3232 let config = &self.language.as_ref()?.config;
3233 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3234
3235 #[derive(Debug, Clone)]
3236 struct StartPosition {
3237 start: Point,
3238 suffix: SharedString,
3239 language: Arc<Language>,
3240 }
3241
3242 // Find the suggested indentation ranges based on the syntax tree.
3243 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3244 let end = Point::new(row_range.end, 0);
3245 let range = (start..end).to_offset(&self.text);
3246 let mut matches = self.syntax.matches_with_options(
3247 range.clone(),
3248 &self.text,
3249 TreeSitterOptions {
3250 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3251 max_start_depth: None,
3252 },
3253 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3254 );
3255 let indent_configs = matches
3256 .grammars()
3257 .iter()
3258 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3259 .collect::<Vec<_>>();
3260
3261 let mut indent_ranges = Vec::<Range<Point>>::new();
3262 let mut start_positions = Vec::<StartPosition>::new();
3263 let mut outdent_positions = Vec::<Point>::new();
3264 while let Some(mat) = matches.peek() {
3265 let mut start: Option<Point> = None;
3266 let mut end: Option<Point> = None;
3267
3268 let config = indent_configs[mat.grammar_index];
3269 for capture in mat.captures {
3270 if capture.index == config.indent_capture_ix {
3271 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3272 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3273 } else if Some(capture.index) == config.start_capture_ix {
3274 start = Some(Point::from_ts_point(capture.node.end_position()));
3275 } else if Some(capture.index) == config.end_capture_ix {
3276 end = Some(Point::from_ts_point(capture.node.start_position()));
3277 } else if Some(capture.index) == config.outdent_capture_ix {
3278 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3279 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3280 start_positions.push(StartPosition {
3281 start: Point::from_ts_point(capture.node.start_position()),
3282 suffix: suffix.clone(),
3283 language: mat.language.clone(),
3284 });
3285 }
3286 }
3287
3288 matches.advance();
3289 if let Some((start, end)) = start.zip(end) {
3290 if start.row == end.row {
3291 continue;
3292 }
3293 let range = start..end;
3294 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3295 Err(ix) => indent_ranges.insert(ix, range),
3296 Ok(ix) => {
3297 let prev_range = &mut indent_ranges[ix];
3298 prev_range.end = prev_range.end.max(range.end);
3299 }
3300 }
3301 }
3302 }
3303
3304 let mut error_ranges = Vec::<Range<Point>>::new();
3305 let mut matches = self
3306 .syntax
3307 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3308 while let Some(mat) = matches.peek() {
3309 let node = mat.captures[0].node;
3310 let start = Point::from_ts_point(node.start_position());
3311 let end = Point::from_ts_point(node.end_position());
3312 let range = start..end;
3313 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3314 Ok(ix) | Err(ix) => ix,
3315 };
3316 let mut end_ix = ix;
3317 while let Some(existing_range) = error_ranges.get(end_ix) {
3318 if existing_range.end < end {
3319 end_ix += 1;
3320 } else {
3321 break;
3322 }
3323 }
3324 error_ranges.splice(ix..end_ix, [range]);
3325 matches.advance();
3326 }
3327
3328 outdent_positions.sort();
3329 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and set its end to the outdent position.
3332 if let Some(range_to_truncate) = indent_ranges
3333 .iter_mut()
3334 .rfind(|indent_range| indent_range.contains(&outdent_position))
3335 {
3336 range_to_truncate.end = outdent_position;
3337 }
3338 }
3339
3340 start_positions.sort_by_key(|b| b.start);
3341
        // Find the suggested indentation increases and decreases based on regexes.
3343 let mut regex_outdent_map = HashMap::default();
3344 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3345 let mut start_positions_iter = start_positions.iter().peekable();
3346
3347 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3348 self.for_each_line(
3349 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3350 ..Point::new(row_range.end, 0),
3351 |row, line| {
3352 let indent_len = self.indent_size_for_line(row).len;
3353 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3354 let row_language_config = row_language
3355 .as_ref()
3356 .map(|lang| lang.config())
3357 .unwrap_or(config);
3358
3359 if row_language_config
3360 .decrease_indent_pattern
3361 .as_ref()
3362 .is_some_and(|regex| regex.is_match(line))
3363 {
3364 indent_change_rows.push((row, Ordering::Less));
3365 }
3366 if row_language_config
3367 .increase_indent_pattern
3368 .as_ref()
3369 .is_some_and(|regex| regex.is_match(line))
3370 {
3371 indent_change_rows.push((row + 1, Ordering::Greater));
3372 }
3373 while let Some(pos) = start_positions_iter.peek() {
3374 if pos.start.row < row {
3375 let pos = start_positions_iter.next().unwrap().clone();
3376 last_seen_suffix
3377 .entry(pos.suffix.to_string())
3378 .or_default()
3379 .push(pos);
3380 } else {
3381 break;
3382 }
3383 }
3384 for rule in &row_language_config.decrease_indent_patterns {
3385 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3386 let row_start_column = self.indent_size_for_line(row).len;
3387 let basis_row = rule
3388 .valid_after
3389 .iter()
3390 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3391 .flatten()
3392 .filter(|pos| {
3393 row_language
3394 .as_ref()
3395 .or(self.language.as_ref())
3396 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3397 })
3398 .filter(|pos| pos.start.column <= row_start_column)
3399 .max_by_key(|pos| pos.start.row);
3400 if let Some(outdent_to) = basis_row {
3401 regex_outdent_map.insert(row, outdent_to.start.row);
3402 }
3403 break;
3404 }
3405 }
3406 },
3407 );
3408
3409 let mut indent_changes = indent_change_rows.into_iter().peekable();
3410 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3411 prev_non_blank_row.unwrap_or(0)
3412 } else {
3413 row_range.start.saturating_sub(1)
3414 };
3415
3416 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3417 Some(row_range.map(move |row| {
3418 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3419
3420 let mut indent_from_prev_row = false;
3421 let mut outdent_from_prev_row = false;
3422 let mut outdent_to_row = u32::MAX;
3423 let mut from_regex = false;
3424
3425 while let Some((indent_row, delta)) = indent_changes.peek() {
3426 match indent_row.cmp(&row) {
3427 Ordering::Equal => match delta {
3428 Ordering::Less => {
3429 from_regex = true;
3430 outdent_from_prev_row = true
3431 }
3432 Ordering::Greater => {
3433 indent_from_prev_row = true;
3434 from_regex = true
3435 }
3436 _ => {}
3437 },
3438
3439 Ordering::Greater => break,
3440 Ordering::Less => {}
3441 }
3442
3443 indent_changes.next();
3444 }
3445
3446 for range in &indent_ranges {
3447 if range.start.row >= row {
3448 break;
3449 }
3450 if range.start.row == prev_row && range.end > row_start {
3451 indent_from_prev_row = true;
3452 }
3453 if range.end > prev_row_start && range.end <= row_start {
3454 outdent_to_row = outdent_to_row.min(range.start.row);
3455 }
3456 }
3457
3458 if let Some(basis_row) = regex_outdent_map.get(&row) {
3459 indent_from_prev_row = false;
3460 outdent_to_row = *basis_row;
3461 from_regex = true;
3462 }
3463
3464 let within_error = error_ranges
3465 .iter()
3466 .any(|e| e.start.row < row && e.end > row_start);
3467
3468 let suggestion = if outdent_to_row == prev_row
3469 || (outdent_from_prev_row && indent_from_prev_row)
3470 {
3471 Some(IndentSuggestion {
3472 basis_row: prev_row,
3473 delta: Ordering::Equal,
3474 within_error: within_error && !from_regex,
3475 })
3476 } else if indent_from_prev_row {
3477 Some(IndentSuggestion {
3478 basis_row: prev_row,
3479 delta: Ordering::Greater,
3480 within_error: within_error && !from_regex,
3481 })
3482 } else if outdent_to_row < prev_row {
3483 Some(IndentSuggestion {
3484 basis_row: outdent_to_row,
3485 delta: Ordering::Equal,
3486 within_error: within_error && !from_regex,
3487 })
3488 } else if outdent_from_prev_row {
3489 Some(IndentSuggestion {
3490 basis_row: prev_row,
3491 delta: Ordering::Less,
3492 within_error: within_error && !from_regex,
3493 })
3494 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3495 {
3496 Some(IndentSuggestion {
3497 basis_row: prev_row,
3498 delta: Ordering::Equal,
3499 within_error: within_error && !from_regex,
3500 })
3501 } else {
3502 None
3503 };
3504
3505 prev_row = row;
3506 prev_row_start = row_start;
3507 suggestion
3508 }))
3509 }
3510
3511 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3512 while row > 0 {
3513 row -= 1;
3514 if !self.is_line_blank(row) {
3515 return Some(row);
3516 }
3517 }
3518 None
3519 }
3520
3521 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3522 let captures = self.syntax.captures(range, &self.text, |grammar| {
3523 grammar
3524 .highlights_config
3525 .as_ref()
3526 .map(|config| &config.query)
3527 });
3528 let highlight_maps = captures
3529 .grammars()
3530 .iter()
3531 .map(|grammar| grammar.highlight_map())
3532 .collect();
3533 (captures, highlight_maps)
3534 }
3535
    /// Iterates over chunks of text in the given range of the buffer. Chunk boundaries are
    /// arbitrary, because the text is stored in a [`Rope`](text::Rope). Additionally, each
    /// returned chunk carries a single syntax highlighting style and diagnostic status.
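    ///
    /// A minimal usage sketch (illustrative only, not compiled as a doctest); assumes a
    /// `snapshot: BufferSnapshot` is in scope:
    ///
    /// ```ignore
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     if chunk.diagnostic_severity.is_some() {
    ///         // This chunk of text lies inside at least one diagnostic.
    ///     }
    ///     print!("{}", chunk.text);
    /// }
    /// ```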
3540 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3541 let range = range.start.to_offset(self)..range.end.to_offset(self);
3542
3543 let mut syntax = None;
3544 if language_aware {
3545 syntax = Some(self.get_highlights(range.clone()));
3546 }
3547 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3548 let diagnostics = language_aware;
3549 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3550 }
3551
3552 pub fn highlighted_text_for_range<T: ToOffset>(
3553 &self,
3554 range: Range<T>,
3555 override_style: Option<HighlightStyle>,
3556 syntax_theme: &SyntaxTheme,
3557 ) -> HighlightedText {
3558 HighlightedText::from_buffer_range(
3559 range,
3560 &self.text,
3561 &self.syntax,
3562 override_style,
3563 syntax_theme,
3564 )
3565 }
3566
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// The callback receives a reference to a reused line buffer, so no new string is
    /// allocated per line.
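    ///
    /// A minimal sketch of a call from inside this impl (illustrative only, not compiled as a
    /// doctest); the row bounds are arbitrary:
    ///
    /// ```ignore
    /// let mut non_blank_lines = 0;
    /// self.for_each_line(Point::new(0, 0)..Point::new(10, 0), |_row, line| {
    ///     if !line.trim().is_empty() {
    ///         non_blank_lines += 1;
    ///     }
    /// });
    /// ```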
3569 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3570 let mut line = String::new();
3571 let mut row = range.start.row;
3572 for chunk in self
3573 .as_rope()
3574 .chunks_in_range(range.to_offset(self))
3575 .chain(["\n"])
3576 {
3577 for (newline_ix, text) in chunk.split('\n').enumerate() {
3578 if newline_ix > 0 {
3579 callback(row, &line);
3580 row += 1;
3581 line.clear();
3582 }
3583 line.push_str(text);
3584 }
3585 }
3586 }
3587
3588 /// Iterates over every [`SyntaxLayer`] in the buffer.
3589 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3590 self.syntax_layers_for_range(0..self.len(), true)
3591 }
3592
3593 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3594 let offset = position.to_offset(self);
3595 self.syntax_layers_for_range(offset..offset, false)
3596 .filter(|l| {
3597 if let Some(ranges) = l.included_sub_ranges {
3598 ranges.iter().any(|range| {
3599 let start = range.start.to_offset(self);
3600 start <= offset && {
3601 let end = range.end.to_offset(self);
3602 offset < end
3603 }
3604 })
3605 } else {
3606 l.node().start_byte() <= offset && l.node().end_byte() > offset
3607 }
3608 })
3609 .last()
3610 }
3611
3612 pub fn syntax_layers_for_range<D: ToOffset>(
3613 &self,
3614 range: Range<D>,
3615 include_hidden: bool,
3616 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3617 self.syntax
3618 .layers_for_range(range, &self.text, include_hidden)
3619 }
3620
3621 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3622 &self,
3623 range: Range<D>,
3624 ) -> Option<SyntaxLayer<'_>> {
3625 let range = range.to_offset(self);
3626 self.syntax
3627 .layers_for_range(range, &self.text, false)
3628 .max_by(|a, b| {
3629 if a.depth != b.depth {
3630 a.depth.cmp(&b.depth)
3631 } else if a.offset.0 != b.offset.0 {
3632 a.offset.0.cmp(&b.offset.0)
3633 } else {
3634 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3635 }
3636 })
3637 }
3638
3639 /// Returns the main [`Language`].
3640 pub fn language(&self) -> Option<&Arc<Language>> {
3641 self.language.as_ref()
3642 }
3643
3644 /// Returns the [`Language`] at the given location.
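    ///
    /// A minimal usage sketch (illustrative only, not compiled as a doctest); assumes a
    /// `snapshot: BufferSnapshot` and a `cursor_offset: usize`:
    ///
    /// ```ignore
    /// // The result can differ from `snapshot.language()` inside injected regions,
    /// // such as a fenced code block in Markdown.
    /// let language_name = snapshot.language_at(cursor_offset).map(|language| language.name());
    /// ```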
3645 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3646 self.syntax_layer_at(position)
3647 .map(|info| info.language)
3648 .or(self.language.as_ref())
3649 }
3650
3651 /// Returns the settings for the language at the given location.
3652 pub fn settings_at<'a, D: ToOffset>(
3653 &'a self,
3654 position: D,
3655 cx: &'a App,
3656 ) -> Cow<'a, LanguageSettings> {
3657 language_settings(
3658 self.language_at(position).map(|l| l.name()),
3659 self.file.as_ref(),
3660 cx,
3661 )
3662 }
3663
3664 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3665 CharClassifier::new(self.language_scope_at(point))
3666 }
3667
3668 /// Returns the [`LanguageScope`] at the given location.
3669 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3670 let offset = position.to_offset(self);
3671 let mut scope = None;
3672 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3673
3674 // Use the layer that has the smallest node intersecting the given point.
3675 for layer in self
3676 .syntax
3677 .layers_for_range(offset..offset, &self.text, false)
3678 {
3679 let mut cursor = layer.node().walk();
3680
3681 let mut range = None;
3682 loop {
3683 let child_range = cursor.node().byte_range();
3684 if !child_range.contains(&offset) {
3685 break;
3686 }
3687
3688 range = Some(child_range);
3689 if cursor.goto_first_child_for_byte(offset).is_none() {
3690 break;
3691 }
3692 }
3693
3694 if let Some(range) = range
3695 && smallest_range_and_depth.as_ref().is_none_or(
3696 |(smallest_range, smallest_range_depth)| {
3697 if layer.depth > *smallest_range_depth {
3698 true
3699 } else if layer.depth == *smallest_range_depth {
3700 range.len() < smallest_range.len()
3701 } else {
3702 false
3703 }
3704 },
3705 )
3706 {
3707 smallest_range_and_depth = Some((range, layer.depth));
3708 scope = Some(LanguageScope {
3709 language: layer.language.clone(),
3710 override_id: layer.override_id(offset, &self.text),
3711 });
3712 }
3713 }
3714
3715 scope.or_else(|| {
3716 self.language.clone().map(|language| LanguageScope {
3717 language,
3718 override_id: None,
3719 })
3720 })
3721 }
3722
3723 /// Returns a tuple of the range and character kind of the word
3724 /// surrounding the given position.
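    ///
    /// A minimal usage sketch (illustrative only, not compiled as a doctest); assumes a
    /// `snapshot: BufferSnapshot` and a `cursor_offset: usize`:
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(cursor_offset, None);
    /// // `kind` describes the class of characters (if any) adjacent to the offset.
    /// let word: String = snapshot.text_for_range(range).collect();
    /// ```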
3725 pub fn surrounding_word<T: ToOffset>(
3726 &self,
3727 start: T,
3728 scope_context: Option<CharScopeContext>,
3729 ) -> (Range<usize>, Option<CharKind>) {
3730 let mut start = start.to_offset(self);
3731 let mut end = start;
3732 let mut next_chars = self.chars_at(start).take(128).peekable();
3733 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3734
3735 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3736 let word_kind = cmp::max(
3737 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3738 next_chars.peek().copied().map(|c| classifier.kind(c)),
3739 );
3740
3741 for ch in prev_chars {
3742 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3743 start -= ch.len_utf8();
3744 } else {
3745 break;
3746 }
3747 }
3748
3749 for ch in next_chars {
3750 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3751 end += ch.len_utf8();
3752 } else {
3753 break;
3754 }
3755 }
3756
3757 (start..end, word_kind)
3758 }
3759
3760 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3761 /// range. When `require_larger` is true, the node found must be larger than the query range.
3762 ///
3763 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3764 /// be moved to the root of the tree.
3765 fn goto_node_enclosing_range(
3766 cursor: &mut tree_sitter::TreeCursor,
3767 query_range: &Range<usize>,
3768 require_larger: bool,
3769 ) -> bool {
3770 let mut ascending = false;
3771 loop {
3772 let mut range = cursor.node().byte_range();
3773 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3776 if range.start > query_range.start {
3777 cursor.goto_previous_sibling();
3778 range = cursor.node().byte_range();
3779 }
3780 } else {
3781 // When the query range is non-empty and the current node ends exactly at the start,
3782 // move to the next sibling to find a node that extends beyond the start.
3783 if range.end == query_range.start {
3784 cursor.goto_next_sibling();
3785 range = cursor.node().byte_range();
3786 }
3787 }
3788
3789 let encloses = range.contains_inclusive(query_range)
3790 && (!require_larger || range.len() > query_range.len());
3791 if !encloses {
3792 ascending = true;
3793 if !cursor.goto_parent() {
3794 return false;
3795 }
3796 continue;
3797 } else if ascending {
3798 return true;
3799 }
3800
3801 // Descend into the current node.
3802 if cursor
3803 .goto_first_child_for_byte(query_range.start)
3804 .is_none()
3805 {
3806 return true;
3807 }
3808 }
3809 }
3810
3811 pub fn syntax_ancestor<'a, T: ToOffset>(
3812 &'a self,
3813 range: Range<T>,
3814 ) -> Option<tree_sitter::Node<'a>> {
3815 let range = range.start.to_offset(self)..range.end.to_offset(self);
3816 let mut result: Option<tree_sitter::Node<'a>> = None;
3817 for layer in self
3818 .syntax
3819 .layers_for_range(range.clone(), &self.text, true)
3820 {
3821 let mut cursor = layer.node().walk();
3822
3823 // Find the node that both contains the range and is larger than it.
3824 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3825 continue;
3826 }
3827
3828 let left_node = cursor.node();
3829 let mut layer_result = left_node;
3830
3831 // For an empty range, try to find another node immediately to the right of the range.
3832 if left_node.end_byte() == range.start {
3833 let mut right_node = None;
3834 while !cursor.goto_next_sibling() {
3835 if !cursor.goto_parent() {
3836 break;
3837 }
3838 }
3839
3840 while cursor.node().start_byte() == range.start {
3841 right_node = Some(cursor.node());
3842 if !cursor.goto_first_child() {
3843 break;
3844 }
3845 }
3846
3847 // If there is a candidate node on both sides of the (empty) range, then
3848 // decide between the two by favoring a named node over an anonymous token.
3849 // If both nodes are the same in that regard, favor the right one.
3850 if let Some(right_node) = right_node
3851 && (right_node.is_named() || !left_node.is_named())
3852 {
3853 layer_result = right_node;
3854 }
3855 }
3856
3857 if let Some(previous_result) = &result
3858 && previous_result.byte_range().len() < layer_result.byte_range().len()
3859 {
3860 continue;
3861 }
3862 result = Some(layer_result);
3863 }
3864
3865 result
3866 }
3867
3868 /// Find the previous sibling syntax node at the given range.
3869 ///
3870 /// This function locates the syntax node that precedes the node containing
3871 /// the given range. It searches hierarchically by:
3872 /// 1. Finding the node that contains the given range
3873 /// 2. Looking for the previous sibling at the same tree level
3874 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3875 ///
3876 /// Returns `None` if there is no previous sibling at any ancestor level.
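    ///
    /// A minimal usage sketch (illustrative only, not compiled as a doctest); assumes a
    /// `snapshot: BufferSnapshot` and a byte offset range for the current selection:
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_prev_sibling(selection_start..selection_end) {
    ///     // `node.kind()` names the grammar rule of the preceding sibling, e.g. "function_item".
    ///     let _kind = node.kind();
    /// }
    /// ```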
3877 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3878 &'a self,
3879 range: Range<T>,
3880 ) -> Option<tree_sitter::Node<'a>> {
3881 let range = range.start.to_offset(self)..range.end.to_offset(self);
3882 let mut result: Option<tree_sitter::Node<'a>> = None;
3883
3884 for layer in self
3885 .syntax
3886 .layers_for_range(range.clone(), &self.text, true)
3887 {
3888 let mut cursor = layer.node().walk();
3889
3890 // Find the node that contains the range
3891 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3892 continue;
3893 }
3894
3895 // Look for the previous sibling, moving up ancestor levels if needed
3896 loop {
3897 if cursor.goto_previous_sibling() {
3898 let layer_result = cursor.node();
3899
3900 if let Some(previous_result) = &result {
3901 if previous_result.byte_range().end < layer_result.byte_range().end {
3902 continue;
3903 }
3904 }
3905 result = Some(layer_result);
3906 break;
3907 }
3908
3909 // No sibling found at this level, try moving up to parent
3910 if !cursor.goto_parent() {
3911 break;
3912 }
3913 }
3914 }
3915
3916 result
3917 }
3918
3919 /// Find the next sibling syntax node at the given range.
3920 ///
3921 /// This function locates the syntax node that follows the node containing
3922 /// the given range. It searches hierarchically by:
3923 /// 1. Finding the node that contains the given range
3924 /// 2. Looking for the next sibling at the same tree level
3925 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3926 ///
3927 /// Returns `None` if there is no next sibling at any ancestor level.
3928 pub fn syntax_next_sibling<'a, T: ToOffset>(
3929 &'a self,
3930 range: Range<T>,
3931 ) -> Option<tree_sitter::Node<'a>> {
3932 let range = range.start.to_offset(self)..range.end.to_offset(self);
3933 let mut result: Option<tree_sitter::Node<'a>> = None;
3934
3935 for layer in self
3936 .syntax
3937 .layers_for_range(range.clone(), &self.text, true)
3938 {
3939 let mut cursor = layer.node().walk();
3940
3941 // Find the node that contains the range
3942 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3943 continue;
3944 }
3945
3946 // Look for the next sibling, moving up ancestor levels if needed
3947 loop {
3948 if cursor.goto_next_sibling() {
3949 let layer_result = cursor.node();
3950
3951 if let Some(previous_result) = &result {
3952 if previous_result.byte_range().start > layer_result.byte_range().start {
3953 continue;
3954 }
3955 }
3956 result = Some(layer_result);
3957 break;
3958 }
3959
3960 // No sibling found at this level, try moving up to parent
3961 if !cursor.goto_parent() {
3962 break;
3963 }
3964 }
3965 }
3966
3967 result
3968 }
3969
    /// Returns the root syntax node within the given row.
3971 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3972 let start_offset = position.to_offset(self);
3973
3974 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3975
3976 let layer = self
3977 .syntax
3978 .layers_for_range(start_offset..start_offset, &self.text, true)
3979 .next()?;
3980
3981 let mut cursor = layer.node().walk();
3982
3983 // Descend to the first leaf that touches the start of the range.
3984 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3985 if cursor.node().end_byte() == start_offset {
3986 cursor.goto_next_sibling();
3987 }
3988 }
3989
3990 // Ascend to the root node within the same row.
3991 while cursor.goto_parent() {
3992 if cursor.node().start_position().row != row {
3993 break;
3994 }
3995 }
3996
3997 Some(cursor.node())
3998 }
3999
4000 /// Returns the outline for the buffer.
4001 ///
4002 /// This method allows passing an optional [`SyntaxTheme`] to
4003 /// syntax-highlight the returned symbols.
4004 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4005 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4006 }
4007
4008 /// Returns all the symbols that contain the given position.
4009 ///
4010 /// This method allows passing an optional [`SyntaxTheme`] to
4011 /// syntax-highlight the returned symbols.
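    ///
    /// A minimal usage sketch (illustrative only, not compiled as a doctest); assumes a
    /// `snapshot: BufferSnapshot` and a `cursor_offset: usize`:
    ///
    /// ```ignore
    /// for item in snapshot.symbols_containing(cursor_offset, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```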
4012 pub fn symbols_containing<T: ToOffset>(
4013 &self,
4014 position: T,
4015 theme: Option<&SyntaxTheme>,
4016 ) -> Vec<OutlineItem<Anchor>> {
4017 let position = position.to_offset(self);
4018 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4019 let end = self.clip_offset(position + 1, Bias::Right);
4020 let mut items = self.outline_items_containing(start..end, false, theme);
4021 let mut prev_depth = None;
4022 items.retain(|item| {
4023 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4024 prev_depth = Some(item.depth);
4025 result
4026 });
4027 items
4028 }
4029
4030 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4031 let range = range.to_offset(self);
4032 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4033 grammar.outline_config.as_ref().map(|c| &c.query)
4034 });
4035 let configs = matches
4036 .grammars()
4037 .iter()
4038 .map(|g| g.outline_config.as_ref().unwrap())
4039 .collect::<Vec<_>>();
4040
4041 while let Some(mat) = matches.peek() {
4042 let config = &configs[mat.grammar_index];
4043 let containing_item_node = maybe!({
4044 let item_node = mat.captures.iter().find_map(|cap| {
4045 if cap.index == config.item_capture_ix {
4046 Some(cap.node)
4047 } else {
4048 None
4049 }
4050 })?;
4051
4052 let item_byte_range = item_node.byte_range();
4053 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4054 None
4055 } else {
4056 Some(item_node)
4057 }
4058 });
4059
4060 if let Some(item_node) = containing_item_node {
4061 return Some(
4062 Point::from_ts_point(item_node.start_position())
4063 ..Point::from_ts_point(item_node.end_position()),
4064 );
4065 }
4066
4067 matches.advance();
4068 }
4069 None
4070 }
4071
4072 pub fn outline_items_containing<T: ToOffset>(
4073 &self,
4074 range: Range<T>,
4075 include_extra_context: bool,
4076 theme: Option<&SyntaxTheme>,
4077 ) -> Vec<OutlineItem<Anchor>> {
4078 self.outline_items_containing_internal(
4079 range,
4080 include_extra_context,
4081 theme,
4082 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4083 )
4084 }
4085
4086 pub fn outline_items_as_points_containing<T: ToOffset>(
4087 &self,
4088 range: Range<T>,
4089 include_extra_context: bool,
4090 theme: Option<&SyntaxTheme>,
4091 ) -> Vec<OutlineItem<Point>> {
4092 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4093 range
4094 })
4095 }
4096
4097 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4098 &self,
4099 range: Range<T>,
4100 include_extra_context: bool,
4101 theme: Option<&SyntaxTheme>,
4102 ) -> Vec<OutlineItem<usize>> {
4103 self.outline_items_containing_internal(
4104 range,
4105 include_extra_context,
4106 theme,
4107 |buffer, range| range.to_offset(buffer),
4108 )
4109 }
4110
4111 fn outline_items_containing_internal<T: ToOffset, U>(
4112 &self,
4113 range: Range<T>,
4114 include_extra_context: bool,
4115 theme: Option<&SyntaxTheme>,
4116 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4117 ) -> Vec<OutlineItem<U>> {
4118 let range = range.to_offset(self);
4119 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4120 grammar.outline_config.as_ref().map(|c| &c.query)
4121 });
4122
4123 let mut items = Vec::new();
4124 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4125 while let Some(mat) = matches.peek() {
4126 let config = matches.grammars()[mat.grammar_index]
4127 .outline_config
4128 .as_ref()
4129 .unwrap();
4130 if let Some(item) =
4131 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4132 {
4133 items.push(item);
4134 } else if let Some(capture) = mat
4135 .captures
4136 .iter()
4137 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4138 {
4139 let capture_range = capture.node.start_position()..capture.node.end_position();
4140 let mut capture_row_range =
4141 capture_range.start.row as u32..capture_range.end.row as u32;
4142 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4143 {
4144 capture_row_range.end -= 1;
4145 }
4146 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4147 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4148 last_row_range.end = capture_row_range.end;
4149 } else {
4150 annotation_row_ranges.push(capture_row_range);
4151 }
4152 } else {
4153 annotation_row_ranges.push(capture_row_range);
4154 }
4155 }
4156 matches.advance();
4157 }
4158
4159 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4160
4161 // Assign depths based on containment relationships and convert to anchors.
4162 let mut item_ends_stack = Vec::<Point>::new();
4163 let mut anchor_items = Vec::new();
4164 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4165 for item in items {
4166 while let Some(last_end) = item_ends_stack.last().copied() {
4167 if last_end < item.range.end {
4168 item_ends_stack.pop();
4169 } else {
4170 break;
4171 }
4172 }
4173
4174 let mut annotation_row_range = None;
4175 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4176 let row_preceding_item = item.range.start.row.saturating_sub(1);
4177 if next_annotation_row_range.end < row_preceding_item {
4178 annotation_row_ranges.next();
4179 } else {
4180 if next_annotation_row_range.end == row_preceding_item {
4181 annotation_row_range = Some(next_annotation_row_range.clone());
4182 annotation_row_ranges.next();
4183 }
4184 break;
4185 }
4186 }
4187
4188 anchor_items.push(OutlineItem {
4189 depth: item_ends_stack.len(),
4190 range: range_callback(self, item.range.clone()),
4191 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4192 text: item.text,
4193 highlight_ranges: item.highlight_ranges,
4194 name_ranges: item.name_ranges,
4195 body_range: item.body_range.map(|r| range_callback(self, r)),
4196 annotation_range: annotation_row_range.map(|annotation_range| {
4197 let point_range = Point::new(annotation_range.start, 0)
4198 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4199 range_callback(self, point_range)
4200 }),
4201 });
4202 item_ends_stack.push(item.range.end);
4203 }
4204
4205 anchor_items
4206 }
4207
4208 fn next_outline_item(
4209 &self,
4210 config: &OutlineConfig,
4211 mat: &SyntaxMapMatch,
4212 range: &Range<usize>,
4213 include_extra_context: bool,
4214 theme: Option<&SyntaxTheme>,
4215 ) -> Option<OutlineItem<Point>> {
4216 let item_node = mat.captures.iter().find_map(|cap| {
4217 if cap.index == config.item_capture_ix {
4218 Some(cap.node)
4219 } else {
4220 None
4221 }
4222 })?;
4223
4224 let item_byte_range = item_node.byte_range();
4225 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4226 return None;
4227 }
4228 let item_point_range = Point::from_ts_point(item_node.start_position())
4229 ..Point::from_ts_point(item_node.end_position());
4230
4231 let mut open_point = None;
4232 let mut close_point = None;
4233
4234 let mut buffer_ranges = Vec::new();
4235 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4236 let mut range = node.start_byte()..node.end_byte();
4237 let start = node.start_position();
4238 if node.end_position().row > start.row {
4239 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4240 }
4241
4242 if !range.is_empty() {
4243 buffer_ranges.push((range, node_is_name));
4244 }
4245 };
4246
4247 for capture in mat.captures {
4248 if capture.index == config.name_capture_ix {
4249 add_to_buffer_ranges(capture.node, true);
4250 } else if Some(capture.index) == config.context_capture_ix
4251 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4252 {
4253 add_to_buffer_ranges(capture.node, false);
            } else if Some(capture.index) == config.open_capture_ix {
                open_point = Some(Point::from_ts_point(capture.node.end_position()));
            } else if Some(capture.index) == config.close_capture_ix {
                close_point = Some(Point::from_ts_point(capture.node.start_position()));
            }
4261 }
4262
4263 if buffer_ranges.is_empty() {
4264 return None;
4265 }
4266 let source_range_for_text =
4267 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4268
4269 let mut text = String::new();
4270 let mut highlight_ranges = Vec::new();
4271 let mut name_ranges = Vec::new();
4272 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4273 let mut last_buffer_range_end = 0;
4274 for (buffer_range, is_name) in buffer_ranges {
4275 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4276 if space_added {
4277 text.push(' ');
4278 }
4279 let before_append_len = text.len();
4280 let mut offset = buffer_range.start;
4281 chunks.seek(buffer_range.clone());
4282 for mut chunk in chunks.by_ref() {
4283 if chunk.text.len() > buffer_range.end - offset {
4284 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4285 offset = buffer_range.end;
4286 } else {
4287 offset += chunk.text.len();
4288 }
4289 let style = chunk
4290 .syntax_highlight_id
4291 .zip(theme)
4292 .and_then(|(highlight, theme)| highlight.style(theme));
4293 if let Some(style) = style {
4294 let start = text.len();
4295 let end = start + chunk.text.len();
4296 highlight_ranges.push((start..end, style));
4297 }
4298 text.push_str(chunk.text);
4299 if offset >= buffer_range.end {
4300 break;
4301 }
4302 }
4303 if is_name {
4304 let after_append_len = text.len();
4305 let start = if space_added && !name_ranges.is_empty() {
4306 before_append_len - 1
4307 } else {
4308 before_append_len
4309 };
4310 name_ranges.push(start..after_append_len);
4311 }
4312 last_buffer_range_end = buffer_range.end;
4313 }
4314
4315 Some(OutlineItem {
4316 depth: 0, // We'll calculate the depth later
4317 range: item_point_range,
4318 source_range_for_text: source_range_for_text.to_point(self),
4319 text,
4320 highlight_ranges,
4321 name_ranges,
4322 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4323 annotation_range: None,
4324 })
4325 }
4326
4327 pub fn function_body_fold_ranges<T: ToOffset>(
4328 &self,
4329 within: Range<T>,
4330 ) -> impl Iterator<Item = Range<usize>> + '_ {
4331 self.text_object_ranges(within, TreeSitterOptions::default())
4332 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4333 }
4334
4335 /// For each grammar in the language, runs the provided
4336 /// [`tree_sitter::Query`] against the given range.
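    ///
    /// A minimal usage sketch mirroring how the outline query is run elsewhere in this file
    /// (illustrative only, not compiled as a doctest); assumes a `snapshot: BufferSnapshot`:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` for the current match here.
    ///     matches.advance();
    /// }
    /// ```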
4337 pub fn matches(
4338 &self,
4339 range: Range<usize>,
4340 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4341 ) -> SyntaxMapMatches<'_> {
4342 self.syntax.matches(range, self, query)
4343 }
4344
    /// Finds all [`RowChunks`] applicable to the given range, then returns every bracket pair
    /// that intersects those chunks. It may therefore return more bracket pairs than the range
    /// itself contains.
    ///
    /// Chunks whose row ranges are present in `known_chunks` are skipped.
    /// The resulting bracket match collections are not ordered.
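    ///
    /// A minimal usage sketch (illustrative only, not compiled as a doctest); assumes a
    /// `snapshot: BufferSnapshot` and that the caller has no cached chunks yet:
    ///
    /// ```ignore
    /// let by_chunk = snapshot.fetch_bracket_ranges(0..snapshot.len(), None);
    /// for (row_range, brackets) in &by_chunk {
    ///     // `brackets` holds the unordered bracket matches cached for this chunk of rows.
    ///     let _ = (row_range, brackets.len());
    /// }
    /// ```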
4350 pub fn fetch_bracket_ranges(
4351 &self,
4352 range: Range<usize>,
4353 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4354 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4355 let mut all_bracket_matches = HashMap::default();
4356
4357 for chunk in self
4358 .tree_sitter_data
4359 .chunks
4360 .applicable_chunks(&[range.to_point(self)])
4361 {
4362 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4363 continue;
4364 }
4365 let chunk_range = chunk.anchor_range();
4366 let chunk_range = chunk_range.to_offset(&self);
4367
4368 if let Some(cached_brackets) =
4369 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4370 {
4371 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4372 continue;
4373 }
4374
4375 let mut all_brackets = Vec::new();
4376 let mut opens = Vec::new();
4377 let mut color_pairs = Vec::new();
4378
4379 let mut matches = self.syntax.matches_with_options(
4380 chunk_range.clone(),
4381 &self.text,
4382 TreeSitterOptions {
4383 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4384 max_start_depth: None,
4385 },
4386 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4387 );
4388 let configs = matches
4389 .grammars()
4390 .iter()
4391 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4392 .collect::<Vec<_>>();
4393
4394 while let Some(mat) = matches.peek() {
4395 let mut open = None;
4396 let mut close = None;
4397 let syntax_layer_depth = mat.depth;
4398 let config = configs[mat.grammar_index];
4399 let pattern = &config.patterns[mat.pattern_index];
4400 for capture in mat.captures {
4401 if capture.index == config.open_capture_ix {
4402 open = Some(capture.node.byte_range());
4403 } else if capture.index == config.close_capture_ix {
4404 close = Some(capture.node.byte_range());
4405 }
4406 }
4407
4408 matches.advance();
4409
4410 let Some((open_range, close_range)) = open.zip(close) else {
4411 continue;
4412 };
4413
4414 let bracket_range = open_range.start..=close_range.end;
4415 if !bracket_range.overlaps(&chunk_range) {
4416 continue;
4417 }
4418
4419 let index = all_brackets.len();
4420 all_brackets.push(BracketMatch {
4421 open_range: open_range.clone(),
4422 close_range: close_range.clone(),
4423 newline_only: pattern.newline_only,
4424 syntax_layer_depth,
4425 color_index: None,
4426 });
4427
                // Certain languages have "brackets" that are not really brackets, e.g. tags,
                // and such a pair will match the entire tag along with all of the text inside it.
                // For now, avoid colorizing any pair where both brackets are longer than a single
                // character. We still need to colorize `<Element/>` pairs, so the check cannot be
                // made stricter than this.
4432 let should_color =
4433 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4434 if should_color {
4435 opens.push(open_range.clone());
4436 color_pairs.push((open_range, close_range, index));
4437 }
4438 }
4439
4440 opens.sort_by_key(|r| (r.start, r.end));
4441 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4442 color_pairs.sort_by_key(|(_, close, _)| close.end);
4443
4444 let mut open_stack = Vec::new();
4445 let mut open_index = 0;
4446 for (open, close, index) in color_pairs {
4447 while open_index < opens.len() && opens[open_index].start < close.start {
4448 open_stack.push(opens[open_index].clone());
4449 open_index += 1;
4450 }
4451
4452 if open_stack.last() == Some(&open) {
4453 let depth_index = open_stack.len() - 1;
4454 all_brackets[index].color_index = Some(depth_index);
4455 open_stack.pop();
4456 }
4457 }
4458
4459 all_brackets.sort_by_key(|bracket_match| {
4460 (bracket_match.open_range.start, bracket_match.open_range.end)
4461 });
4462
4463 if let empty_slot @ None =
4464 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4465 {
4466 *empty_slot = Some(all_brackets.clone());
4467 }
4468 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4469 }
4470
4471 all_bracket_matches
4472 }
4473
4474 pub fn all_bracket_ranges(
4475 &self,
4476 range: Range<usize>,
4477 ) -> impl Iterator<Item = BracketMatch<usize>> {
4478 self.fetch_bracket_ranges(range.clone(), None)
4479 .into_values()
4480 .flatten()
4481 .filter(move |bracket_match| {
4482 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4483 bracket_range.overlaps(&range)
4484 })
4485 }
4486
    /// Returns bracket range pairs overlapping or adjacent to `range`.
4488 pub fn bracket_ranges<T: ToOffset>(
4489 &self,
4490 range: Range<T>,
4491 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4492 // Find bracket pairs that *inclusively* contain the given range.
4493 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4494 self.all_bracket_ranges(range)
4495 .filter(|pair| !pair.newline_only)
4496 }
4497
4498 pub fn debug_variables_query<T: ToOffset>(
4499 &self,
4500 range: Range<T>,
4501 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4502 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4503
4504 let mut matches = self.syntax.matches_with_options(
4505 range.clone(),
4506 &self.text,
4507 TreeSitterOptions::default(),
4508 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4509 );
4510
4511 let configs = matches
4512 .grammars()
4513 .iter()
4514 .map(|grammar| grammar.debug_variables_config.as_ref())
4515 .collect::<Vec<_>>();
4516
4517 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4518
4519 iter::from_fn(move || {
4520 loop {
4521 while let Some(capture) = captures.pop() {
4522 if capture.0.overlaps(&range) {
4523 return Some(capture);
4524 }
4525 }
4526
4527 let mat = matches.peek()?;
4528
4529 let Some(config) = configs[mat.grammar_index].as_ref() else {
4530 matches.advance();
4531 continue;
4532 };
4533
4534 for capture in mat.captures {
4535 let Some(ix) = config
4536 .objects_by_capture_ix
4537 .binary_search_by_key(&capture.index, |e| e.0)
4538 .ok()
4539 else {
4540 continue;
4541 };
4542 let text_object = config.objects_by_capture_ix[ix].1;
4543 let byte_range = capture.node.byte_range();
4544
4545 let mut found = false;
4546 for (range, existing) in captures.iter_mut() {
4547 if existing == &text_object {
4548 range.start = range.start.min(byte_range.start);
4549 range.end = range.end.max(byte_range.end);
4550 found = true;
4551 break;
4552 }
4553 }
4554
4555 if !found {
4556 captures.push((byte_range, text_object));
4557 }
4558 }
4559
4560 matches.advance();
4561 }
4562 })
4563 }
4564
4565 pub fn text_object_ranges<T: ToOffset>(
4566 &self,
4567 range: Range<T>,
4568 options: TreeSitterOptions,
4569 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4570 let range =
4571 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4572
4573 let mut matches =
4574 self.syntax
4575 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4576 grammar.text_object_config.as_ref().map(|c| &c.query)
4577 });
4578
4579 let configs = matches
4580 .grammars()
4581 .iter()
4582 .map(|grammar| grammar.text_object_config.as_ref())
4583 .collect::<Vec<_>>();
4584
4585 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4586
4587 iter::from_fn(move || {
4588 loop {
4589 while let Some(capture) = captures.pop() {
4590 if capture.0.overlaps(&range) {
4591 return Some(capture);
4592 }
4593 }
4594
4595 let mat = matches.peek()?;
4596
4597 let Some(config) = configs[mat.grammar_index].as_ref() else {
4598 matches.advance();
4599 continue;
4600 };
4601
4602 for capture in mat.captures {
4603 let Some(ix) = config
4604 .text_objects_by_capture_ix
4605 .binary_search_by_key(&capture.index, |e| e.0)
4606 .ok()
4607 else {
4608 continue;
4609 };
4610 let text_object = config.text_objects_by_capture_ix[ix].1;
4611 let byte_range = capture.node.byte_range();
4612
4613 let mut found = false;
4614 for (range, existing) in captures.iter_mut() {
4615 if existing == &text_object {
4616 range.start = range.start.min(byte_range.start);
4617 range.end = range.end.max(byte_range.end);
4618 found = true;
4619 break;
4620 }
4621 }
4622
4623 if !found {
4624 captures.push((byte_range, text_object));
4625 }
4626 }
4627
4628 matches.advance();
4629 }
4630 })
4631 }
4632
    /// Returns the enclosing bracket ranges that contain the given range.
4634 pub fn enclosing_bracket_ranges<T: ToOffset>(
4635 &self,
4636 range: Range<T>,
4637 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4638 let range = range.start.to_offset(self)..range.end.to_offset(self);
4639
4640 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4641 let max_depth = result
4642 .iter()
4643 .map(|mat| mat.syntax_layer_depth)
4644 .max()
4645 .unwrap_or(0);
4646 result.into_iter().filter(move |pair| {
4647 pair.open_range.start <= range.start
4648 && pair.close_range.end >= range.end
4649 && pair.syntax_layer_depth == max_depth
4650 })
4651 }
4652
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if
    /// no bracket pair contains the range.
    ///
    /// An optional `range_filter` can be passed to restrict which bracket ranges are considered.
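    ///
    /// A minimal usage sketch (illustrative only, not compiled as a doctest); assumes a
    /// `snapshot: BufferSnapshot` and a `selection_range: Range<usize>`; the filter shown is a
    /// hypothetical one that keeps only single-character open brackets:
    ///
    /// ```ignore
    /// let filter = |open: Range<usize>, _close: Range<usize>| open.len() == 1;
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(selection_range, Some(&filter))
    /// {
    ///     // `open` and `close` are the byte ranges of the two bracket tokens.
    ///     let _ = (open, close);
    /// }
    /// ```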
4656 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4657 &self,
4658 range: Range<T>,
4659 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4660 ) -> Option<(Range<usize>, Range<usize>)> {
4661 let range = range.start.to_offset(self)..range.end.to_offset(self);
4662
4663 // Get the ranges of the innermost pair of brackets.
4664 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4665
4666 for pair in self.enclosing_bracket_ranges(range) {
4667 if let Some(range_filter) = range_filter
4668 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4669 {
4670 continue;
4671 }
4672
4673 let len = pair.close_range.end - pair.open_range.start;
4674
4675 if let Some((existing_open, existing_close)) = &result {
4676 let existing_len = existing_close.end - existing_open.start;
4677 if len > existing_len {
4678 continue;
4679 }
4680 }
4681
4682 result = Some((pair.open_range, pair.close_range));
4683 }
4684
4685 result
4686 }
4687
4688 /// Returns anchor ranges for any matches of the redaction query.
4689 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4690 /// will be run on the relevant section of the buffer.
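    ///
    /// A minimal usage sketch (illustrative only, not compiled as a doctest); assumes a
    /// `snapshot: BufferSnapshot`:
    ///
    /// ```ignore
    /// // Collect every redacted span (e.g. secret values in an environment file) so the
    /// // renderer can obscure them.
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```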
4691 pub fn redacted_ranges<T: ToOffset>(
4692 &self,
4693 range: Range<T>,
4694 ) -> impl Iterator<Item = Range<usize>> + '_ {
4695 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4696 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4697 grammar
4698 .redactions_config
4699 .as_ref()
4700 .map(|config| &config.query)
4701 });
4702
4703 let configs = syntax_matches
4704 .grammars()
4705 .iter()
4706 .map(|grammar| grammar.redactions_config.as_ref())
4707 .collect::<Vec<_>>();
4708
4709 iter::from_fn(move || {
4710 let redacted_range = syntax_matches
4711 .peek()
4712 .and_then(|mat| {
4713 configs[mat.grammar_index].and_then(|config| {
4714 mat.captures
4715 .iter()
4716 .find(|capture| capture.index == config.redaction_capture_ix)
4717 })
4718 })
4719 .map(|mat| mat.node.byte_range());
4720 syntax_matches.advance();
4721 redacted_range
4722 })
4723 }
4724
4725 pub fn injections_intersecting_range<T: ToOffset>(
4726 &self,
4727 range: Range<T>,
4728 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4729 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4730
4731 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4732 grammar
4733 .injection_config
4734 .as_ref()
4735 .map(|config| &config.query)
4736 });
4737
4738 let configs = syntax_matches
4739 .grammars()
4740 .iter()
4741 .map(|grammar| grammar.injection_config.as_ref())
4742 .collect::<Vec<_>>();
4743
4744 iter::from_fn(move || {
4745 let ranges = syntax_matches.peek().and_then(|mat| {
4746 let config = &configs[mat.grammar_index]?;
4747 let content_capture_range = mat.captures.iter().find_map(|capture| {
4748 if capture.index == config.content_capture_ix {
4749 Some(capture.node.byte_range())
4750 } else {
4751 None
4752 }
4753 })?;
4754 let language = self.language_at(content_capture_range.start)?;
4755 Some((content_capture_range, language))
4756 });
4757 syntax_matches.advance();
4758 ranges
4759 })
4760 }
4761
4762 pub fn runnable_ranges(
4763 &self,
4764 offset_range: Range<usize>,
4765 ) -> impl Iterator<Item = RunnableRange> + '_ {
4766 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4767 grammar.runnable_config.as_ref().map(|config| &config.query)
4768 });
4769
4770 let test_configs = syntax_matches
4771 .grammars()
4772 .iter()
4773 .map(|grammar| grammar.runnable_config.as_ref())
4774 .collect::<Vec<_>>();
4775
4776 iter::from_fn(move || {
4777 loop {
4778 let mat = syntax_matches.peek()?;
4779
4780 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4781 let mut run_range = None;
4782 let full_range = mat.captures.iter().fold(
4783 Range {
4784 start: usize::MAX,
4785 end: 0,
4786 },
4787 |mut acc, next| {
4788 let byte_range = next.node.byte_range();
4789 if acc.start > byte_range.start {
4790 acc.start = byte_range.start;
4791 }
4792 if acc.end < byte_range.end {
4793 acc.end = byte_range.end;
4794 }
4795 acc
4796 },
4797 );
4798 if full_range.start > full_range.end {
4799 // We did not find a full spanning range of this match.
4800 return None;
4801 }
4802 let extra_captures: SmallVec<[_; 1]> =
4803 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4804 test_configs
4805 .extra_captures
4806 .get(capture.index as usize)
4807 .cloned()
4808 .and_then(|tag_name| match tag_name {
4809 RunnableCapture::Named(name) => {
4810 Some((capture.node.byte_range(), name))
4811 }
4812 RunnableCapture::Run => {
4813 let _ = run_range.insert(capture.node.byte_range());
4814 None
4815 }
4816 })
4817 }));
4818 let run_range = run_range?;
4819 let tags = test_configs
4820 .query
4821 .property_settings(mat.pattern_index)
4822 .iter()
4823 .filter_map(|property| {
4824 if *property.key == *"tag" {
4825 property
4826 .value
4827 .as_ref()
4828 .map(|value| RunnableTag(value.to_string().into()))
4829 } else {
4830 None
4831 }
4832 })
4833 .collect();
4834 let extra_captures = extra_captures
4835 .into_iter()
4836 .map(|(range, name)| {
4837 (
4838 name.to_string(),
4839 self.text_for_range(range).collect::<String>(),
4840 )
4841 })
4842 .collect();
4843 // All tags should have the same range.
4844 Some(RunnableRange {
4845 run_range,
4846 full_range,
4847 runnable: Runnable {
4848 tags,
4849 language: mat.language,
4850 buffer: self.remote_id(),
4851 },
4852 extra_captures,
4853 buffer_id: self.remote_id(),
4854 })
4855 });
4856
4857 syntax_matches.advance();
4858 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. However, we don't
                    // want to end this iterator just because a match lacked a run marker, so in
                    // that case we loop around and try the next match.
4861 return test_range;
4862 }
4863 }
4864 })
4865 }
4866
4867 /// Returns selections for remote peers intersecting the given range.
4868 #[allow(clippy::type_complexity)]
4869 pub fn selections_in_range(
4870 &self,
4871 range: Range<Anchor>,
4872 include_local: bool,
4873 ) -> impl Iterator<
4874 Item = (
4875 ReplicaId,
4876 bool,
4877 CursorShape,
4878 impl Iterator<Item = &Selection<Anchor>> + '_,
4879 ),
4880 > + '_ {
4881 self.remote_selections
4882 .iter()
4883 .filter(move |(replica_id, set)| {
4884 (include_local || **replica_id != self.text.replica_id())
4885 && !set.selections.is_empty()
4886 })
4887 .map(move |(replica_id, set)| {
4888 let start_ix = match set.selections.binary_search_by(|probe| {
4889 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4890 }) {
4891 Ok(ix) | Err(ix) => ix,
4892 };
4893 let end_ix = match set.selections.binary_search_by(|probe| {
4894 probe.start.cmp(&range.end, self).then(Ordering::Less)
4895 }) {
4896 Ok(ix) | Err(ix) => ix,
4897 };
4898
4899 (
4900 *replica_id,
4901 set.line_mode,
4902 set.cursor_shape,
4903 set.selections[start_ix..end_ix].iter(),
4904 )
4905 })
4906 }
4907
    /// Returns whether the buffer contains any diagnostics.
4909 pub fn has_diagnostics(&self) -> bool {
4910 !self.diagnostics.is_empty()
4911 }
4912
4913 /// Returns all the diagnostics intersecting the given range.
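    ///
    /// A minimal usage sketch (illustrative only, not compiled as a doctest); assumes a
    /// `snapshot: BufferSnapshot`, with an arbitrary search range:
    ///
    /// ```ignore
    /// // Resolve each diagnostic's anchors to byte offsets while iterating in offset order.
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..1024, false) {
    ///     let _ = (entry.range.clone(), entry.diagnostic.severity);
    /// }
    /// ```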
4914 pub fn diagnostics_in_range<'a, T, O>(
4915 &'a self,
4916 search_range: Range<T>,
4917 reversed: bool,
4918 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4919 where
4920 T: 'a + Clone + ToOffset,
4921 O: 'a + FromAnchor,
4922 {
4923 let mut iterators: Vec<_> = self
4924 .diagnostics
4925 .iter()
4926 .map(|(_, collection)| {
4927 collection
4928 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4929 .peekable()
4930 })
4931 .collect();
4932
4933 std::iter::from_fn(move || {
4934 let (next_ix, _) = iterators
4935 .iter_mut()
4936 .enumerate()
4937 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4938 .min_by(|(_, a), (_, b)| {
4939 let cmp = a
4940 .range
4941 .start
4942 .cmp(&b.range.start, self)
4943 // when range is equal, sort by diagnostic severity
4944 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4945 // and stabilize order with group_id
4946 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4947 if reversed { cmp.reverse() } else { cmp }
4948 })?;
4949 iterators[next_ix]
4950 .next()
4951 .map(
4952 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4953 diagnostic,
4954 range: FromAnchor::from_anchor(&range.start, self)
4955 ..FromAnchor::from_anchor(&range.end, self),
4956 },
4957 )
4958 })
4959 }
4960
4961 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4962 /// should be used instead.
4963 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4964 &self.diagnostics
4965 }
4966
4967 /// Returns all the diagnostic groups associated with the given
4968 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
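    ///
    /// A minimal usage sketch (illustrative only, not compiled as a doctest); assumes a
    /// `snapshot: BufferSnapshot`:
    ///
    /// ```ignore
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     // Each group's primary diagnostic is at `group.primary_ix`.
    ///     let primary = &group.entries[group.primary_ix];
    ///     let _ = (server_id, primary);
    /// }
    /// ```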
4970 pub fn diagnostic_groups(
4971 &self,
4972 language_server_id: Option<LanguageServerId>,
4973 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4974 let mut groups = Vec::new();
4975
4976 if let Some(language_server_id) = language_server_id {
4977 if let Ok(ix) = self
4978 .diagnostics
4979 .binary_search_by_key(&language_server_id, |e| e.0)
4980 {
4981 self.diagnostics[ix]
4982 .1
4983 .groups(language_server_id, &mut groups, self);
4984 }
4985 } else {
4986 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4987 diagnostics.groups(*language_server_id, &mut groups, self);
4988 }
4989 }
4990
4991 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4992 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4993 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4994 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4995 });
4996
4997 groups
4998 }
4999
5000 /// Returns an iterator over the diagnostics for the given group.
5001 pub fn diagnostic_group<O>(
5002 &self,
5003 group_id: usize,
5004 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5005 where
5006 O: FromAnchor + 'static,
5007 {
5008 self.diagnostics
5009 .iter()
5010 .flat_map(move |(_, set)| set.group(group_id, self))
5011 }
5012
5013 /// An integer version number that accounts for all updates besides
5014 /// the buffer's text itself (which is versioned via a version vector).
5015 pub fn non_text_state_update_count(&self) -> usize {
5016 self.non_text_state_update_count
5017 }
5018
5019 /// An integer version that changes when the buffer's syntax changes.
5020 pub fn syntax_update_count(&self) -> usize {
5021 self.syntax.update_count()
5022 }
5023
    /// Returns a snapshot of the underlying file.
5025 pub fn file(&self) -> Option<&Arc<dyn File>> {
5026 self.file.as_ref()
5027 }
5028
5029 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5030 if let Some(file) = self.file() {
5031 if file.path().file_name().is_none() || include_root {
5032 Some(file.full_path(cx).to_string_lossy().into_owned())
5033 } else {
5034 Some(file.path().display(file.path_style(cx)).to_string())
5035 }
5036 } else {
5037 None
5038 }
5039 }
5040
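    /// Collects the distinct words within `query.range`, keyed by their text and mapped to an
    /// anchor range covering an occurrence of the word.
    ///
    /// A minimal usage sketch (illustrative only, not compiled as a doctest); assumes a
    /// `snapshot: BufferSnapshot`:
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, _anchor_range) in &words {
    ///     println!("{word}");
    /// }
    /// ```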
5041 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5042 let query_str = query.fuzzy_contents;
5043 if query_str.is_some_and(|query| query.is_empty()) {
5044 return BTreeMap::default();
5045 }
5046
5047 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5048 language,
5049 override_id: None,
5050 }));
5051
5052 let mut query_ix = 0;
5053 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5054 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5055
5056 let mut words = BTreeMap::default();
5057 let mut current_word_start_ix = None;
5058 let mut chunk_ix = query.range.start;
5059 for chunk in self.chunks(query.range, false) {
5060 for (i, c) in chunk.text.char_indices() {
5061 let ix = chunk_ix + i;
5062 if classifier.is_word(c) {
5063 if current_word_start_ix.is_none() {
5064 current_word_start_ix = Some(ix);
5065 }
5066
5067 if let Some(query_chars) = &query_chars
5068 && query_ix < query_len
5069 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5070 {
5071 query_ix += 1;
5072 }
5073 continue;
5074 } else if let Some(word_start) = current_word_start_ix.take()
5075 && query_ix == query_len
5076 {
5077 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5078 let mut word_text = self.text_for_range(word_start..ix).peekable();
5079 let first_char = word_text
5080 .peek()
5081 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, optionally skip "words"
                    // that start with a digit.
5083 if !query.skip_digits
5084 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5085 {
5086 words.insert(word_text.collect(), word_range);
5087 }
5088 }
5089 query_ix = 0;
5090 }
5091 chunk_ix += chunk.text.len();
5092 }
5093
5094 words
5095 }
5096}
5097
5098pub struct WordsQuery<'a> {
    /// Only return words that contain all of the characters of this fuzzy string.
5100 pub fuzzy_contents: Option<&'a str>,
5101 /// Skips words that start with a digit.
5102 pub skip_digits: bool,
    /// The buffer offset range within which to look for words.
5104 pub range: Range<usize>,
5105}
5106
5107fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5108 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5109}
5110
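/// A minimal sketch of the behavior (illustrative only, not compiled as a doctest):
///
/// ```ignore
/// let size = indent_size_for_text("\tfn main() {}".chars());
/// // `size.kind` is `IndentKind::Tab` and `size.len` is 1; a line beginning with four spaces
/// // would instead report `IndentKind::Space` with a length of 4.
/// ```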
5111fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5112 let mut result = IndentSize::spaces(0);
5113 for c in text {
5114 let kind = match c {
5115 ' ' => IndentKind::Space,
5116 '\t' => IndentKind::Tab,
5117 _ => break,
5118 };
5119 if result.len == 0 {
5120 result.kind = kind;
5121 }
5122 result.len += 1;
5123 }
5124 result
5125}
5126
5127impl Clone for BufferSnapshot {
5128 fn clone(&self) -> Self {
5129 Self {
5130 text: self.text.clone(),
5131 syntax: self.syntax.clone(),
5132 file: self.file.clone(),
5133 remote_selections: self.remote_selections.clone(),
5134 diagnostics: self.diagnostics.clone(),
5135 language: self.language.clone(),
5136 tree_sitter_data: self.tree_sitter_data.clone(),
5137 non_text_state_update_count: self.non_text_state_update_count,
5138 }
5139 }
5140}
5141
5142impl Deref for BufferSnapshot {
5143 type Target = text::BufferSnapshot;
5144
5145 fn deref(&self) -> &Self::Target {
5146 &self.text
5147 }
5148}
5149
5150unsafe impl Send for BufferChunks<'_> {}
5151
5152impl<'a> BufferChunks<'a> {
5153 pub(crate) fn new(
5154 text: &'a Rope,
5155 range: Range<usize>,
5156 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5157 diagnostics: bool,
5158 buffer_snapshot: Option<&'a BufferSnapshot>,
5159 ) -> Self {
5160 let mut highlights = None;
5161 if let Some((captures, highlight_maps)) = syntax {
5162 highlights = Some(BufferChunkHighlights {
5163 captures,
5164 next_capture: None,
5165 stack: Default::default(),
5166 highlight_maps,
5167 })
5168 }
5169
5170 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5171 let chunks = text.chunks_in_range(range.clone());
5172
5173 let mut this = BufferChunks {
5174 range,
5175 buffer_snapshot,
5176 chunks,
5177 diagnostic_endpoints,
5178 error_depth: 0,
5179 warning_depth: 0,
5180 information_depth: 0,
5181 hint_depth: 0,
5182 unnecessary_depth: 0,
5183 underline: true,
5184 highlights,
5185 };
5186 this.initialize_diagnostic_endpoints();
5187 this
5188 }
5189
    /// Seeks to the given byte range in the buffer.
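    ///
    /// A minimal usage sketch (illustrative only, not compiled as a doctest); assumes a
    /// `snapshot: BufferSnapshot` is in scope, with arbitrary example offsets:
    ///
    /// ```ignore
    /// // Reuse a single language-aware iterator for several disjoint sub-ranges.
    /// let mut chunks = snapshot.chunks(0..snapshot.len(), true);
    /// chunks.seek(10..20);
    /// let first: String = chunks.by_ref().map(|chunk| chunk.text).collect();
    /// chunks.seek(40..50);
    /// let second: String = chunks.map(|chunk| chunk.text).collect();
    /// ```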
5191 pub fn seek(&mut self, range: Range<usize>) {
5192 let old_range = std::mem::replace(&mut self.range, range.clone());
5193 self.chunks.set_range(self.range.clone());
5194 if let Some(highlights) = self.highlights.as_mut() {
5195 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5196 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5197 highlights
5198 .stack
5199 .retain(|(end_offset, _)| *end_offset > range.start);
5200 if let Some(capture) = &highlights.next_capture
5201 && range.start >= capture.node.start_byte()
5202 {
5203 let next_capture_end = capture.node.end_byte();
5204 if range.start < next_capture_end {
5205 highlights.stack.push((
5206 next_capture_end,
5207 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5208 ));
5209 }
5210 highlights.next_capture.take();
5211 }
5212 } else if let Some(snapshot) = self.buffer_snapshot {
5213 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5214 *highlights = BufferChunkHighlights {
5215 captures,
5216 next_capture: None,
5217 stack: Default::default(),
5218 highlight_maps,
5219 };
5220 } else {
5221 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5222 // Seeking such BufferChunks is not supported.
5223 debug_assert!(
5224 false,
5225 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5226 );
5227 }
5228
5229 highlights.captures.set_byte_range(self.range.clone());
5230 self.initialize_diagnostic_endpoints();
5231 }
5232 }
5233
5234 fn initialize_diagnostic_endpoints(&mut self) {
5235 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5236 && let Some(buffer) = self.buffer_snapshot
5237 {
5238 let mut diagnostic_endpoints = Vec::new();
5239 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5240 diagnostic_endpoints.push(DiagnosticEndpoint {
5241 offset: entry.range.start,
5242 is_start: true,
5243 severity: entry.diagnostic.severity,
5244 is_unnecessary: entry.diagnostic.is_unnecessary,
5245 underline: entry.diagnostic.underline,
5246 });
5247 diagnostic_endpoints.push(DiagnosticEndpoint {
5248 offset: entry.range.end,
5249 is_start: false,
5250 severity: entry.diagnostic.severity,
5251 is_unnecessary: entry.diagnostic.is_unnecessary,
5252 underline: entry.diagnostic.underline,
5253 });
5254 }
5255 diagnostic_endpoints
5256 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5257 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5258 self.hint_depth = 0;
5259 self.error_depth = 0;
5260 self.warning_depth = 0;
5261 self.information_depth = 0;
5262 }
5263 }
5264
5265 /// The current byte offset in the buffer.
5266 pub fn offset(&self) -> usize {
5267 self.range.start
5268 }
5269
    pub fn range(&self) -> Range<usize> {
        self.range.clone()
    }

    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }

        if endpoint.is_unnecessary {
            if endpoint.is_start {
                self.unnecessary_depth += 1;
            } else {
                self.unnecessary_depth -= 1;
            }
        }
    }

    fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }

    fn current_code_is_unnecessary(&self) -> bool {
        self.unnecessary_depth > 0
    }
}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
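            // Pop any enclosing captures that have already ended at or before the
            // current position.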
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

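            // Push every capture that starts at or before the current position onto
            // the highlight stack, and remember where the next pending capture begins.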
            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

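        // Apply all diagnostic endpoints at or before the current position to the
        // per-severity depth counters, and note the offset of the next endpoint so
        // the upcoming chunk can be clipped to it.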
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
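            // `tabs` and `chars_map` are per-byte bitmaps for the current rope chunk,
            // so translate this output chunk's absolute byte range into offsets
            // relative to the start of that rope chunk before slicing.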
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

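            // Build a mask covering the low `bit_end` bits: `unbounded_shl` yields 0
            // when `bit_end == 128`, so `wrapping_sub(1)` produces an all-ones mask
            // instead of overflowing. Shifting by `bit_start` aligns bit 0 of each
            // bitmap with the first byte of `slice`.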
            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes this [`IndentSize`] and returns a new one that has been shrunk
    /// (`Ordering::Less`) or enlarged (`Ordering::Greater`) by the given size,
    /// depending on the given direction.
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

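    /// The number of columns this indent occupies, with each tab expanded to
    /// `tab_size` columns.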
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

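/// Groups an iterator of `u32` values into contiguous ranges, starting a new range
/// whenever a value does not directly follow the previous one or the current range has
/// reached `max_len` values.
///
/// For example, `[1, 2, 3, 5, 6]` with a `max_len` of 2 yields `1..3`, `3..4`, and `5..7`.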
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

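/// Classifies characters as word, whitespace, or punctuation characters, optionally
/// consulting a [`LanguageScope`]'s configured word characters for the given context.
///
/// A rough usage sketch (marked `ignore`; the scope setup depends on the caller):
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(false);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
/// ```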
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
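///
/// # Example
///
/// An illustrative sketch only (marked `ignore`; import paths may differ):
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\n");
/// // The two spaces after "a" and the tab after "b" are trailing whitespace.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```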
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}