1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::channel::oneshot;
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TaskLabel, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::{Arc, LazyLock},
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
79/// A label for the background task spawned by the buffer to compute
80/// a diff against the contents of its file.
81pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
82
/// Indicates whether a [`Buffer`] has permission to be edited.
84#[derive(PartialEq, Clone, Copy, Debug)]
85pub enum Capability {
86 /// The buffer is a mutable replica.
87 ReadWrite,
88 /// The buffer is a read-only replica.
89 ReadOnly,
90}
91
92pub type BufferRow = u32;
93
94/// An in-memory representation of a source code file, including its text,
95/// syntax trees, git status, and diagnostics.
96pub struct Buffer {
97 text: TextBuffer,
98 branch_state: Option<BufferBranchState>,
99 /// Filesystem state, `None` when there is no path.
100 file: Option<Arc<dyn File>>,
101 /// The mtime of the file when this buffer was last loaded from
102 /// or saved to disk.
103 saved_mtime: Option<MTime>,
104 /// The version vector when this buffer was last loaded from
105 /// or saved to disk.
106 saved_version: clock::Global,
107 preview_version: clock::Global,
108 transaction_depth: usize,
109 was_dirty_before_starting_transaction: Option<bool>,
110 reload_task: Option<Task<Result<()>>>,
111 language: Option<Arc<Language>>,
112 autoindent_requests: Vec<Arc<AutoindentRequest>>,
113 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
114 pending_autoindent: Option<Task<()>>,
115 sync_parse_timeout: Duration,
116 syntax_map: Mutex<SyntaxMap>,
117 reparse: Option<Task<()>>,
118 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
119 non_text_state_update_count: usize,
120 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
121 remote_selections: TreeMap<ReplicaId, SelectionSet>,
122 diagnostics_timestamp: clock::Lamport,
123 completion_triggers: BTreeSet<String>,
124 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
125 completion_triggers_timestamp: clock::Lamport,
126 deferred_ops: OperationQueue<Operation>,
127 capability: Capability,
128 has_conflict: bool,
    /// Memoizes the result of `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` from the most recent call.
131 has_unsaved_edits: Cell<(clock::Global, bool)>,
132 change_bits: Vec<rc::Weak<Cell<bool>>>,
133 _subscriptions: Vec<gpui::Subscription>,
134 tree_sitter_data: Arc<TreeSitterData>,
135 encoding: &'static Encoding,
136 has_bom: bool,
137}
138
139#[derive(Debug)]
140pub struct TreeSitterData {
141 chunks: RowChunks,
142 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
143}
144
145const MAX_ROWS_IN_A_CHUNK: u32 = 50;
146
147impl TreeSitterData {
148 fn clear(&mut self, snapshot: text::BufferSnapshot) {
149 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
150 self.brackets_by_chunks.get_mut().clear();
151 self.brackets_by_chunks
152 .get_mut()
153 .resize(self.chunks.len(), None);
154 }
155
156 fn new(snapshot: text::BufferSnapshot) -> Self {
157 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
158 Self {
159 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
160 chunks,
161 }
162 }
163
164 fn version(&self) -> &clock::Global {
165 self.chunks.version()
166 }
167}
168
169#[derive(Copy, Clone, Debug, PartialEq, Eq)]
170pub enum ParseStatus {
171 Idle,
172 Parsing,
173}
174
175struct BufferBranchState {
176 base_buffer: Entity<Buffer>,
177 merged_operations: Vec<Lamport>,
178}
179
180/// An immutable, cheaply cloneable representation of a fixed
181/// state of a buffer.
182pub struct BufferSnapshot {
183 pub text: text::BufferSnapshot,
184 pub syntax: SyntaxSnapshot,
185 file: Option<Arc<dyn File>>,
186 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
187 remote_selections: TreeMap<ReplicaId, SelectionSet>,
188 language: Option<Arc<Language>>,
189 non_text_state_update_count: usize,
190 tree_sitter_data: Arc<TreeSitterData>,
191}
192
193/// The kind and amount of indentation in a particular line. For now,
194/// assumes that indentation is all the same character.
195#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
196pub struct IndentSize {
197 /// The number of bytes that comprise the indentation.
198 pub len: u32,
199 /// The kind of whitespace used for indentation.
200 pub kind: IndentKind,
201}
202
203/// A whitespace character that's used for indentation.
204#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
205pub enum IndentKind {
206 /// An ASCII space character.
207 #[default]
208 Space,
209 /// An ASCII tab character.
210 Tab,
211}
212
213/// The shape of a selection cursor.
214#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
215pub enum CursorShape {
216 /// A vertical bar
217 #[default]
218 Bar,
219 /// A block that surrounds the following character
220 Block,
221 /// An underline that runs along the following character
222 Underline,
223 /// A box drawn around the following character
224 Hollow,
225}
226
227impl From<settings::CursorShape> for CursorShape {
228 fn from(shape: settings::CursorShape) -> Self {
229 match shape {
230 settings::CursorShape::Bar => CursorShape::Bar,
231 settings::CursorShape::Block => CursorShape::Block,
232 settings::CursorShape::Underline => CursorShape::Underline,
233 settings::CursorShape::Hollow => CursorShape::Hollow,
234 }
235 }
236}
237
238#[derive(Clone, Debug)]
239struct SelectionSet {
240 line_mode: bool,
241 cursor_shape: CursorShape,
242 selections: Arc<[Selection<Anchor>]>,
243 lamport_timestamp: clock::Lamport,
244}
245
246/// A diagnostic associated with a certain range of a buffer.
247#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
248pub struct Diagnostic {
249 /// The name of the service that produced this diagnostic.
250 pub source: Option<String>,
251 /// The ID provided by the dynamic registration that produced this diagnostic.
252 pub registration_id: Option<SharedString>,
253 /// A machine-readable code that identifies this diagnostic.
254 pub code: Option<NumberOrString>,
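    /// An optional URI with more information about this diagnostic's code.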
255 pub code_description: Option<lsp::Uri>,
256 /// Whether this diagnostic is a hint, warning, or error.
257 pub severity: DiagnosticSeverity,
258 /// The human-readable message associated with this diagnostic.
259 pub message: String,
    /// The human-readable message, in Markdown format, if available.
261 pub markdown: Option<String>,
262 /// An id that identifies the group to which this diagnostic belongs.
263 ///
264 /// When a language server produces a diagnostic with
265 /// one or more associated diagnostics, those diagnostics are all
266 /// assigned a single group ID.
267 pub group_id: usize,
268 /// Whether this diagnostic is the primary diagnostic for its group.
269 ///
270 /// In a given group, the primary diagnostic is the top-level diagnostic
271 /// returned by the language server. The non-primary diagnostics are the
272 /// associated diagnostics.
273 pub is_primary: bool,
274 /// Whether this diagnostic is considered to originate from an analysis of
275 /// files on disk, as opposed to any unsaved buffer contents. This is a
276 /// property of a given diagnostic source, and is configured for a given
277 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
278 /// for the language server.
279 pub is_disk_based: bool,
280 /// Whether this diagnostic marks unnecessary code.
281 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups based on how their source produced them.
283 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. It is passed back to the
    /// server when code actions are requested for this diagnostic.
285 pub data: Option<Value>,
286 /// Whether to underline the corresponding text range in the editor.
287 pub underline: bool,
288}
289
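/// Describes how a diagnostic was obtained: pulled from the language server by the
/// client, pushed by the language server, or produced by some other source.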
290#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
291pub enum DiagnosticSourceKind {
292 Pulled,
293 Pushed,
294 Other,
295}
296
297/// An operation used to synchronize this buffer with its other replicas.
298#[derive(Clone, Debug, PartialEq)]
299pub enum Operation {
300 /// A text operation.
301 Buffer(text::Operation),
302
303 /// An update to the buffer's diagnostics.
304 UpdateDiagnostics {
305 /// The id of the language server that produced the new diagnostics.
306 server_id: LanguageServerId,
307 /// The diagnostics.
308 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
309 /// The buffer's lamport timestamp.
310 lamport_timestamp: clock::Lamport,
311 },
312
313 /// An update to the most recent selections in this buffer.
314 UpdateSelections {
315 /// The selections.
316 selections: Arc<[Selection<Anchor>]>,
317 /// The buffer's lamport timestamp.
318 lamport_timestamp: clock::Lamport,
319 /// Whether the selections are in 'line mode'.
320 line_mode: bool,
321 /// The [`CursorShape`] associated with these selections.
322 cursor_shape: CursorShape,
323 },
324
325 /// An update to the characters that should trigger autocompletion
326 /// for this buffer.
327 UpdateCompletionTriggers {
328 /// The characters that trigger autocompletion.
329 triggers: Vec<String>,
330 /// The buffer's lamport timestamp.
331 lamport_timestamp: clock::Lamport,
332 /// The language server ID.
333 server_id: LanguageServerId,
334 },
335
336 /// An update to the line ending type of this buffer.
337 UpdateLineEnding {
338 /// The line ending type.
339 line_ending: LineEnding,
340 /// The buffer's lamport timestamp.
341 lamport_timestamp: clock::Lamport,
342 },
343}
344
345/// An event that occurs in a buffer.
346#[derive(Clone, Debug, PartialEq)]
347pub enum BufferEvent {
348 /// The buffer was changed in a way that must be
349 /// propagated to its other replicas.
350 Operation {
351 operation: Operation,
352 is_local: bool,
353 },
354 /// The buffer was edited.
355 Edited,
356 /// The buffer's `dirty` bit changed.
357 DirtyChanged,
358 /// The buffer was saved.
359 Saved,
360 /// The buffer's file was changed on disk.
361 FileHandleChanged,
362 /// The buffer was reloaded.
363 Reloaded,
    /// The buffer is in need of a reload.
365 ReloadNeeded,
366 /// The buffer's language was changed.
    /// The boolean is true if the buffer did not have a language assigned before (or had only plain text), but does now.
368 LanguageChanged(bool),
369 /// The buffer's syntax trees were updated.
370 Reparsed,
371 /// The buffer's diagnostics were updated.
372 DiagnosticsUpdated,
373 /// The buffer gained or lost editing capabilities.
374 CapabilityChanged,
375}
376
377/// The file associated with a buffer.
378pub trait File: Send + Sync + Any {
379 /// Returns the [`LocalFile`] associated with this file, if the
380 /// file is local.
381 fn as_local(&self) -> Option<&dyn LocalFile>;
382
383 /// Returns whether this file is local.
384 fn is_local(&self) -> bool {
385 self.as_local().is_some()
386 }
387
388 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
389 /// only available in some states, such as modification time.
390 fn disk_state(&self) -> DiskState;
391
392 /// Returns the path of this file relative to the worktree's root directory.
393 fn path(&self) -> &Arc<RelPath>;
394
395 /// Returns the path of this file relative to the worktree's parent directory (this means it
396 /// includes the name of the worktree's root folder).
397 fn full_path(&self, cx: &App) -> PathBuf;
398
399 /// Returns the path style of this file.
400 fn path_style(&self, cx: &App) -> PathStyle;
401
402 /// Returns the last component of this handle's absolute path. If this handle refers to the root
403 /// of its worktree, then this method will return the name of the worktree itself.
404 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
405
406 /// Returns the id of the worktree to which this file belongs.
407 ///
408 /// This is needed for looking up project-specific settings.
409 fn worktree_id(&self, cx: &App) -> WorktreeId;
410
411 /// Converts this file into a protobuf message.
412 fn to_proto(&self, cx: &App) -> rpc::proto::File;
413
    /// Returns whether Zed considers this to be a private file.
415 fn is_private(&self) -> bool;
416}
417
418/// The file's storage status - whether it's stored (`Present`), and if so when it was last
419/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
420/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
421/// indicator for new files.
422#[derive(Copy, Clone, Debug, PartialEq)]
423pub enum DiskState {
424 /// File created in Zed that has not been saved.
425 New,
426 /// File present on the filesystem.
427 Present { mtime: MTime },
428 /// Deleted file that was previously present.
429 Deleted,
430}
431
432impl DiskState {
433 /// Returns the file's last known modification time on disk.
434 pub fn mtime(self) -> Option<MTime> {
435 match self {
436 DiskState::New => None,
437 DiskState::Present { mtime } => Some(mtime),
438 DiskState::Deleted => None,
439 }
440 }
441
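    /// Returns whether the file currently exists on disk.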
442 pub fn exists(&self) -> bool {
443 match self {
444 DiskState::New => false,
445 DiskState::Present { .. } => true,
446 DiskState::Deleted => false,
447 }
448 }
449}
450
451/// The file associated with a buffer, in the case where the file is on the local disk.
452pub trait LocalFile: File {
    /// Returns the absolute path of this file.
454 fn abs_path(&self, cx: &App) -> PathBuf;
455
456 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
457 fn load(&self, cx: &App) -> Task<Result<String>>;
458
459 /// Loads the file's contents from disk.
460 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
461}
462
463/// The auto-indent behavior associated with an editing operation.
464/// For some editing operations, each affected line of text has its
465/// indentation recomputed. For other operations, the entire block
466/// of edited text is adjusted uniformly.
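///
/// A minimal usage sketch (not compiled here; it assumes a `Buffer` being edited inside
/// a `Context<Buffer>` named `cx`, and an insertion offset named `insert_at`):
///
/// ```ignore
/// // Paste two lines as a block. The first line was originally indented to
/// // column 4; if auto-indent places it at column 8, every pasted line is
/// // shifted right by the same 4 columns, preserving relative indentation.
/// buffer.edit(
///     [(insert_at..insert_at, "    foo();\n        bar();\n")],
///     Some(AutoindentMode::Block {
///         original_indent_columns: vec![Some(4)],
///     }),
///     cx,
/// );
/// ```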
467#[derive(Clone, Debug)]
468pub enum AutoindentMode {
469 /// Indent each line of inserted text.
470 EachLine,
471 /// Apply the same indentation adjustment to all of the lines
472 /// in a given insertion.
473 Block {
474 /// The original indentation column of the first line of each
475 /// insertion, if it has been copied.
476 ///
477 /// Knowing this makes it possible to preserve the relative indentation
478 /// of every line in the insertion from when it was copied.
479 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is then auto-indented to column `b`, then every other line of
        /// the insertion will have its indentation adjusted by `b - a`.
483 original_indent_columns: Vec<Option<u32>>,
484 },
485}
486
487#[derive(Clone)]
488struct AutoindentRequest {
489 before_edit: BufferSnapshot,
490 entries: Vec<AutoindentRequestEntry>,
491 is_block_mode: bool,
492 ignore_empty_lines: bool,
493}
494
495#[derive(Debug, Clone)]
496struct AutoindentRequestEntry {
497 /// A range of the buffer whose indentation should be adjusted.
498 range: Range<Anchor>,
499 /// Whether or not these lines should be considered brand new, for the
500 /// purpose of auto-indent. When text is not new, its indentation will
501 /// only be adjusted if the suggested indentation level has *changed*
502 /// since the edit was made.
503 first_line_is_new: bool,
504 indent_size: IndentSize,
505 original_indent_column: Option<u32>,
506}
507
508#[derive(Debug)]
509struct IndentSuggestion {
510 basis_row: u32,
511 delta: Ordering,
512 within_error: bool,
513}
514
515struct BufferChunkHighlights<'a> {
516 captures: SyntaxMapCaptures<'a>,
517 next_capture: Option<SyntaxMapCapture<'a>>,
518 stack: Vec<(usize, HighlightId)>,
519 highlight_maps: Vec<HighlightMap>,
520}
521
522/// An iterator that yields chunks of a buffer's text, along with their
523/// syntax highlights and diagnostic status.
524pub struct BufferChunks<'a> {
525 buffer_snapshot: Option<&'a BufferSnapshot>,
526 range: Range<usize>,
527 chunks: text::Chunks<'a>,
528 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
529 error_depth: usize,
530 warning_depth: usize,
531 information_depth: usize,
532 hint_depth: usize,
533 unnecessary_depth: usize,
534 underline: bool,
535 highlights: Option<BufferChunkHighlights<'a>>,
536}
537
538/// A chunk of a buffer's text, along with its syntax highlight and
539/// diagnostic status.
540#[derive(Clone, Debug, Default)]
541pub struct Chunk<'a> {
542 /// The text of the chunk.
543 pub text: &'a str,
544 /// The syntax highlighting style of the chunk.
545 pub syntax_highlight_id: Option<HighlightId>,
546 /// The highlight style that has been applied to this chunk in
547 /// the editor.
548 pub highlight_style: Option<HighlightStyle>,
549 /// The severity of diagnostic associated with this chunk, if any.
550 pub diagnostic_severity: Option<DiagnosticSeverity>,
551 /// A bitset of which characters are tabs in this string.
552 pub tabs: u128,
    /// A bitmap of character indices in this chunk.
554 pub chars: u128,
555 /// Whether this chunk of text is marked as unnecessary.
556 pub is_unnecessary: bool,
557 /// Whether this chunk of text was originally a tab character.
558 pub is_tab: bool,
559 /// Whether this chunk of text was originally an inlay.
560 pub is_inlay: bool,
561 /// Whether to underline the corresponding text range in the editor.
562 pub underline: bool,
563}
564
565/// A set of edits to a given version of a buffer, computed asynchronously.
566#[derive(Debug)]
567pub struct Diff {
568 pub base_version: clock::Global,
569 pub line_ending: LineEnding,
570 pub edits: Vec<(Range<usize>, Arc<str>)>,
571}
572
573#[derive(Debug, Clone, Copy)]
574pub(crate) struct DiagnosticEndpoint {
575 offset: usize,
576 is_start: bool,
577 underline: bool,
578 severity: DiagnosticSeverity,
579 is_unnecessary: bool,
580}
581
582/// A class of characters, used for characterizing a run of text.
583#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
584pub enum CharKind {
585 /// Whitespace.
586 Whitespace,
587 /// Punctuation.
588 Punctuation,
589 /// Word.
590 Word,
591}
592
593/// Context for character classification within a specific scope.
594#[derive(Copy, Clone, Eq, PartialEq, Debug)]
595pub enum CharScopeContext {
596 /// Character classification for completion queries.
597 ///
598 /// This context treats certain characters as word constituents that would
599 /// normally be considered punctuation, such as '-' in Tailwind classes
600 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
601 Completion,
602 /// Character classification for linked edits.
603 ///
604 /// This context handles characters that should be treated as part of
605 /// identifiers during linked editing operations, such as '.' in JSX
606 /// component names like `<Animated.View>`.
607 LinkedEdit,
608}
609
/// A runnable is a set of data about a buffer region that can be resolved into a task.
611pub struct Runnable {
612 pub tags: SmallVec<[RunnableTag; 1]>,
613 pub language: Arc<Language>,
614 pub buffer: BufferId,
615}
616
617#[derive(Default, Clone, Debug)]
618pub struct HighlightedText {
619 pub text: SharedString,
620 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
621}
622
623#[derive(Default, Debug)]
624struct HighlightedTextBuilder {
625 pub text: String,
626 highlights: Vec<(Range<usize>, HighlightStyle)>,
627}
628
629impl HighlightedText {
630 pub fn from_buffer_range<T: ToOffset>(
631 range: Range<T>,
632 snapshot: &text::BufferSnapshot,
633 syntax_snapshot: &SyntaxSnapshot,
634 override_style: Option<HighlightStyle>,
635 syntax_theme: &SyntaxTheme,
636 ) -> Self {
637 let mut highlighted_text = HighlightedTextBuilder::default();
638 highlighted_text.add_text_from_buffer_range(
639 range,
640 snapshot,
641 syntax_snapshot,
642 override_style,
643 syntax_theme,
644 );
645 highlighted_text.build()
646 }
647
648 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
649 gpui::StyledText::new(self.text.clone())
650 .with_default_highlights(default_style, self.highlights.iter().cloned())
651 }
652
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within it, and a boolean indicating whether more lines follow.
655 pub fn first_line_preview(self) -> (Self, bool) {
656 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
657 let first_line = &self.text[..newline_ix];
658
659 // Trim leading whitespace, unless an edit starts prior to it.
660 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
661 if let Some((first_highlight_range, _)) = self.highlights.first() {
662 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
663 }
664
665 let preview_text = &first_line[preview_start_ix..];
666 let preview_highlights = self
667 .highlights
668 .into_iter()
669 .skip_while(|(range, _)| range.end <= preview_start_ix)
670 .take_while(|(range, _)| range.start < newline_ix)
671 .filter_map(|(mut range, highlight)| {
672 range.start = range.start.saturating_sub(preview_start_ix);
673 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
674 if range.is_empty() {
675 None
676 } else {
677 Some((range, highlight))
678 }
679 });
680
681 let preview = Self {
682 text: SharedString::new(preview_text),
683 highlights: preview_highlights.collect(),
684 };
685
686 (preview, self.text.len() > newline_ix)
687 }
688}
689
690impl HighlightedTextBuilder {
691 pub fn build(self) -> HighlightedText {
692 HighlightedText {
693 text: self.text.into(),
694 highlights: self.highlights,
695 }
696 }
697
698 pub fn add_text_from_buffer_range<T: ToOffset>(
699 &mut self,
700 range: Range<T>,
701 snapshot: &text::BufferSnapshot,
702 syntax_snapshot: &SyntaxSnapshot,
703 override_style: Option<HighlightStyle>,
704 syntax_theme: &SyntaxTheme,
705 ) {
706 let range = range.to_offset(snapshot);
707 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
708 let start = self.text.len();
709 self.text.push_str(chunk.text);
710 let end = self.text.len();
711
712 if let Some(highlight_style) = chunk
713 .syntax_highlight_id
714 .and_then(|id| id.style(syntax_theme))
715 {
716 let highlight_style = override_style.map_or(highlight_style, |override_style| {
717 highlight_style.highlight(override_style)
718 });
719 self.highlights.push((start..end, highlight_style));
720 } else if let Some(override_style) = override_style {
721 self.highlights.push((start..end, override_style));
722 }
723 }
724 }
725
726 fn highlighted_chunks<'a>(
727 range: Range<usize>,
728 snapshot: &'a text::BufferSnapshot,
729 syntax_snapshot: &'a SyntaxSnapshot,
730 ) -> BufferChunks<'a> {
731 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
732 grammar
733 .highlights_config
734 .as_ref()
735 .map(|config| &config.query)
736 });
737
738 let highlight_maps = captures
739 .grammars()
740 .iter()
741 .map(|grammar| grammar.highlight_map())
742 .collect();
743
744 BufferChunks::new(
745 snapshot.as_rope(),
746 range,
747 Some((captures, highlight_maps)),
748 false,
749 None,
750 )
751 }
752}
753
754#[derive(Clone)]
755pub struct EditPreview {
756 old_snapshot: text::BufferSnapshot,
757 applied_edits_snapshot: text::BufferSnapshot,
758 syntax_snapshot: SyntaxSnapshot,
759}
760
761impl EditPreview {
762 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
763 let (first, _) = edits.first()?;
764 let (last, _) = edits.last()?;
765
766 let start = first.start.to_point(&self.old_snapshot);
767 let old_end = last.end.to_point(&self.old_snapshot);
768 let new_end = last
769 .end
770 .bias_right(&self.old_snapshot)
771 .to_point(&self.applied_edits_snapshot);
772
773 let start = Point::new(start.row.saturating_sub(3), 0);
774 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
775 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
776
777 Some(unified_diff(
778 &self
779 .old_snapshot
780 .text_for_range(start..old_end)
781 .collect::<String>(),
782 &self
783 .applied_edits_snapshot
784 .text_for_range(start..new_end)
785 .collect::<String>(),
786 ))
787 }
788
789 pub fn highlight_edits(
790 &self,
791 current_snapshot: &BufferSnapshot,
792 edits: &[(Range<Anchor>, impl AsRef<str>)],
793 include_deletions: bool,
794 cx: &App,
795 ) -> HighlightedText {
796 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
797 return HighlightedText::default();
798 };
799
800 let mut highlighted_text = HighlightedTextBuilder::default();
801
802 let visible_range_in_preview_snapshot =
803 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
804 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
805
806 let insertion_highlight_style = HighlightStyle {
807 background_color: Some(cx.theme().status().created_background),
808 ..Default::default()
809 };
810 let deletion_highlight_style = HighlightStyle {
811 background_color: Some(cx.theme().status().deleted_background),
812 ..Default::default()
813 };
814 let syntax_theme = cx.theme().syntax();
815
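        // Walk the edits in order. For each edit, emit the unchanged text since the
        // previous edit, optionally the deleted text (from the current snapshot), and
        // then the inserted text (from the snapshot with the edits applied).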
816 for (range, edit_text) in edits {
817 let edit_new_end_in_preview_snapshot = range
818 .end
819 .bias_right(&self.old_snapshot)
820 .to_offset(&self.applied_edits_snapshot);
821 let edit_start_in_preview_snapshot =
822 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
823
824 let unchanged_range_in_preview_snapshot =
825 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
826 if !unchanged_range_in_preview_snapshot.is_empty() {
827 highlighted_text.add_text_from_buffer_range(
828 unchanged_range_in_preview_snapshot,
829 &self.applied_edits_snapshot,
830 &self.syntax_snapshot,
831 None,
832 syntax_theme,
833 );
834 }
835
836 let range_in_current_snapshot = range.to_offset(current_snapshot);
837 if include_deletions && !range_in_current_snapshot.is_empty() {
838 highlighted_text.add_text_from_buffer_range(
839 range_in_current_snapshot,
                &current_snapshot.text,
                &current_snapshot.syntax,
842 Some(deletion_highlight_style),
843 syntax_theme,
844 );
845 }
846
847 if !edit_text.as_ref().is_empty() {
848 highlighted_text.add_text_from_buffer_range(
849 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
850 &self.applied_edits_snapshot,
851 &self.syntax_snapshot,
852 Some(insertion_highlight_style),
853 syntax_theme,
854 );
855 }
856
857 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
858 }
859
860 highlighted_text.add_text_from_buffer_range(
861 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
862 &self.applied_edits_snapshot,
863 &self.syntax_snapshot,
864 None,
865 syntax_theme,
866 );
867
868 highlighted_text.build()
869 }
870
871 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
872 cx.new(|cx| {
873 let mut buffer = Buffer::local_normalized(
874 self.applied_edits_snapshot.as_rope().clone(),
875 self.applied_edits_snapshot.line_ending(),
876 cx,
877 );
878 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
879 buffer
880 })
881 }
882
883 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
884 let (first, _) = edits.first()?;
885 let (last, _) = edits.last()?;
886
887 let start = first
888 .start
889 .bias_left(&self.old_snapshot)
890 .to_point(&self.applied_edits_snapshot);
891 let end = last
892 .end
893 .bias_right(&self.old_snapshot)
894 .to_point(&self.applied_edits_snapshot);
895
896 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
897 let range = Point::new(start.row, 0)
898 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
899
900 Some(range)
901 }
902}
903
904#[derive(Clone, Debug, PartialEq, Eq)]
905pub struct BracketMatch<T> {
906 pub open_range: Range<T>,
907 pub close_range: Range<T>,
908 pub newline_only: bool,
909 pub syntax_layer_depth: usize,
910 pub color_index: Option<usize>,
911}
912
913impl<T> BracketMatch<T> {
914 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
915 (self.open_range, self.close_range)
916 }
917}
918
919impl Buffer {
920 /// Create a new buffer with the given base text.
921 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
922 Self::build(
923 TextBuffer::new(
924 ReplicaId::LOCAL,
925 cx.entity_id().as_non_zero_u64().into(),
926 base_text.into(),
927 ),
928 None,
929 Capability::ReadWrite,
930 )
931 }
932
933 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
934 pub fn local_normalized(
935 base_text_normalized: Rope,
936 line_ending: LineEnding,
937 cx: &Context<Self>,
938 ) -> Self {
939 Self::build(
940 TextBuffer::new_normalized(
941 ReplicaId::LOCAL,
942 cx.entity_id().as_non_zero_u64().into(),
943 line_ending,
944 base_text_normalized,
945 ),
946 None,
947 Capability::ReadWrite,
948 )
949 }
950
951 /// Create a new buffer that is a replica of a remote buffer.
952 pub fn remote(
953 remote_id: BufferId,
954 replica_id: ReplicaId,
955 capability: Capability,
956 base_text: impl Into<String>,
957 ) -> Self {
958 Self::build(
959 TextBuffer::new(replica_id, remote_id, base_text.into()),
960 None,
961 capability,
962 )
963 }
964
965 /// Create a new buffer that is a replica of a remote buffer, populating its
966 /// state from the given protobuf message.
967 pub fn from_proto(
968 replica_id: ReplicaId,
969 capability: Capability,
970 message: proto::BufferState,
971 file: Option<Arc<dyn File>>,
972 ) -> Result<Self> {
973 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
974 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
975 let mut this = Self::build(buffer, file, capability);
976 this.text.set_line_ending(proto::deserialize_line_ending(
977 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
978 ));
979 this.saved_version = proto::deserialize_version(&message.saved_version);
980 this.saved_mtime = message.saved_mtime.map(|time| time.into());
981 Ok(this)
982 }
983
984 /// Serialize the buffer's state to a protobuf message.
985 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
986 proto::BufferState {
987 id: self.remote_id().into(),
988 file: self.file.as_ref().map(|f| f.to_proto(cx)),
989 base_text: self.base_text().to_string(),
990 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
991 saved_version: proto::serialize_version(&self.saved_version),
992 saved_mtime: self.saved_mtime.map(|time| time.into()),
993 }
994 }
995
996 /// Serialize as protobufs all of the changes to the buffer since the given version.
997 pub fn serialize_ops(
998 &self,
999 since: Option<clock::Global>,
1000 cx: &App,
1001 ) -> Task<Vec<proto::Operation>> {
1002 let mut operations = Vec::new();
1003 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1004
1005 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1006 proto::serialize_operation(&Operation::UpdateSelections {
1007 selections: set.selections.clone(),
1008 lamport_timestamp: set.lamport_timestamp,
1009 line_mode: set.line_mode,
1010 cursor_shape: set.cursor_shape,
1011 })
1012 }));
1013
1014 for (server_id, diagnostics) in &self.diagnostics {
1015 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1016 lamport_timestamp: self.diagnostics_timestamp,
1017 server_id: *server_id,
1018 diagnostics: diagnostics.iter().cloned().collect(),
1019 }));
1020 }
1021
1022 for (server_id, completions) in &self.completion_triggers_per_language_server {
1023 operations.push(proto::serialize_operation(
1024 &Operation::UpdateCompletionTriggers {
1025 triggers: completions.iter().cloned().collect(),
1026 lamport_timestamp: self.completion_triggers_timestamp,
1027 server_id: *server_id,
1028 },
1029 ));
1030 }
1031
1032 let text_operations = self.text.operations().clone();
1033 cx.background_spawn(async move {
1034 let since = since.unwrap_or_default();
1035 operations.extend(
1036 text_operations
1037 .iter()
1038 .filter(|(_, op)| !since.observed(op.timestamp()))
1039 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1040 );
1041 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1042 operations
1043 })
1044 }
1045
1046 /// Assign a language to the buffer, returning the buffer.
1047 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1048 self.set_language_async(Some(language), cx);
1049 self
1050 }
1051
1052 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
1053 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1054 self.set_language(Some(language), cx);
1055 self
1056 }
1057
1058 /// Returns the [`Capability`] of this buffer.
1059 pub fn capability(&self) -> Capability {
1060 self.capability
1061 }
1062
1063 /// Whether this buffer can only be read.
1064 pub fn read_only(&self) -> bool {
1065 self.capability == Capability::ReadOnly
1066 }
1067
1068 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
1069 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1070 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1071 let snapshot = buffer.snapshot();
1072 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1073 let tree_sitter_data = TreeSitterData::new(snapshot);
1074 Self {
1075 saved_mtime,
1076 tree_sitter_data: Arc::new(tree_sitter_data),
1077 saved_version: buffer.version(),
1078 preview_version: buffer.version(),
1079 reload_task: None,
1080 transaction_depth: 0,
1081 was_dirty_before_starting_transaction: None,
1082 has_unsaved_edits: Cell::new((buffer.version(), false)),
1083 text: buffer,
1084 branch_state: None,
1085 file,
1086 capability,
1087 syntax_map,
1088 reparse: None,
1089 non_text_state_update_count: 0,
1090 sync_parse_timeout: Duration::from_millis(1),
1091 parse_status: watch::channel(ParseStatus::Idle),
1092 autoindent_requests: Default::default(),
1093 wait_for_autoindent_txs: Default::default(),
1094 pending_autoindent: Default::default(),
1095 language: None,
1096 remote_selections: Default::default(),
1097 diagnostics: Default::default(),
1098 diagnostics_timestamp: Lamport::MIN,
1099 completion_triggers: Default::default(),
1100 completion_triggers_per_language_server: Default::default(),
1101 completion_triggers_timestamp: Lamport::MIN,
1102 deferred_ops: OperationQueue::new(),
1103 has_conflict: false,
1104 change_bits: Default::default(),
1105 _subscriptions: Vec::new(),
1106 encoding: encoding_rs::UTF_8,
1107 has_bom: false,
1108 }
1109 }
1110
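    /// Returns a future that builds a standalone [`BufferSnapshot`] for the given text,
    /// reparsing it with the given language, if one is provided.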
1111 pub fn build_snapshot(
1112 text: Rope,
1113 language: Option<Arc<Language>>,
1114 language_registry: Option<Arc<LanguageRegistry>>,
1115 cx: &mut App,
1116 ) -> impl Future<Output = BufferSnapshot> + use<> {
1117 let entity_id = cx.reserve_entity::<Self>().entity_id();
1118 let buffer_id = entity_id.as_non_zero_u64().into();
1119 async move {
1120 let text =
1121 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1122 .snapshot();
1123 let mut syntax = SyntaxMap::new(&text).snapshot();
1124 if let Some(language) = language.clone() {
1125 let language_registry = language_registry.clone();
1126 syntax.reparse(&text, language_registry, language);
1127 }
1128 let tree_sitter_data = TreeSitterData::new(text.clone());
1129 BufferSnapshot {
1130 text,
1131 syntax,
1132 file: None,
1133 diagnostics: Default::default(),
1134 remote_selections: Default::default(),
1135 tree_sitter_data: Arc::new(tree_sitter_data),
1136 language,
1137 non_text_state_update_count: 0,
1138 }
1139 }
1140 }
1141
1142 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1143 let entity_id = cx.reserve_entity::<Self>().entity_id();
1144 let buffer_id = entity_id.as_non_zero_u64().into();
1145 let text = TextBuffer::new_normalized(
1146 ReplicaId::LOCAL,
1147 buffer_id,
1148 Default::default(),
1149 Rope::new(),
1150 )
1151 .snapshot();
1152 let syntax = SyntaxMap::new(&text).snapshot();
1153 let tree_sitter_data = TreeSitterData::new(text.clone());
1154 BufferSnapshot {
1155 text,
1156 syntax,
1157 tree_sitter_data: Arc::new(tree_sitter_data),
1158 file: None,
1159 diagnostics: Default::default(),
1160 remote_selections: Default::default(),
1161 language: None,
1162 non_text_state_update_count: 0,
1163 }
1164 }
1165
1166 #[cfg(any(test, feature = "test-support"))]
1167 pub fn build_snapshot_sync(
1168 text: Rope,
1169 language: Option<Arc<Language>>,
1170 language_registry: Option<Arc<LanguageRegistry>>,
1171 cx: &mut App,
1172 ) -> BufferSnapshot {
1173 let entity_id = cx.reserve_entity::<Self>().entity_id();
1174 let buffer_id = entity_id.as_non_zero_u64().into();
1175 let text =
1176 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1177 .snapshot();
1178 let mut syntax = SyntaxMap::new(&text).snapshot();
1179 if let Some(language) = language.clone() {
1180 syntax.reparse(&text, language_registry, language);
1181 }
1182 let tree_sitter_data = TreeSitterData::new(text.clone());
1183 BufferSnapshot {
1184 text,
1185 syntax,
1186 tree_sitter_data: Arc::new(tree_sitter_data),
1187 file: None,
1188 diagnostics: Default::default(),
1189 remote_selections: Default::default(),
1190 language,
1191 non_text_state_update_count: 0,
1192 }
1193 }
1194
1195 /// Retrieve a snapshot of the buffer's current state. This is computationally
1196 /// cheap, and allows reading from the buffer on a background thread.
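    ///
    /// A minimal sketch (not compiled here; it assumes an `Entity<Buffer>` named
    /// `buffer` and an `&mut App` named `cx`):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot can be read freely off the main thread.
    ///     let line_count = snapshot.max_point().row + 1;
    ///     println!("{line_count} lines");
    /// })
    /// .detach();
    /// ```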
1197 pub fn snapshot(&self) -> BufferSnapshot {
1198 let text = self.text.snapshot();
1199 let mut syntax_map = self.syntax_map.lock();
1200 syntax_map.interpolate(&text);
1201 let syntax = syntax_map.snapshot();
1202
1203 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1204 Arc::new(TreeSitterData::new(text.clone()))
1205 } else {
1206 self.tree_sitter_data.clone()
1207 };
1208
1209 BufferSnapshot {
1210 text,
1211 syntax,
1212 tree_sitter_data,
1213 file: self.file.clone(),
1214 remote_selections: self.remote_selections.clone(),
1215 diagnostics: self.diagnostics.clone(),
1216 language: self.language.clone(),
1217 non_text_state_update_count: self.non_text_state_update_count,
1218 }
1219 }
1220
1221 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1222 let this = cx.entity();
1223 cx.new(|cx| {
1224 let mut branch = Self {
1225 branch_state: Some(BufferBranchState {
1226 base_buffer: this.clone(),
1227 merged_operations: Default::default(),
1228 }),
1229 language: self.language.clone(),
1230 has_conflict: self.has_conflict,
1231 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1232 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1233 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1234 };
1235 if let Some(language_registry) = self.language_registry() {
1236 branch.set_language_registry(language_registry);
1237 }
1238
1239 // Reparse the branch buffer so that we get syntax highlighting immediately.
1240 branch.reparse(cx, true);
1241
1242 branch
1243 })
1244 }
1245
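    /// Computes an [`EditPreview`] for the given edits by applying them to a branch of
    /// this buffer's text on a background thread and reparsing the result, so that the
    /// preview can be syntax highlighted.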
1246 pub fn preview_edits(
1247 &self,
1248 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1249 cx: &App,
1250 ) -> Task<EditPreview> {
1251 let registry = self.language_registry();
1252 let language = self.language().cloned();
1253 let old_snapshot = self.text.snapshot();
1254 let mut branch_buffer = self.text.branch();
1255 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1256 cx.background_spawn(async move {
1257 if !edits.is_empty() {
1258 if let Some(language) = language.clone() {
1259 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1260 }
1261
1262 branch_buffer.edit(edits.iter().cloned());
1263 let snapshot = branch_buffer.snapshot();
1264 syntax_snapshot.interpolate(&snapshot);
1265
1266 if let Some(language) = language {
1267 syntax_snapshot.reparse(&snapshot, registry, language);
1268 }
1269 }
1270 EditPreview {
1271 old_snapshot,
1272 applied_edits_snapshot: branch_buffer.snapshot(),
1273 syntax_snapshot,
1274 }
1275 })
1276 }
1277
1278 /// Applies all of the changes in this buffer that intersect any of the
1279 /// given `ranges` to its base buffer.
1280 ///
1281 /// If `ranges` is empty, then all changes will be applied. This buffer must
1282 /// be a branch buffer to call this method.
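    ///
    /// A minimal sketch (not compiled here; it assumes we are inside
    /// `branch.update(cx, |branch, cx| ...)` for a branch buffer created via
    /// [`Buffer::branch`]):
    ///
    /// ```ignore
    /// branch.edit([(0..0, "// new header\n")], None, cx);
    /// // Passing an empty `ranges` vec applies every change in the branch to its base.
    /// branch.merge_into_base(Vec::new(), cx);
    /// ```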
1283 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1284 let Some(base_buffer) = self.base_buffer() else {
1285 debug_panic!("not a branch buffer");
1286 return;
1287 };
1288
1289 let mut ranges = if ranges.is_empty() {
1290 &[0..usize::MAX]
1291 } else {
1292 ranges.as_slice()
1293 }
1294 .iter()
1295 .peekable();
1296
1297 let mut edits = Vec::new();
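        // Collect the branch's edits since the base buffer's version, keeping only
        // those that intersect one of the requested ranges.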
1298 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1299 let mut is_included = false;
1300 while let Some(range) = ranges.peek() {
1301 if range.end < edit.new.start {
1302 ranges.next().unwrap();
1303 } else {
1304 if range.start <= edit.new.end {
1305 is_included = true;
1306 }
1307 break;
1308 }
1309 }
1310
1311 if is_included {
1312 edits.push((
1313 edit.old.clone(),
1314 self.text_for_range(edit.new.clone()).collect::<String>(),
1315 ));
1316 }
1317 }
1318
1319 let operation = base_buffer.update(cx, |base_buffer, cx| {
1320 // cx.emit(BufferEvent::DiffBaseChanged);
1321 base_buffer.edit(edits, None, cx)
1322 });
1323
1324 if let Some(operation) = operation
1325 && let Some(BufferBranchState {
1326 merged_operations, ..
1327 }) = &mut self.branch_state
1328 {
1329 merged_operations.push(operation);
1330 }
1331 }
1332
1333 fn on_base_buffer_event(
1334 &mut self,
1335 _: Entity<Buffer>,
1336 event: &BufferEvent,
1337 cx: &mut Context<Self>,
1338 ) {
1339 let BufferEvent::Operation { operation, .. } = event else {
1340 return;
1341 };
1342 let Some(BufferBranchState {
1343 merged_operations, ..
1344 }) = &mut self.branch_state
1345 else {
1346 return;
1347 };
1348
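        // If this edit was produced by merging this branch into the base buffer, the
        // branch already contains its text. Apply the operation to keep the replicas
        // converged, then immediately undo it so the text is not duplicated.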
1349 let mut operation_to_undo = None;
1350 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1351 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1352 {
1353 merged_operations.remove(ix);
1354 operation_to_undo = Some(operation.timestamp);
1355 }
1356
1357 self.apply_ops([operation.clone()], cx);
1358
1359 if let Some(timestamp) = operation_to_undo {
1360 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1361 self.undo_operations(counts, cx);
1362 }
1363 }
1364
1365 #[cfg(test)]
1366 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1367 &self.text
1368 }
1369
1370 /// Retrieve a snapshot of the buffer's raw text, without any
1371 /// language-related state like the syntax tree or diagnostics.
1372 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1373 self.text.snapshot()
1374 }
1375
1376 /// The file associated with the buffer, if any.
1377 pub fn file(&self) -> Option<&Arc<dyn File>> {
1378 self.file.as_ref()
1379 }
1380
1381 /// The version of the buffer that was last saved or reloaded from disk.
1382 pub fn saved_version(&self) -> &clock::Global {
1383 &self.saved_version
1384 }
1385
1386 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1387 pub fn saved_mtime(&self) -> Option<MTime> {
1388 self.saved_mtime
1389 }
1390
1391 /// Returns the character encoding of the buffer's file.
1392 pub fn encoding(&self) -> &'static Encoding {
1393 self.encoding
1394 }
1395
1396 /// Sets the character encoding of the buffer.
1397 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1398 self.encoding = encoding;
1399 }
1400
1401 /// Returns whether the buffer has a Byte Order Mark.
1402 pub fn has_bom(&self) -> bool {
1403 self.has_bom
1404 }
1405
1406 /// Sets whether the buffer has a Byte Order Mark.
1407 pub fn set_has_bom(&mut self, has_bom: bool) {
1408 self.has_bom = has_bom;
1409 }
1410
1411 /// Assign a language to the buffer.
1412 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1413 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1414 }
1415
1416 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1417 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1418 self.set_language_(language, true, cx);
1419 }
1420
1421 fn set_language_(
1422 &mut self,
1423 language: Option<Arc<Language>>,
1424 may_block: bool,
1425 cx: &mut Context<Self>,
1426 ) {
1427 self.non_text_state_update_count += 1;
1428 self.syntax_map.lock().clear(&self.text);
1429 let old_language = std::mem::replace(&mut self.language, language);
1430 self.was_changed();
1431 self.reparse(cx, may_block);
1432 let has_fresh_language =
1433 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1434 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1435 }
1436
1437 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1438 /// other languages if parts of the buffer are written in different languages.
1439 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1440 self.syntax_map
1441 .lock()
1442 .set_language_registry(language_registry);
1443 }
1444
1445 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1446 self.syntax_map.lock().language_registry()
1447 }
1448
1449 /// Assign the line ending type to the buffer.
1450 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1451 self.text.set_line_ending(line_ending);
1452
1453 let lamport_timestamp = self.text.lamport_clock.tick();
1454 self.send_operation(
1455 Operation::UpdateLineEnding {
1456 line_ending,
1457 lamport_timestamp,
1458 },
1459 true,
1460 cx,
1461 );
1462 }
1463
1464 /// Assign the buffer a new [`Capability`].
1465 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1466 if self.capability != capability {
1467 self.capability = capability;
1468 cx.emit(BufferEvent::CapabilityChanged)
1469 }
1470 }
1471
1472 /// This method is called to signal that the buffer has been saved.
1473 pub fn did_save(
1474 &mut self,
1475 version: clock::Global,
1476 mtime: Option<MTime>,
1477 cx: &mut Context<Self>,
1478 ) {
1479 self.saved_version = version.clone();
1480 self.has_unsaved_edits.set((version, false));
1481 self.has_conflict = false;
1482 self.saved_mtime = mtime;
1483 self.was_changed();
1484 cx.emit(BufferEvent::Saved);
1485 cx.notify();
1486 }
1487
1488 /// Reloads the contents of the buffer from disk.
1489 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1490 let (tx, rx) = futures::channel::oneshot::channel();
1491 let prev_version = self.text.version();
1492 self.reload_task = Some(cx.spawn(async move |this, cx| {
1493 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1494 let file = this.file.as_ref()?.as_local()?;
1495
1496 Some((file.disk_state().mtime(), file.load(cx)))
1497 })?
1498 else {
1499 return Ok(());
1500 };
1501
1502 let new_text = new_text.await?;
1503 let diff = this
1504 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1505 .await;
1506 this.update(cx, |this, cx| {
1507 if this.version() == diff.base_version {
1508 this.finalize_last_transaction();
1509 this.apply_diff(diff, cx);
1510 tx.send(this.finalize_last_transaction().cloned()).ok();
1511 this.has_conflict = false;
1512 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1513 } else {
1514 if !diff.edits.is_empty()
1515 || this
1516 .edits_since::<usize>(&diff.base_version)
1517 .next()
1518 .is_some()
1519 {
1520 this.has_conflict = true;
1521 }
1522
1523 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1524 }
1525
1526 this.reload_task.take();
1527 })
1528 }));
1529 rx
1530 }
1531
1532 /// This method is called to signal that the buffer has been reloaded.
1533 pub fn did_reload(
1534 &mut self,
1535 version: clock::Global,
1536 line_ending: LineEnding,
1537 mtime: Option<MTime>,
1538 cx: &mut Context<Self>,
1539 ) {
1540 self.saved_version = version;
1541 self.has_unsaved_edits
1542 .set((self.saved_version.clone(), false));
1543 self.text.set_line_ending(line_ending);
1544 self.saved_mtime = mtime;
1545 cx.emit(BufferEvent::Reloaded);
1546 cx.notify();
1547 }
1548
1549 /// Updates the [`File`] backing this buffer. This should be called when
1550 /// the file has changed or has been deleted.
1551 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1552 let was_dirty = self.is_dirty();
1553 let mut file_changed = false;
1554
1555 if let Some(old_file) = self.file.as_ref() {
1556 if new_file.path() != old_file.path() {
1557 file_changed = true;
1558 }
1559
1560 let old_state = old_file.disk_state();
1561 let new_state = new_file.disk_state();
1562 if old_state != new_state {
1563 file_changed = true;
1564 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1565 cx.emit(BufferEvent::ReloadNeeded)
1566 }
1567 }
1568 } else {
1569 file_changed = true;
1570 };
1571
1572 self.file = Some(new_file);
1573 if file_changed {
1574 self.was_changed();
1575 self.non_text_state_update_count += 1;
1576 if was_dirty != self.is_dirty() {
1577 cx.emit(BufferEvent::DirtyChanged);
1578 }
1579 cx.emit(BufferEvent::FileHandleChanged);
1580 cx.notify();
1581 }
1582 }
1583
1584 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1585 Some(self.branch_state.as_ref()?.base_buffer.clone())
1586 }
1587
1588 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1589 pub fn language(&self) -> Option<&Arc<Language>> {
1590 self.language.as_ref()
1591 }
1592
1593 /// Returns the [`Language`] at the given location.
1594 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1595 let offset = position.to_offset(self);
1596 let mut is_first = true;
1597 let start_anchor = self.anchor_before(offset);
1598 let end_anchor = self.anchor_after(offset);
1599 self.syntax_map
1600 .lock()
1601 .layers_for_range(offset..offset, &self.text, false)
1602 .filter(|layer| {
1603 if is_first {
1604 is_first = false;
1605 return true;
1606 }
1607
1608 layer
1609 .included_sub_ranges
1610 .map(|sub_ranges| {
1611 sub_ranges.iter().any(|sub_range| {
1612 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1613 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1614 !is_before_start && !is_after_end
1615 })
1616 })
1617 .unwrap_or(true)
1618 })
1619 .last()
1620 .map(|info| info.language.clone())
1621 .or_else(|| self.language.clone())
1622 }
1623
1624 /// Returns each [`Language`] for the active syntax layers at the given location.
1625 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1626 let offset = position.to_offset(self);
1627 let mut languages: Vec<Arc<Language>> = self
1628 .syntax_map
1629 .lock()
1630 .layers_for_range(offset..offset, &self.text, false)
1631 .map(|info| info.language.clone())
1632 .collect();
1633
1634 if languages.is_empty()
1635 && let Some(buffer_language) = self.language()
1636 {
1637 languages.push(buffer_language.clone());
1638 }
1639
1640 languages
1641 }
1642
1643 /// An integer version number that accounts for all updates besides
1644 /// the buffer's text itself (which is versioned via a version vector).
1645 pub fn non_text_state_update_count(&self) -> usize {
1646 self.non_text_state_update_count
1647 }
1648
1649 /// Whether the buffer is being parsed in the background.
1650 #[cfg(any(test, feature = "test-support"))]
1651 pub fn is_parsing(&self) -> bool {
1652 self.reparse.is_some()
1653 }
1654
1655 /// Indicates whether the buffer contains any regions that may be
1656 /// written in a language that hasn't been loaded yet.
1657 pub fn contains_unknown_injections(&self) -> bool {
1658 self.syntax_map.lock().contains_unknown_injections()
1659 }
1660
1661 #[cfg(any(test, feature = "test-support"))]
1662 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1663 self.sync_parse_timeout = timeout;
1664 }
1665
1666 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1667 match Arc::get_mut(&mut self.tree_sitter_data) {
1668 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1669 None => {
1670 let tree_sitter_data = TreeSitterData::new(snapshot);
1671 self.tree_sitter_data = Arc::new(tree_sitter_data)
1672 }
1673 }
1674 }
1675
1676 /// Called after an edit to synchronize the buffer's main parse tree with
1677 /// the buffer's new underlying state.
1678 ///
1679 /// Locks the syntax map and interpolates the edits since the last reparse
1680 /// into the foreground syntax tree.
1681 ///
1682 /// Then takes a stable snapshot of the syntax map before unlocking it.
1683 /// The snapshot with the interpolated edits is sent to a background thread,
1684 /// where we ask Tree-sitter to perform an incremental parse.
1685 ///
    /// Meanwhile, in the foreground, if `may_block` is true we block the main
    /// thread for up to 1ms waiting for the parse to complete. If it completes
    /// within that timeout, we proceed synchronously with the new syntax tree.
1689 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and return with the interpolated tree still
    /// in the foreground. When the background parse completes, it calls back into
    /// the main thread and assigns the completed parse state to the foreground.
1694 ///
1695 /// If the buffer or grammar changed since the start of the background parse,
1696 /// initiate an additional reparse recursively. To avoid concurrent parses
1697 /// for the same buffer, we only initiate a new parse if we are not already
1698 /// parsing in the background.
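    ///
    /// A minimal sketch (not compiled here; it assumes a `Buffer` named `buffer` and a
    /// `&mut Context<Buffer>` named `cx`):
    ///
    /// ```ignore
    /// buffer.reparse(cx, false);          // kick off a background parse without blocking
    /// let idle = buffer.parsing_idle();   // future that resolves once no reparse is pending
    /// cx.spawn(async move |_, _| idle.await).detach();
    /// ```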
1699 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1700 if self.text.version() != *self.tree_sitter_data.version() {
1701 self.invalidate_tree_sitter_data(self.text.snapshot());
1702 }
1703 if self.reparse.is_some() {
1704 return;
1705 }
1706 let language = if let Some(language) = self.language.clone() {
1707 language
1708 } else {
1709 return;
1710 };
1711
1712 let text = self.text_snapshot();
1713 let parsed_version = self.version();
1714
1715 let mut syntax_map = self.syntax_map.lock();
1716 syntax_map.interpolate(&text);
1717 let language_registry = syntax_map.language_registry();
1718 let mut syntax_snapshot = syntax_map.snapshot();
1719 drop(syntax_map);
1720
1721 let parse_task = cx.background_spawn({
1722 let language = language.clone();
1723 let language_registry = language_registry.clone();
1724 async move {
1725 syntax_snapshot.reparse(&text, language_registry, language);
1726 syntax_snapshot
1727 }
1728 });
1729
1730 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1731 if may_block {
1732 match cx
1733 .background_executor()
1734 .block_with_timeout(self.sync_parse_timeout, parse_task)
1735 {
1736 Ok(new_syntax_snapshot) => {
1737 self.did_finish_parsing(new_syntax_snapshot, cx);
1738 self.reparse = None;
1739 }
1740 Err(parse_task) => {
1741 self.reparse = Some(cx.spawn(async move |this, cx| {
1742 let new_syntax_map = cx.background_spawn(parse_task).await;
1743 this.update(cx, move |this, cx| {
1744 let grammar_changed = || {
1745 this.language.as_ref().is_none_or(|current_language| {
1746 !Arc::ptr_eq(&language, current_language)
1747 })
1748 };
1749 let language_registry_changed = || {
1750 new_syntax_map.contains_unknown_injections()
1751 && language_registry.is_some_and(|registry| {
1752 registry.version()
1753 != new_syntax_map.language_registry_version()
1754 })
1755 };
1756 let parse_again = this.version.changed_since(&parsed_version)
1757 || language_registry_changed()
1758 || grammar_changed();
1759 this.did_finish_parsing(new_syntax_map, cx);
1760 this.reparse = None;
1761 if parse_again {
1762 this.reparse(cx, false);
1763 }
1764 })
1765 .ok();
1766 }));
1767 }
1768 }
1769 } else {
1770 self.reparse = Some(cx.spawn(async move |this, cx| {
1771 let new_syntax_map = cx.background_spawn(parse_task).await;
1772 this.update(cx, move |this, cx| {
1773 let grammar_changed = || {
1774 this.language.as_ref().is_none_or(|current_language| {
1775 !Arc::ptr_eq(&language, current_language)
1776 })
1777 };
1778 let language_registry_changed = || {
1779 new_syntax_map.contains_unknown_injections()
1780 && language_registry.is_some_and(|registry| {
1781 registry.version() != new_syntax_map.language_registry_version()
1782 })
1783 };
1784 let parse_again = this.version.changed_since(&parsed_version)
1785 || language_registry_changed()
1786 || grammar_changed();
1787 this.did_finish_parsing(new_syntax_map, cx);
1788 this.reparse = None;
1789 if parse_again {
1790 this.reparse(cx, false);
1791 }
1792 })
1793 .ok();
1794 }));
1795 }
1796 }
1797
1798 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1799 self.was_changed();
1800 self.non_text_state_update_count += 1;
1801 self.syntax_map.lock().did_parse(syntax_snapshot);
1802 self.request_autoindent(cx);
1803 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1804 self.invalidate_tree_sitter_data(self.text.snapshot());
1805 cx.emit(BufferEvent::Reparsed);
1806 cx.notify();
1807 }
1808
1809 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1810 self.parse_status.1.clone()
1811 }
1812
1813 /// Wait until the buffer is no longer parsing
1814 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1815 let mut parse_status = self.parse_status();
1816 async move {
1817 while *parse_status.borrow() != ParseStatus::Idle {
1818 if parse_status.changed().await.is_err() {
1819 break;
1820 }
1821 }
1822 }
1823 }
1824
1825 /// Assign to the buffer a set of diagnostics created by a given language server.
1826 pub fn update_diagnostics(
1827 &mut self,
1828 server_id: LanguageServerId,
1829 diagnostics: DiagnosticSet,
1830 cx: &mut Context<Self>,
1831 ) {
1832 let lamport_timestamp = self.text.lamport_clock.tick();
1833 let op = Operation::UpdateDiagnostics {
1834 server_id,
1835 diagnostics: diagnostics.iter().cloned().collect(),
1836 lamport_timestamp,
1837 };
1838
1839 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1840 self.send_operation(op, true, cx);
1841 }
1842
1843 pub fn buffer_diagnostics(
1844 &self,
1845 for_server: Option<LanguageServerId>,
1846 ) -> Vec<&DiagnosticEntry<Anchor>> {
1847 match for_server {
1848 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1849 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1850 Err(_) => Vec::new(),
1851 },
1852 None => self
1853 .diagnostics
1854 .iter()
1855 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1856 .collect(),
1857 }
1858 }
1859
1860 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1861 if let Some(indent_sizes) = self.compute_autoindents() {
1862 let indent_sizes = cx.background_spawn(indent_sizes);
1863 match cx
1864 .background_executor()
1865 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1866 {
1867 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1868 Err(indent_sizes) => {
1869 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1870 let indent_sizes = indent_sizes.await;
1871 this.update(cx, |this, cx| {
1872 this.apply_autoindents(indent_sizes, cx);
1873 })
1874 .ok();
1875 }));
1876 }
1877 }
1878 } else {
1879 self.autoindent_requests.clear();
1880 for tx in self.wait_for_autoindent_txs.drain(..) {
1881 tx.send(()).ok();
1882 }
1883 }
1884 }
1885
1886 fn compute_autoindents(
1887 &self,
1888 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1889 let max_rows_between_yields = 100;
1890 let snapshot = self.snapshot();
1891 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1892 return None;
1893 }
1894
1895 let autoindent_requests = self.autoindent_requests.clone();
1896 Some(async move {
1897 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1898 for request in autoindent_requests {
1899 // Resolve each edited range to its row in the current buffer and in the
1900 // buffer before this batch of edits.
1901 let mut row_ranges = Vec::new();
1902 let mut old_to_new_rows = BTreeMap::new();
1903 let mut language_indent_sizes_by_new_row = Vec::new();
1904 for entry in &request.entries {
1905 let position = entry.range.start;
1906 let new_row = position.to_point(&snapshot).row;
1907 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1908 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1909
1910 if !entry.first_line_is_new {
1911 let old_row = position.to_point(&request.before_edit).row;
1912 old_to_new_rows.insert(old_row, new_row);
1913 }
1914 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1915 }
1916
1917 // Build a map containing the suggested indentation for each of the edited lines
1918 // with respect to the state of the buffer before these edits. This map is keyed
1919 // by the rows for these lines in the current state of the buffer.
1920 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1921 let old_edited_ranges =
1922 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1923 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1924 let mut language_indent_size = IndentSize::default();
1925 for old_edited_range in old_edited_ranges {
1926 let suggestions = request
1927 .before_edit
1928 .suggest_autoindents(old_edited_range.clone())
1929 .into_iter()
1930 .flatten();
1931 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1932 if let Some(suggestion) = suggestion {
1933 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1934
1935 // Find the indent size based on the language for this row.
1936 while let Some((row, size)) = language_indent_sizes.peek() {
1937 if *row > new_row {
1938 break;
1939 }
1940 language_indent_size = *size;
1941 language_indent_sizes.next();
1942 }
1943
1944 let suggested_indent = old_to_new_rows
1945 .get(&suggestion.basis_row)
1946 .and_then(|from_row| {
1947 Some(old_suggestions.get(from_row).copied()?.0)
1948 })
1949 .unwrap_or_else(|| {
1950 request
1951 .before_edit
1952 .indent_size_for_line(suggestion.basis_row)
1953 })
1954 .with_delta(suggestion.delta, language_indent_size);
1955 old_suggestions
1956 .insert(new_row, (suggested_indent, suggestion.within_error));
1957 }
1958 }
1959 yield_now().await;
1960 }
1961
1962 // Compute new suggestions for each line, but only include them in the result
1963 // if they differ from the old suggestion for that line.
1964 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1965 let mut language_indent_size = IndentSize::default();
1966 for (row_range, original_indent_column) in row_ranges {
1967 let new_edited_row_range = if request.is_block_mode {
1968 row_range.start..row_range.start + 1
1969 } else {
1970 row_range.clone()
1971 };
1972
1973 let suggestions = snapshot
1974 .suggest_autoindents(new_edited_row_range.clone())
1975 .into_iter()
1976 .flatten();
1977 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1978 if let Some(suggestion) = suggestion {
1979 // Find the indent size based on the language for this row.
1980 while let Some((row, size)) = language_indent_sizes.peek() {
1981 if *row > new_row {
1982 break;
1983 }
1984 language_indent_size = *size;
1985 language_indent_sizes.next();
1986 }
1987
1988 let suggested_indent = indent_sizes
1989 .get(&suggestion.basis_row)
1990 .copied()
1991 .map(|e| e.0)
1992 .unwrap_or_else(|| {
1993 snapshot.indent_size_for_line(suggestion.basis_row)
1994 })
1995 .with_delta(suggestion.delta, language_indent_size);
1996
1997 if old_suggestions.get(&new_row).is_none_or(
1998 |(old_indentation, was_within_error)| {
1999 suggested_indent != *old_indentation
2000 && (!suggestion.within_error || *was_within_error)
2001 },
2002 ) {
2003 indent_sizes.insert(
2004 new_row,
2005 (suggested_indent, request.ignore_empty_lines),
2006 );
2007 }
2008 }
2009 }
2010
2011 if let (true, Some(original_indent_column)) =
2012 (request.is_block_mode, original_indent_column)
2013 {
2014 let new_indent =
2015 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2016 *indent
2017 } else {
2018 snapshot.indent_size_for_line(row_range.start)
2019 };
2020 let delta = new_indent.len as i64 - original_indent_column as i64;
2021 if delta != 0 {
2022 for row in row_range.skip(1) {
2023 indent_sizes.entry(row).or_insert_with(|| {
2024 let mut size = snapshot.indent_size_for_line(row);
2025 if size.kind == new_indent.kind {
2026 match delta.cmp(&0) {
2027 Ordering::Greater => size.len += delta as u32,
2028 Ordering::Less => {
2029 size.len = size.len.saturating_sub(-delta as u32)
2030 }
2031 Ordering::Equal => {}
2032 }
2033 }
2034 (size, request.ignore_empty_lines)
2035 });
2036 }
2037 }
2038 }
2039
2040 yield_now().await;
2041 }
2042 }
2043
2044 indent_sizes
2045 .into_iter()
2046 .filter_map(|(row, (indent, ignore_empty_lines))| {
2047 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2048 None
2049 } else {
2050 Some((row, indent))
2051 }
2052 })
2053 .collect()
2054 })
2055 }
2056
2057 fn apply_autoindents(
2058 &mut self,
2059 indent_sizes: BTreeMap<u32, IndentSize>,
2060 cx: &mut Context<Self>,
2061 ) {
2062 self.autoindent_requests.clear();
2063 for tx in self.wait_for_autoindent_txs.drain(..) {
2064 tx.send(()).ok();
2065 }
2066
2067 let edits: Vec<_> = indent_sizes
2068 .into_iter()
2069 .filter_map(|(row, indent_size)| {
2070 let current_size = indent_size_for_line(self, row);
2071 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2072 })
2073 .collect();
2074
2075 let preserve_preview = self.preserve_preview();
2076 self.edit(edits, None, cx);
2077 if preserve_preview {
2078 self.refresh_preview();
2079 }
2080 }
2081
2082 /// Create a minimal edit that will cause the given row to be indented
2083 /// with the given size. After applying this edit, the length of the line
2084 /// will always be at least `new_size.len`.
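    ///
    /// A minimal sketch, not compiled as a doc test, growing a row's indentation
    /// from two to four spaces:
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// // Two spaces are inserted at the start of row 3.
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```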
2085 pub fn edit_for_indent_size_adjustment(
2086 row: u32,
2087 current_size: IndentSize,
2088 new_size: IndentSize,
2089 ) -> Option<(Range<Point>, String)> {
2090 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
2092 Ordering::Greater => {
2093 let point = Point::new(row, 0);
2094 Some((
2095 point..point,
2096 iter::repeat(new_size.char())
2097 .take((new_size.len - current_size.len) as usize)
2098 .collect::<String>(),
2099 ))
2100 }
2101
2102 Ordering::Less => Some((
2103 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2104 String::new(),
2105 )),
2106
2107 Ordering::Equal => None,
2108 }
2109 } else {
2110 Some((
2111 Point::new(row, 0)..Point::new(row, current_size.len),
2112 iter::repeat(new_size.char())
2113 .take(new_size.len as usize)
2114 .collect::<String>(),
2115 ))
2116 }
2117 }
2118
2119 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2120 /// and the given new text.
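    ///
    /// A minimal sketch of pairing this with [`Buffer::apply_diff`]; `buffer`
    /// (an `Entity<Buffer>`), `cx`, and `new_text` are hypothetical, the exact
    /// spawning API depends on the calling context, and the example is not
    /// compiled as a doc test:
    ///
    /// ```ignore
    /// let diff_task = buffer.read(cx).diff(new_text, cx);
    /// cx.spawn(async move |cx| {
    ///     let diff = diff_task.await;
    ///     buffer
    ///         .update(cx, |buffer, cx| buffer.apply_diff(diff, cx))
    ///         .ok();
    /// })
    /// .detach();
    /// ```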
2121 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2122 let old_text = self.as_rope().clone();
2123 let base_version = self.version();
2124 cx.background_executor()
2125 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2126 let old_text = old_text.to_string();
2127 let line_ending = LineEnding::detect(&new_text);
2128 LineEnding::normalize(&mut new_text);
2129 let edits = text_diff(&old_text, &new_text);
2130 Diff {
2131 base_version,
2132 line_ending,
2133 edits,
2134 }
2135 })
2136 }
2137
    /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2140 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2141 let old_text = self.as_rope().clone();
2142 let line_ending = self.line_ending();
2143 let base_version = self.version();
2144 cx.background_spawn(async move {
2145 let ranges = trailing_whitespace_ranges(&old_text);
2146 let empty = Arc::<str>::from("");
2147 Diff {
2148 base_version,
2149 line_ending,
2150 edits: ranges
2151 .into_iter()
2152 .map(|range| (range, empty.clone()))
2153 .collect(),
2154 }
2155 })
2156 }
2157
2158 /// Ensures that the buffer ends with a single newline character, and
2159 /// no other whitespace. Skips if the buffer is empty.
2160 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2161 let len = self.len();
2162 if len == 0 {
2163 return;
2164 }
2165 let mut offset = len;
2166 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2167 let non_whitespace_len = chunk
2168 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2169 .len();
2170 offset -= chunk.len();
2171 offset += non_whitespace_len;
2172 if non_whitespace_len != 0 {
2173 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2174 return;
2175 }
2176 break;
2177 }
2178 }
2179 self.edit([(offset..len, "\n")], None, cx);
2180 }
2181
2182 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2183 /// calculated, then adjust the diff to account for those changes, and discard any
2184 /// parts of the diff that conflict with those changes.
2185 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2186 let snapshot = self.snapshot();
2187 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2188 let mut delta = 0;
2189 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2190 while let Some(edit_since) = edits_since.peek() {
2191 // If the edit occurs after a diff hunk, then it does not
2192 // affect that hunk.
2193 if edit_since.old.start > range.end {
2194 break;
2195 }
2196 // If the edit precedes the diff hunk, then adjust the hunk
2197 // to reflect the edit.
2198 else if edit_since.old.end < range.start {
2199 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2200 edits_since.next();
2201 }
2202 // If the edit intersects a diff hunk, then discard that hunk.
2203 else {
2204 return None;
2205 }
2206 }
2207
2208 let start = (range.start as i64 + delta) as usize;
2209 let end = (range.end as i64 + delta) as usize;
2210 Some((start..end, new_text))
2211 });
2212
2213 self.start_transaction();
2214 self.text.set_line_ending(diff.line_ending);
2215 self.edit(adjusted_edits, None, cx);
2216 self.end_transaction(cx)
2217 }
2218
2219 pub fn has_unsaved_edits(&self) -> bool {
2220 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2221
2222 if last_version == self.version {
2223 self.has_unsaved_edits
2224 .set((last_version, has_unsaved_edits));
2225 return has_unsaved_edits;
2226 }
2227
2228 let has_edits = self.has_edits_since(&self.saved_version);
2229 self.has_unsaved_edits
2230 .set((self.version.clone(), has_edits));
2231 has_edits
2232 }
2233
2234 /// Checks if the buffer has unsaved changes.
2235 pub fn is_dirty(&self) -> bool {
2236 if self.capability == Capability::ReadOnly {
2237 return false;
2238 }
2239 if self.has_conflict {
2240 return true;
2241 }
2242 match self.file.as_ref().map(|f| f.disk_state()) {
2243 Some(DiskState::New) | Some(DiskState::Deleted) => {
2244 !self.is_empty() && self.has_unsaved_edits()
2245 }
2246 _ => self.has_unsaved_edits(),
2247 }
2248 }
2249
2250 /// Marks the buffer as having a conflict regardless of current buffer state.
2251 pub fn set_conflict(&mut self) {
2252 self.has_conflict = true;
2253 }
2254
2255 /// Checks if the buffer and its file have both changed since the buffer
2256 /// was last saved or reloaded.
2257 pub fn has_conflict(&self) -> bool {
2258 if self.has_conflict {
2259 return true;
2260 }
2261 let Some(file) = self.file.as_ref() else {
2262 return false;
2263 };
2264 match file.disk_state() {
2265 DiskState::New => false,
2266 DiskState::Present { mtime } => match self.saved_mtime {
2267 Some(saved_mtime) => {
2268 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2269 }
2270 None => true,
2271 },
2272 DiskState::Deleted => false,
2273 }
2274 }
2275
2276 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2277 pub fn subscribe(&mut self) -> Subscription<usize> {
2278 self.text.subscribe()
2279 }
2280
2281 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2282 ///
2283 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
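    ///
    /// A minimal sketch, not compiled as a doc test; `buffer` is a hypothetical
    /// `&mut Buffer`:
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...after some edits have been applied...
    /// if changed.get() {
    ///     changed.set(false); // Handle the change and reset the bit.
    /// }
    /// ```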
2285 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2286 if let Err(ix) = self
2287 .change_bits
2288 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2289 {
2290 self.change_bits.insert(ix, bit);
2291 }
2292 }
2293
2294 /// Set the change bit for all "listeners".
2295 fn was_changed(&mut self) {
2296 self.change_bits.retain(|change_bit| {
2297 change_bit
2298 .upgrade()
2299 .inspect(|bit| {
2300 _ = bit.replace(true);
2301 })
2302 .is_some()
2303 });
2304 }
2305
2306 /// Starts a transaction, if one is not already in-progress. When undoing or
2307 /// redoing edits, all of the edits performed within a transaction are undone
2308 /// or redone together.
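    ///
    /// A minimal sketch, not compiled as a doc test, grouping two edits so that a
    /// single undo reverts both; `buffer` and `cx` are hypothetical handles:
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// buffer.undo(cx); // Reverts both edits at once.
    /// ```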
2309 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2310 self.start_transaction_at(Instant::now())
2311 }
2312
2313 /// Starts a transaction, providing the current time. Subsequent transactions
2314 /// that occur within a short period of time will be grouped together. This
2315 /// is controlled by the buffer's undo grouping duration.
2316 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2317 self.transaction_depth += 1;
2318 if self.was_dirty_before_starting_transaction.is_none() {
2319 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2320 }
2321 self.text.start_transaction_at(now)
2322 }
2323
2324 /// Terminates the current transaction, if this is the outermost transaction.
2325 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2326 self.end_transaction_at(Instant::now(), cx)
2327 }
2328
2329 /// Terminates the current transaction, providing the current time. Subsequent transactions
2330 /// that occur within a short period of time will be grouped together. This
2331 /// is controlled by the buffer's undo grouping duration.
2332 pub fn end_transaction_at(
2333 &mut self,
2334 now: Instant,
2335 cx: &mut Context<Self>,
2336 ) -> Option<TransactionId> {
2337 assert!(self.transaction_depth > 0);
2338 self.transaction_depth -= 1;
2339 let was_dirty = if self.transaction_depth == 0 {
2340 self.was_dirty_before_starting_transaction.take().unwrap()
2341 } else {
2342 false
2343 };
2344 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2345 self.did_edit(&start_version, was_dirty, cx);
2346 Some(transaction_id)
2347 } else {
2348 None
2349 }
2350 }
2351
2352 /// Manually add a transaction to the buffer's undo history.
2353 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2354 self.text.push_transaction(transaction, now);
2355 }
2356
2357 /// Differs from `push_transaction` in that it does not clear the redo
2358 /// stack. Intended to be used to create a parent transaction to merge
2359 /// potential child transactions into.
2360 ///
2361 /// The caller is responsible for removing it from the undo history using
2362 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2363 /// are merged into this transaction, the caller is responsible for ensuring
2364 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2365 /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
2368 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2369 self.text.push_empty_transaction(now)
2370 }
2371
2372 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2374 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2375 self.text.finalize_last_transaction()
2376 }
2377
2378 /// Manually group all changes since a given transaction.
2379 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2380 self.text.group_until_transaction(transaction_id);
2381 }
2382
2383 /// Manually remove a transaction from the buffer's undo history
2384 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2385 self.text.forget_transaction(transaction_id)
2386 }
2387
2388 /// Retrieve a transaction from the buffer's undo history
2389 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2390 self.text.get_transaction(transaction_id)
2391 }
2392
2393 /// Manually merge two transactions in the buffer's undo history.
2394 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2395 self.text.merge_transactions(transaction, destination);
2396 }
2397
2398 /// Waits for the buffer to receive operations with the given timestamps.
2399 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2400 &mut self,
2401 edit_ids: It,
2402 ) -> impl Future<Output = Result<()>> + use<It> {
2403 self.text.wait_for_edits(edit_ids)
2404 }
2405
2406 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2407 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2408 &mut self,
2409 anchors: It,
2410 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2411 self.text.wait_for_anchors(anchors)
2412 }
2413
2414 /// Waits for the buffer to receive operations up to the given version.
2415 pub fn wait_for_version(
2416 &mut self,
2417 version: clock::Global,
2418 ) -> impl Future<Output = Result<()>> + use<> {
2419 self.text.wait_for_version(version)
2420 }
2421
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`], or
2423 /// [`Buffer::wait_for_version`] to resolve with an error.
2424 pub fn give_up_waiting(&mut self) {
2425 self.text.give_up_waiting();
2426 }
2427
2428 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2429 let mut rx = None;
2430 if !self.autoindent_requests.is_empty() {
2431 let channel = oneshot::channel();
2432 self.wait_for_autoindent_txs.push(channel.0);
2433 rx = Some(channel.1);
2434 }
2435 rx
2436 }
2437
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2439 pub fn set_active_selections(
2440 &mut self,
2441 selections: Arc<[Selection<Anchor>]>,
2442 line_mode: bool,
2443 cursor_shape: CursorShape,
2444 cx: &mut Context<Self>,
2445 ) {
2446 let lamport_timestamp = self.text.lamport_clock.tick();
2447 self.remote_selections.insert(
2448 self.text.replica_id(),
2449 SelectionSet {
2450 selections: selections.clone(),
2451 lamport_timestamp,
2452 line_mode,
2453 cursor_shape,
2454 },
2455 );
2456 self.send_operation(
2457 Operation::UpdateSelections {
2458 selections,
2459 line_mode,
2460 lamport_timestamp,
2461 cursor_shape,
2462 },
2463 true,
2464 cx,
2465 );
2466 self.non_text_state_update_count += 1;
2467 cx.notify();
2468 }
2469
2470 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2471 /// this replica.
2472 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2473 if self
2474 .remote_selections
2475 .get(&self.text.replica_id())
2476 .is_none_or(|set| !set.selections.is_empty())
2477 {
2478 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2479 }
2480 }
2481
2482 pub fn set_agent_selections(
2483 &mut self,
2484 selections: Arc<[Selection<Anchor>]>,
2485 line_mode: bool,
2486 cursor_shape: CursorShape,
2487 cx: &mut Context<Self>,
2488 ) {
2489 let lamport_timestamp = self.text.lamport_clock.tick();
2490 self.remote_selections.insert(
2491 ReplicaId::AGENT,
2492 SelectionSet {
2493 selections,
2494 lamport_timestamp,
2495 line_mode,
2496 cursor_shape,
2497 },
2498 );
2499 self.non_text_state_update_count += 1;
2500 cx.notify();
2501 }
2502
2503 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2504 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2505 }
2506
2507 /// Replaces the buffer's entire text.
2508 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2509 where
2510 T: Into<Arc<str>>,
2511 {
2512 self.autoindent_requests.clear();
2513 self.edit([(0..self.len(), text)], None, cx)
2514 }
2515
2516 /// Appends the given text to the end of the buffer.
2517 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2518 where
2519 T: Into<Arc<str>>,
2520 {
2521 self.edit([(self.len()..self.len(), text)], None, cx)
2522 }
2523
2524 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2525 /// delete, and a string of text to insert at that location.
2526 ///
2527 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2528 /// request for the edited ranges, which will be processed when the buffer finishes
2529 /// parsing.
2530 ///
    /// Parsing takes place at the end of a transaction, and may run synchronously
    /// or asynchronously, depending on the changes.
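    ///
    /// A minimal sketch, not compiled as a doc test; `buffer` and `cx` are
    /// hypothetical handles:
    ///
    /// ```ignore
    /// // Append a function and let the language's indent rules lay it out.
    /// buffer.edit(
    ///     [(buffer.len()..buffer.len(), "\nfn helper() {\ntodo!()\n}\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```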
2533 pub fn edit<I, S, T>(
2534 &mut self,
2535 edits_iter: I,
2536 autoindent_mode: Option<AutoindentMode>,
2537 cx: &mut Context<Self>,
2538 ) -> Option<clock::Lamport>
2539 where
2540 I: IntoIterator<Item = (Range<S>, T)>,
2541 S: ToOffset,
2542 T: Into<Arc<str>>,
2543 {
2544 // Skip invalid edits and coalesce contiguous ones.
2545 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2546
2547 for (range, new_text) in edits_iter {
2548 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2549
2550 if range.start > range.end {
2551 mem::swap(&mut range.start, &mut range.end);
2552 }
2553 let new_text = new_text.into();
2554 if !new_text.is_empty() || !range.is_empty() {
2555 if let Some((prev_range, prev_text)) = edits.last_mut()
2556 && prev_range.end >= range.start
2557 {
2558 prev_range.end = cmp::max(prev_range.end, range.end);
2559 *prev_text = format!("{prev_text}{new_text}").into();
2560 } else {
2561 edits.push((range, new_text));
2562 }
2563 }
2564 }
2565 if edits.is_empty() {
2566 return None;
2567 }
2568
2569 self.start_transaction();
2570 self.pending_autoindent.take();
2571 let autoindent_request = autoindent_mode
2572 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2573
2574 let edit_operation = self.text.edit(edits.iter().cloned());
2575 let edit_id = edit_operation.timestamp();
2576
2577 if let Some((before_edit, mode)) = autoindent_request {
2578 let mut delta = 0isize;
2579 let mut previous_setting = None;
2580 let entries: Vec<_> = edits
2581 .into_iter()
2582 .enumerate()
2583 .zip(&edit_operation.as_edit().unwrap().new_text)
2584 .filter(|((_, (range, _)), _)| {
2585 let language = before_edit.language_at(range.start);
2586 let language_id = language.map(|l| l.id());
2587 if let Some((cached_language_id, auto_indent)) = previous_setting
2588 && cached_language_id == language_id
2589 {
2590 auto_indent
2591 } else {
2592 // The auto-indent setting is not present in editorconfigs, hence
2593 // we can avoid passing the file here.
2594 let auto_indent =
2595 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2596 previous_setting = Some((language_id, auto_indent));
2597 auto_indent
2598 }
2599 })
2600 .map(|((ix, (range, _)), new_text)| {
2601 let new_text_length = new_text.len();
2602 let old_start = range.start.to_point(&before_edit);
2603 let new_start = (delta + range.start as isize) as usize;
2604 let range_len = range.end - range.start;
2605 delta += new_text_length as isize - range_len as isize;
2606
2607 // Decide what range of the insertion to auto-indent, and whether
2608 // the first line of the insertion should be considered a newly-inserted line
2609 // or an edit to an existing line.
2610 let mut range_of_insertion_to_indent = 0..new_text_length;
2611 let mut first_line_is_new = true;
2612
2613 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2614 let old_line_end = before_edit.line_len(old_start.row);
2615
2616 if old_start.column > old_line_start {
2617 first_line_is_new = false;
2618 }
2619
2620 if !new_text.contains('\n')
2621 && (old_start.column + (range_len as u32) < old_line_end
2622 || old_line_end == old_line_start)
2623 {
2624 first_line_is_new = false;
2625 }
2626
2627 // When inserting text starting with a newline, avoid auto-indenting the
2628 // previous line.
2629 if new_text.starts_with('\n') {
2630 range_of_insertion_to_indent.start += 1;
2631 first_line_is_new = true;
2632 }
2633
2634 let mut original_indent_column = None;
2635 if let AutoindentMode::Block {
2636 original_indent_columns,
2637 } = &mode
2638 {
2639 original_indent_column = Some(if new_text.starts_with('\n') {
2640 indent_size_for_text(
2641 new_text[range_of_insertion_to_indent.clone()].chars(),
2642 )
2643 .len
2644 } else {
2645 original_indent_columns
2646 .get(ix)
2647 .copied()
2648 .flatten()
2649 .unwrap_or_else(|| {
2650 indent_size_for_text(
2651 new_text[range_of_insertion_to_indent.clone()].chars(),
2652 )
2653 .len
2654 })
2655 });
2656
2657 // Avoid auto-indenting the line after the edit.
2658 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2659 range_of_insertion_to_indent.end -= 1;
2660 }
2661 }
2662
2663 AutoindentRequestEntry {
2664 first_line_is_new,
2665 original_indent_column,
2666 indent_size: before_edit.language_indent_size_at(range.start, cx),
2667 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2668 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2669 }
2670 })
2671 .collect();
2672
2673 if !entries.is_empty() {
2674 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2675 before_edit,
2676 entries,
2677 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2678 ignore_empty_lines: false,
2679 }));
2680 }
2681 }
2682
2683 self.end_transaction(cx);
2684 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2685 Some(edit_id)
2686 }
2687
2688 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2689 self.was_changed();
2690
2691 if self.edits_since::<usize>(old_version).next().is_none() {
2692 return;
2693 }
2694
2695 self.reparse(cx, true);
2696 cx.emit(BufferEvent::Edited);
2697 if was_dirty != self.is_dirty() {
2698 cx.emit(BufferEvent::DirtyChanged);
2699 }
2700 cx.notify();
2701 }
2702
2703 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2704 where
2705 I: IntoIterator<Item = Range<T>>,
2706 T: ToOffset + Copy,
2707 {
2708 let before_edit = self.snapshot();
2709 let entries = ranges
2710 .into_iter()
2711 .map(|range| AutoindentRequestEntry {
2712 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2713 first_line_is_new: true,
2714 indent_size: before_edit.language_indent_size_at(range.start, cx),
2715 original_indent_column: None,
2716 })
2717 .collect();
2718 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2719 before_edit,
2720 entries,
2721 is_block_mode: false,
2722 ignore_empty_lines: true,
2723 }));
2724 self.request_autoindent(cx);
2725 }
2726
2727 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2728 // You can also request the insertion of empty lines above and below the line starting at the returned point.
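    //
    // A minimal usage sketch (hypothetical `buffer` and `cx` handles):
    //
    //     let start = buffer.insert_empty_line(Point::new(2, 5), true, true, cx);
    //     // `start` is the beginning of the newly created blank line.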
2729 pub fn insert_empty_line(
2730 &mut self,
2731 position: impl ToPoint,
2732 space_above: bool,
2733 space_below: bool,
2734 cx: &mut Context<Self>,
2735 ) -> Point {
2736 let mut position = position.to_point(self);
2737
2738 self.start_transaction();
2739
2740 self.edit(
2741 [(position..position, "\n")],
2742 Some(AutoindentMode::EachLine),
2743 cx,
2744 );
2745
2746 if position.column > 0 {
2747 position += Point::new(1, 0);
2748 }
2749
2750 if !self.is_line_blank(position.row) {
2751 self.edit(
2752 [(position..position, "\n")],
2753 Some(AutoindentMode::EachLine),
2754 cx,
2755 );
2756 }
2757
2758 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2759 self.edit(
2760 [(position..position, "\n")],
2761 Some(AutoindentMode::EachLine),
2762 cx,
2763 );
2764 position.row += 1;
2765 }
2766
2767 if space_below
2768 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2769 {
2770 self.edit(
2771 [(position..position, "\n")],
2772 Some(AutoindentMode::EachLine),
2773 cx,
2774 );
2775 }
2776
2777 self.end_transaction(cx);
2778
2779 position
2780 }
2781
2782 /// Applies the given remote operations to the buffer.
2783 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2784 self.pending_autoindent.take();
2785 let was_dirty = self.is_dirty();
2786 let old_version = self.version.clone();
2787 let mut deferred_ops = Vec::new();
2788 let buffer_ops = ops
2789 .into_iter()
2790 .filter_map(|op| match op {
2791 Operation::Buffer(op) => Some(op),
2792 _ => {
2793 if self.can_apply_op(&op) {
2794 self.apply_op(op, cx);
2795 } else {
2796 deferred_ops.push(op);
2797 }
2798 None
2799 }
2800 })
2801 .collect::<Vec<_>>();
2802 for operation in buffer_ops.iter() {
2803 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2804 }
2805 self.text.apply_ops(buffer_ops);
2806 self.deferred_ops.insert(deferred_ops);
2807 self.flush_deferred_ops(cx);
2808 self.did_edit(&old_version, was_dirty, cx);
2809 // Notify independently of whether the buffer was edited as the operations could include a
2810 // selection update.
2811 cx.notify();
2812 }
2813
2814 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2815 let mut deferred_ops = Vec::new();
2816 for op in self.deferred_ops.drain().iter().cloned() {
2817 if self.can_apply_op(&op) {
2818 self.apply_op(op, cx);
2819 } else {
2820 deferred_ops.push(op);
2821 }
2822 }
2823 self.deferred_ops.insert(deferred_ops);
2824 }
2825
2826 pub fn has_deferred_ops(&self) -> bool {
2827 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2828 }
2829
2830 fn can_apply_op(&self, operation: &Operation) -> bool {
2831 match operation {
2832 Operation::Buffer(_) => {
2833 unreachable!("buffer operations should never be applied at this layer")
2834 }
2835 Operation::UpdateDiagnostics {
2836 diagnostics: diagnostic_set,
2837 ..
2838 } => diagnostic_set.iter().all(|diagnostic| {
2839 self.text.can_resolve(&diagnostic.range.start)
2840 && self.text.can_resolve(&diagnostic.range.end)
2841 }),
2842 Operation::UpdateSelections { selections, .. } => selections
2843 .iter()
2844 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2845 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2846 }
2847 }
2848
2849 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2850 match operation {
2851 Operation::Buffer(_) => {
2852 unreachable!("buffer operations should never be applied at this layer")
2853 }
2854 Operation::UpdateDiagnostics {
2855 server_id,
2856 diagnostics: diagnostic_set,
2857 lamport_timestamp,
2858 } => {
2859 let snapshot = self.snapshot();
2860 self.apply_diagnostic_update(
2861 server_id,
2862 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2863 lamport_timestamp,
2864 cx,
2865 );
2866 }
2867 Operation::UpdateSelections {
2868 selections,
2869 lamport_timestamp,
2870 line_mode,
2871 cursor_shape,
2872 } => {
2873 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2874 && set.lamport_timestamp > lamport_timestamp
2875 {
2876 return;
2877 }
2878
2879 self.remote_selections.insert(
2880 lamport_timestamp.replica_id,
2881 SelectionSet {
2882 selections,
2883 lamport_timestamp,
2884 line_mode,
2885 cursor_shape,
2886 },
2887 );
2888 self.text.lamport_clock.observe(lamport_timestamp);
2889 self.non_text_state_update_count += 1;
2890 }
2891 Operation::UpdateCompletionTriggers {
2892 triggers,
2893 lamport_timestamp,
2894 server_id,
2895 } => {
2896 if triggers.is_empty() {
2897 self.completion_triggers_per_language_server
2898 .remove(&server_id);
2899 self.completion_triggers = self
2900 .completion_triggers_per_language_server
2901 .values()
2902 .flat_map(|triggers| triggers.iter().cloned())
2903 .collect();
2904 } else {
2905 self.completion_triggers_per_language_server
2906 .insert(server_id, triggers.iter().cloned().collect());
2907 self.completion_triggers.extend(triggers);
2908 }
2909 self.text.lamport_clock.observe(lamport_timestamp);
2910 }
2911 Operation::UpdateLineEnding {
2912 line_ending,
2913 lamport_timestamp,
2914 } => {
2915 self.text.set_line_ending(line_ending);
2916 self.text.lamport_clock.observe(lamport_timestamp);
2917 }
2918 }
2919 }
2920
2921 fn apply_diagnostic_update(
2922 &mut self,
2923 server_id: LanguageServerId,
2924 diagnostics: DiagnosticSet,
2925 lamport_timestamp: clock::Lamport,
2926 cx: &mut Context<Self>,
2927 ) {
2928 if lamport_timestamp > self.diagnostics_timestamp {
2929 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2930 if diagnostics.is_empty() {
2931 if let Ok(ix) = ix {
2932 self.diagnostics.remove(ix);
2933 }
2934 } else {
2935 match ix {
2936 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2937 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2938 };
2939 }
2940 self.diagnostics_timestamp = lamport_timestamp;
2941 self.non_text_state_update_count += 1;
2942 self.text.lamport_clock.observe(lamport_timestamp);
2943 cx.notify();
2944 cx.emit(BufferEvent::DiagnosticsUpdated);
2945 }
2946 }
2947
2948 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2949 self.was_changed();
2950 cx.emit(BufferEvent::Operation {
2951 operation,
2952 is_local,
2953 });
2954 }
2955
2956 /// Removes the selections for a given peer.
2957 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2958 self.remote_selections.remove(&replica_id);
2959 cx.notify();
2960 }
2961
2962 /// Undoes the most recent transaction.
2963 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2964 let was_dirty = self.is_dirty();
2965 let old_version = self.version.clone();
2966
2967 if let Some((transaction_id, operation)) = self.text.undo() {
2968 self.send_operation(Operation::Buffer(operation), true, cx);
2969 self.did_edit(&old_version, was_dirty, cx);
2970 Some(transaction_id)
2971 } else {
2972 None
2973 }
2974 }
2975
2976 /// Manually undoes a specific transaction in the buffer's undo history.
2977 pub fn undo_transaction(
2978 &mut self,
2979 transaction_id: TransactionId,
2980 cx: &mut Context<Self>,
2981 ) -> bool {
2982 let was_dirty = self.is_dirty();
2983 let old_version = self.version.clone();
2984 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2985 self.send_operation(Operation::Buffer(operation), true, cx);
2986 self.did_edit(&old_version, was_dirty, cx);
2987 true
2988 } else {
2989 false
2990 }
2991 }
2992
2993 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2994 pub fn undo_to_transaction(
2995 &mut self,
2996 transaction_id: TransactionId,
2997 cx: &mut Context<Self>,
2998 ) -> bool {
2999 let was_dirty = self.is_dirty();
3000 let old_version = self.version.clone();
3001
3002 let operations = self.text.undo_to_transaction(transaction_id);
3003 let undone = !operations.is_empty();
3004 for operation in operations {
3005 self.send_operation(Operation::Buffer(operation), true, cx);
3006 }
3007 if undone {
3008 self.did_edit(&old_version, was_dirty, cx)
3009 }
3010 undone
3011 }
3012
3013 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3014 let was_dirty = self.is_dirty();
3015 let operation = self.text.undo_operations(counts);
3016 let old_version = self.version.clone();
3017 self.send_operation(Operation::Buffer(operation), true, cx);
3018 self.did_edit(&old_version, was_dirty, cx);
3019 }
3020
    /// Redoes the most recently undone transaction.
3022 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3023 let was_dirty = self.is_dirty();
3024 let old_version = self.version.clone();
3025
3026 if let Some((transaction_id, operation)) = self.text.redo() {
3027 self.send_operation(Operation::Buffer(operation), true, cx);
3028 self.did_edit(&old_version, was_dirty, cx);
3029 Some(transaction_id)
3030 } else {
3031 None
3032 }
3033 }
3034
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3036 pub fn redo_to_transaction(
3037 &mut self,
3038 transaction_id: TransactionId,
3039 cx: &mut Context<Self>,
3040 ) -> bool {
3041 let was_dirty = self.is_dirty();
3042 let old_version = self.version.clone();
3043
3044 let operations = self.text.redo_to_transaction(transaction_id);
3045 let redone = !operations.is_empty();
3046 for operation in operations {
3047 self.send_operation(Operation::Buffer(operation), true, cx);
3048 }
3049 if redone {
3050 self.did_edit(&old_version, was_dirty, cx)
3051 }
3052 redone
3053 }
3054
3055 /// Override current completion triggers with the user-provided completion triggers.
3056 pub fn set_completion_triggers(
3057 &mut self,
3058 server_id: LanguageServerId,
3059 triggers: BTreeSet<String>,
3060 cx: &mut Context<Self>,
3061 ) {
3062 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3063 if triggers.is_empty() {
3064 self.completion_triggers_per_language_server
3065 .remove(&server_id);
3066 self.completion_triggers = self
3067 .completion_triggers_per_language_server
3068 .values()
3069 .flat_map(|triggers| triggers.iter().cloned())
3070 .collect();
3071 } else {
3072 self.completion_triggers_per_language_server
3073 .insert(server_id, triggers.clone());
3074 self.completion_triggers.extend(triggers.iter().cloned());
3075 }
3076 self.send_operation(
3077 Operation::UpdateCompletionTriggers {
3078 triggers: triggers.into_iter().collect(),
3079 lamport_timestamp: self.completion_triggers_timestamp,
3080 server_id,
3081 },
3082 true,
3083 cx,
3084 );
3085 cx.notify();
3086 }
3087
3088 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
3090 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3091 &self.completion_triggers
3092 }
3093
3094 /// Call this directly after performing edits to prevent the preview tab
3095 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3096 /// to return false until there are additional edits.
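    ///
    /// A minimal sketch, not compiled as a doc test; `buffer` and `cx` are
    /// hypothetical handles:
    ///
    /// ```ignore
    /// buffer.edit([(0..0, "// generated header\n")], None, cx);
    /// buffer.refresh_preview(); // These edits no longer dismiss the preview tab.
    /// ```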
3097 pub fn refresh_preview(&mut self) {
3098 self.preview_version = self.version.clone();
3099 }
3100
3101 /// Whether we should preserve the preview status of a tab containing this buffer.
3102 pub fn preserve_preview(&self) -> bool {
3103 !self.has_edits_since(&self.preview_version)
3104 }
3105}
3106
3107#[doc(hidden)]
3108#[cfg(any(test, feature = "test-support"))]
3109impl Buffer {
3110 pub fn edit_via_marked_text(
3111 &mut self,
3112 marked_string: &str,
3113 autoindent_mode: Option<AutoindentMode>,
3114 cx: &mut Context<Self>,
3115 ) {
3116 let edits = self.edits_for_marked_text(marked_string);
3117 self.edit(edits, autoindent_mode, cx);
3118 }
3119
3120 pub fn set_group_interval(&mut self, group_interval: Duration) {
3121 self.text.set_group_interval(group_interval);
3122 }
3123
3124 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3125 where
3126 T: rand::Rng,
3127 {
3128 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3129 let mut last_end = None;
3130 for _ in 0..old_range_count {
3131 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3132 break;
3133 }
3134
3135 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3136 let mut range = self.random_byte_range(new_start, rng);
3137 if rng.random_bool(0.2) {
3138 mem::swap(&mut range.start, &mut range.end);
3139 }
3140 last_end = Some(range.end);
3141
3142 let new_text_len = rng.random_range(0..10);
3143 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3144 new_text = new_text.to_uppercase();
3145
3146 edits.push((range, new_text));
3147 }
3148 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3149 self.edit(edits, None, cx);
3150 }
3151
3152 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3153 let was_dirty = self.is_dirty();
3154 let old_version = self.version.clone();
3155
3156 let ops = self.text.randomly_undo_redo(rng);
3157 if !ops.is_empty() {
3158 for op in ops {
3159 self.send_operation(Operation::Buffer(op), true, cx);
3160 self.did_edit(&old_version, was_dirty, cx);
3161 }
3162 }
3163 }
3164}
3165
3166impl EventEmitter<BufferEvent> for Buffer {}
3167
3168impl Deref for Buffer {
3169 type Target = TextBuffer;
3170
3171 fn deref(&self) -> &Self::Target {
3172 &self.text
3173 }
3174}
3175
3176impl BufferSnapshot {
3177 /// Returns [`IndentSize`] for a given line that respects user settings and
3178 /// language preferences.
3179 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3180 indent_size_for_line(self, row)
3181 }
3182
3183 /// Returns [`IndentSize`] for a given position that respects user settings
3184 /// and language preferences.
3185 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3186 let settings = language_settings(
3187 self.language_at(position).map(|l| l.name()),
3188 self.file(),
3189 cx,
3190 );
3191 if settings.hard_tabs {
3192 IndentSize::tab()
3193 } else {
3194 IndentSize::spaces(settings.tab_size.get())
3195 }
3196 }
3197
3198 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3199 /// is passed in as `single_indent_size`.
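    ///
    /// A minimal sketch, not compiled as a doc test; `snapshot` is a hypothetical
    /// [`BufferSnapshot`]:
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent by {} columns", indent.len);
    /// }
    /// ```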
3200 pub fn suggested_indents(
3201 &self,
3202 rows: impl Iterator<Item = u32>,
3203 single_indent_size: IndentSize,
3204 ) -> BTreeMap<u32, IndentSize> {
3205 let mut result = BTreeMap::new();
3206
3207 for row_range in contiguous_ranges(rows, 10) {
3208 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3209 Some(suggestions) => suggestions,
3210 _ => break,
3211 };
3212
3213 for (row, suggestion) in row_range.zip(suggestions) {
3214 let indent_size = if let Some(suggestion) = suggestion {
3215 result
3216 .get(&suggestion.basis_row)
3217 .copied()
3218 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3219 .with_delta(suggestion.delta, single_indent_size)
3220 } else {
3221 self.indent_size_for_line(row)
3222 };
3223
3224 result.insert(row, indent_size);
3225 }
3226 }
3227
3228 result
3229 }
3230
3231 fn suggest_autoindents(
3232 &self,
3233 row_range: Range<u32>,
3234 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3235 let config = &self.language.as_ref()?.config;
3236 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3237
3238 #[derive(Debug, Clone)]
3239 struct StartPosition {
3240 start: Point,
3241 suffix: SharedString,
3242 language: Arc<Language>,
3243 }
3244
3245 // Find the suggested indentation ranges based on the syntax tree.
3246 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3247 let end = Point::new(row_range.end, 0);
3248 let range = (start..end).to_offset(&self.text);
3249 let mut matches = self.syntax.matches_with_options(
3250 range.clone(),
3251 &self.text,
3252 TreeSitterOptions {
3253 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3254 max_start_depth: None,
3255 },
3256 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3257 );
3258 let indent_configs = matches
3259 .grammars()
3260 .iter()
3261 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3262 .collect::<Vec<_>>();
3263
3264 let mut indent_ranges = Vec::<Range<Point>>::new();
3265 let mut start_positions = Vec::<StartPosition>::new();
3266 let mut outdent_positions = Vec::<Point>::new();
3267 while let Some(mat) = matches.peek() {
3268 let mut start: Option<Point> = None;
3269 let mut end: Option<Point> = None;
3270
3271 let config = indent_configs[mat.grammar_index];
3272 for capture in mat.captures {
3273 if capture.index == config.indent_capture_ix {
3274 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3275 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3276 } else if Some(capture.index) == config.start_capture_ix {
3277 start = Some(Point::from_ts_point(capture.node.end_position()));
3278 } else if Some(capture.index) == config.end_capture_ix {
3279 end = Some(Point::from_ts_point(capture.node.start_position()));
3280 } else if Some(capture.index) == config.outdent_capture_ix {
3281 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3282 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3283 start_positions.push(StartPosition {
3284 start: Point::from_ts_point(capture.node.start_position()),
3285 suffix: suffix.clone(),
3286 language: mat.language.clone(),
3287 });
3288 }
3289 }
3290
3291 matches.advance();
3292 if let Some((start, end)) = start.zip(end) {
3293 if start.row == end.row {
3294 continue;
3295 }
3296 let range = start..end;
3297 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3298 Err(ix) => indent_ranges.insert(ix, range),
3299 Ok(ix) => {
3300 let prev_range = &mut indent_ranges[ix];
3301 prev_range.end = prev_range.end.max(range.end);
3302 }
3303 }
3304 }
3305 }
3306
3307 let mut error_ranges = Vec::<Range<Point>>::new();
3308 let mut matches = self
3309 .syntax
3310 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3311 while let Some(mat) = matches.peek() {
3312 let node = mat.captures[0].node;
3313 let start = Point::from_ts_point(node.start_position());
3314 let end = Point::from_ts_point(node.end_position());
3315 let range = start..end;
3316 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3317 Ok(ix) | Err(ix) => ix,
3318 };
3319 let mut end_ix = ix;
3320 while let Some(existing_range) = error_ranges.get(end_ix) {
3321 if existing_range.end < end {
3322 end_ix += 1;
3323 } else {
3324 break;
3325 }
3326 }
3327 error_ranges.splice(ix..end_ix, [range]);
3328 matches.advance();
3329 }
3330
3331 outdent_positions.sort();
3332 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and set its end to the outdent position.
3335 if let Some(range_to_truncate) = indent_ranges
3336 .iter_mut()
3337 .rfind(|indent_range| indent_range.contains(&outdent_position))
3338 {
3339 range_to_truncate.end = outdent_position;
3340 }
3341 }
3342
3343 start_positions.sort_by_key(|b| b.start);
3344
        // Find the suggested indentation increases and decreases based on regexes.
3346 let mut regex_outdent_map = HashMap::default();
3347 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3348 let mut start_positions_iter = start_positions.iter().peekable();
3349
3350 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3351 self.for_each_line(
3352 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3353 ..Point::new(row_range.end, 0),
3354 |row, line| {
3355 let indent_len = self.indent_size_for_line(row).len;
3356 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3357 let row_language_config = row_language
3358 .as_ref()
3359 .map(|lang| lang.config())
3360 .unwrap_or(config);
3361
3362 if row_language_config
3363 .decrease_indent_pattern
3364 .as_ref()
3365 .is_some_and(|regex| regex.is_match(line))
3366 {
3367 indent_change_rows.push((row, Ordering::Less));
3368 }
3369 if row_language_config
3370 .increase_indent_pattern
3371 .as_ref()
3372 .is_some_and(|regex| regex.is_match(line))
3373 {
3374 indent_change_rows.push((row + 1, Ordering::Greater));
3375 }
3376 while let Some(pos) = start_positions_iter.peek() {
3377 if pos.start.row < row {
3378 let pos = start_positions_iter.next().unwrap().clone();
3379 last_seen_suffix
3380 .entry(pos.suffix.to_string())
3381 .or_default()
3382 .push(pos);
3383 } else {
3384 break;
3385 }
3386 }
3387 for rule in &row_language_config.decrease_indent_patterns {
3388 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3389 let row_start_column = self.indent_size_for_line(row).len;
3390 let basis_row = rule
3391 .valid_after
3392 .iter()
3393 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3394 .flatten()
3395 .filter(|pos| {
3396 row_language
3397 .as_ref()
3398 .or(self.language.as_ref())
3399 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3400 })
3401 .filter(|pos| pos.start.column <= row_start_column)
3402 .max_by_key(|pos| pos.start.row);
3403 if let Some(outdent_to) = basis_row {
3404 regex_outdent_map.insert(row, outdent_to.start.row);
3405 }
3406 break;
3407 }
3408 }
3409 },
3410 );
3411
3412 let mut indent_changes = indent_change_rows.into_iter().peekable();
3413 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3414 prev_non_blank_row.unwrap_or(0)
3415 } else {
3416 row_range.start.saturating_sub(1)
3417 };
3418
3419 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3420 Some(row_range.map(move |row| {
3421 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3422
3423 let mut indent_from_prev_row = false;
3424 let mut outdent_from_prev_row = false;
3425 let mut outdent_to_row = u32::MAX;
3426 let mut from_regex = false;
3427
3428 while let Some((indent_row, delta)) = indent_changes.peek() {
3429 match indent_row.cmp(&row) {
3430 Ordering::Equal => match delta {
3431 Ordering::Less => {
3432 from_regex = true;
3433 outdent_from_prev_row = true
3434 }
3435 Ordering::Greater => {
3436 indent_from_prev_row = true;
3437 from_regex = true
3438 }
3439 _ => {}
3440 },
3441
3442 Ordering::Greater => break,
3443 Ordering::Less => {}
3444 }
3445
3446 indent_changes.next();
3447 }
3448
3449 for range in &indent_ranges {
3450 if range.start.row >= row {
3451 break;
3452 }
3453 if range.start.row == prev_row && range.end > row_start {
3454 indent_from_prev_row = true;
3455 }
3456 if range.end > prev_row_start && range.end <= row_start {
3457 outdent_to_row = outdent_to_row.min(range.start.row);
3458 }
3459 }
3460
3461 if let Some(basis_row) = regex_outdent_map.get(&row) {
3462 indent_from_prev_row = false;
3463 outdent_to_row = *basis_row;
3464 from_regex = true;
3465 }
3466
3467 let within_error = error_ranges
3468 .iter()
3469 .any(|e| e.start.row < row && e.end > row_start);
3470
3471 let suggestion = if outdent_to_row == prev_row
3472 || (outdent_from_prev_row && indent_from_prev_row)
3473 {
3474 Some(IndentSuggestion {
3475 basis_row: prev_row,
3476 delta: Ordering::Equal,
3477 within_error: within_error && !from_regex,
3478 })
3479 } else if indent_from_prev_row {
3480 Some(IndentSuggestion {
3481 basis_row: prev_row,
3482 delta: Ordering::Greater,
3483 within_error: within_error && !from_regex,
3484 })
3485 } else if outdent_to_row < prev_row {
3486 Some(IndentSuggestion {
3487 basis_row: outdent_to_row,
3488 delta: Ordering::Equal,
3489 within_error: within_error && !from_regex,
3490 })
3491 } else if outdent_from_prev_row {
3492 Some(IndentSuggestion {
3493 basis_row: prev_row,
3494 delta: Ordering::Less,
3495 within_error: within_error && !from_regex,
3496 })
3497 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3498 {
3499 Some(IndentSuggestion {
3500 basis_row: prev_row,
3501 delta: Ordering::Equal,
3502 within_error: within_error && !from_regex,
3503 })
3504 } else {
3505 None
3506 };
3507
3508 prev_row = row;
3509 prev_row_start = row_start;
3510 suggestion
3511 }))
3512 }
3513
3514 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3515 while row > 0 {
3516 row -= 1;
3517 if !self.is_line_blank(row) {
3518 return Some(row);
3519 }
3520 }
3521 None
3522 }
3523
3524 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3525 let captures = self.syntax.captures(range, &self.text, |grammar| {
3526 grammar
3527 .highlights_config
3528 .as_ref()
3529 .map(|config| &config.query)
3530 });
3531 let highlight_maps = captures
3532 .grammars()
3533 .iter()
3534 .map(|grammar| grammar.highlight_map())
3535 .collect();
3536 (captures, highlight_maps)
3537 }
3538
    /// Iterates over chunks of text in the given range of the buffer. Chunk boundaries are
    /// arbitrary, because the text is stored in a [`Rope`](text::Rope), but each returned chunk
    /// has a single syntax highlighting style and diagnostic status.
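    ///
    /// A minimal usage sketch (illustrative only, not a doctest; assumes `snapshot` is a
    /// `BufferSnapshot` for a buffer with a language attached):
    ///
    /// ```ignore
    /// let mut plain = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries at most one syntax highlight id and one diagnostic severity.
    ///     plain.push_str(chunk.text);
    /// }
    /// ```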
3543 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3544 let range = range.start.to_offset(self)..range.end.to_offset(self);
3545
3546 let mut syntax = None;
3547 if language_aware {
3548 syntax = Some(self.get_highlights(range.clone()));
3549 }
3550 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3551 let diagnostics = language_aware;
3552 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3553 }
3554
3555 pub fn highlighted_text_for_range<T: ToOffset>(
3556 &self,
3557 range: Range<T>,
3558 override_style: Option<HighlightStyle>,
3559 syntax_theme: &SyntaxTheme,
3560 ) -> HighlightedText {
3561 HighlightedText::from_buffer_range(
3562 range,
3563 &self.text,
3564 &self.syntax,
3565 override_style,
3566 syntax_theme,
3567 )
3568 }
3569
3570 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// The callback-based API lets a single string allocation be reused for every line.
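    ///
    /// A hypothetical internal usage sketch (illustrative only; assumes `snapshot` is a
    /// `BufferSnapshot`, crate-internal use):
    ///
    /// ```ignore
    /// // Count the non-blank lines among the first ten rows.
    /// let mut non_blank = 0;
    /// snapshot.for_each_line(Point::new(0, 0)..Point::new(10, 0), |_row, line| {
    ///     if !line.trim().is_empty() {
    ///         non_blank += 1;
    ///     }
    /// });
    /// ```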
3572 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3573 let mut line = String::new();
3574 let mut row = range.start.row;
3575 for chunk in self
3576 .as_rope()
3577 .chunks_in_range(range.to_offset(self))
3578 .chain(["\n"])
3579 {
3580 for (newline_ix, text) in chunk.split('\n').enumerate() {
3581 if newline_ix > 0 {
3582 callback(row, &line);
3583 row += 1;
3584 line.clear();
3585 }
3586 line.push_str(text);
3587 }
3588 }
3589 }
3590
3591 /// Iterates over every [`SyntaxLayer`] in the buffer.
3592 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3593 self.syntax_layers_for_range(0..self.len(), true)
3594 }
3595
3596 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3597 let offset = position.to_offset(self);
3598 self.syntax_layers_for_range(offset..offset, false)
3599 .filter(|l| {
3600 if let Some(ranges) = l.included_sub_ranges {
3601 ranges.iter().any(|range| {
3602 let start = range.start.to_offset(self);
3603 start <= offset && {
3604 let end = range.end.to_offset(self);
3605 offset < end
3606 }
3607 })
3608 } else {
3609 l.node().start_byte() <= offset && l.node().end_byte() > offset
3610 }
3611 })
3612 .last()
3613 }
3614
3615 pub fn syntax_layers_for_range<D: ToOffset>(
3616 &self,
3617 range: Range<D>,
3618 include_hidden: bool,
3619 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3620 self.syntax
3621 .layers_for_range(range, &self.text, include_hidden)
3622 }
3623
3624 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3625 &self,
3626 range: Range<D>,
3627 ) -> Option<SyntaxLayer<'_>> {
3628 let range = range.to_offset(self);
3629 self.syntax
3630 .layers_for_range(range, &self.text, false)
3631 .max_by(|a, b| {
3632 if a.depth != b.depth {
3633 a.depth.cmp(&b.depth)
3634 } else if a.offset.0 != b.offset.0 {
3635 a.offset.0.cmp(&b.offset.0)
3636 } else {
3637 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3638 }
3639 })
3640 }
3641
3642 /// Returns the main [`Language`].
3643 pub fn language(&self) -> Option<&Arc<Language>> {
3644 self.language.as_ref()
3645 }
3646
3647 /// Returns the [`Language`] at the given location.
3648 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3649 self.syntax_layer_at(position)
3650 .map(|info| info.language)
3651 .or(self.language.as_ref())
3652 }
3653
3654 /// Returns the settings for the language at the given location.
3655 pub fn settings_at<'a, D: ToOffset>(
3656 &'a self,
3657 position: D,
3658 cx: &'a App,
3659 ) -> Cow<'a, LanguageSettings> {
3660 language_settings(
3661 self.language_at(position).map(|l| l.name()),
3662 self.file.as_ref(),
3663 cx,
3664 )
3665 }
3666
3667 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3668 CharClassifier::new(self.language_scope_at(point))
3669 }
3670
3671 /// Returns the [`LanguageScope`] at the given location.
3672 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3673 let offset = position.to_offset(self);
3674 let mut scope = None;
3675 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3676
3677 // Use the layer that has the smallest node intersecting the given point.
3678 for layer in self
3679 .syntax
3680 .layers_for_range(offset..offset, &self.text, false)
3681 {
3682 let mut cursor = layer.node().walk();
3683
3684 let mut range = None;
3685 loop {
3686 let child_range = cursor.node().byte_range();
3687 if !child_range.contains(&offset) {
3688 break;
3689 }
3690
3691 range = Some(child_range);
3692 if cursor.goto_first_child_for_byte(offset).is_none() {
3693 break;
3694 }
3695 }
3696
3697 if let Some(range) = range
3698 && smallest_range_and_depth.as_ref().is_none_or(
3699 |(smallest_range, smallest_range_depth)| {
3700 if layer.depth > *smallest_range_depth {
3701 true
3702 } else if layer.depth == *smallest_range_depth {
3703 range.len() < smallest_range.len()
3704 } else {
3705 false
3706 }
3707 },
3708 )
3709 {
3710 smallest_range_and_depth = Some((range, layer.depth));
3711 scope = Some(LanguageScope {
3712 language: layer.language.clone(),
3713 override_id: layer.override_id(offset, &self.text),
3714 });
3715 }
3716 }
3717
3718 scope.or_else(|| {
3719 self.language.clone().map(|language| LanguageScope {
3720 language,
3721 override_id: None,
3722 })
3723 })
3724 }
3725
3726 /// Returns a tuple of the range and character kind of the word
3727 /// surrounding the given position.
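    ///
    /// A minimal sketch (illustrative only; `offset` is a hypothetical byte offset inside a word):
    ///
    /// ```ignore
    /// let (word_range, _kind) = snapshot.surrounding_word(offset, None);
    /// let word: String = snapshot.text_for_range(word_range).collect();
    /// ```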
3728 pub fn surrounding_word<T: ToOffset>(
3729 &self,
3730 start: T,
3731 scope_context: Option<CharScopeContext>,
3732 ) -> (Range<usize>, Option<CharKind>) {
3733 let mut start = start.to_offset(self);
3734 let mut end = start;
3735 let mut next_chars = self.chars_at(start).take(128).peekable();
3736 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3737
3738 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3739 let word_kind = cmp::max(
3740 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3741 next_chars.peek().copied().map(|c| classifier.kind(c)),
3742 );
3743
3744 for ch in prev_chars {
3745 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3746 start -= ch.len_utf8();
3747 } else {
3748 break;
3749 }
3750 }
3751
3752 for ch in next_chars {
3753 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3754 end += ch.len_utf8();
3755 } else {
3756 break;
3757 }
3758 }
3759
3760 (start..end, word_kind)
3761 }
3762
3763 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3764 /// range. When `require_larger` is true, the node found must be larger than the query range.
3765 ///
3766 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3767 /// be moved to the root of the tree.
3768 fn goto_node_enclosing_range(
3769 cursor: &mut tree_sitter::TreeCursor,
3770 query_range: &Range<usize>,
3771 require_larger: bool,
3772 ) -> bool {
3773 let mut ascending = false;
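        // Walk the tree top-down: keep descending while the current node still encloses the
        // query range, and once the descent overshoots, ascend until an enclosing node is found
        // again. `ascending` records that we are on the way back up, so the first enclosing
        // ancestor encountered is returned.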
3774 loop {
3775 let mut range = cursor.node().byte_range();
3776 if query_range.is_empty() {
3777 // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3779 if range.start > query_range.start {
3780 cursor.goto_previous_sibling();
3781 range = cursor.node().byte_range();
3782 }
3783 } else {
3784 // When the query range is non-empty and the current node ends exactly at the start,
3785 // move to the next sibling to find a node that extends beyond the start.
3786 if range.end == query_range.start {
3787 cursor.goto_next_sibling();
3788 range = cursor.node().byte_range();
3789 }
3790 }
3791
3792 let encloses = range.contains_inclusive(query_range)
3793 && (!require_larger || range.len() > query_range.len());
3794 if !encloses {
3795 ascending = true;
3796 if !cursor.goto_parent() {
3797 return false;
3798 }
3799 continue;
3800 } else if ascending {
3801 return true;
3802 }
3803
3804 // Descend into the current node.
3805 if cursor
3806 .goto_first_child_for_byte(query_range.start)
3807 .is_none()
3808 {
3809 return true;
3810 }
3811 }
3812 }
3813
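    /// Returns the smallest syntax node that both contains the given range and is strictly larger
    /// than it, keeping the smallest such candidate across all syntax layers.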
3814 pub fn syntax_ancestor<'a, T: ToOffset>(
3815 &'a self,
3816 range: Range<T>,
3817 ) -> Option<tree_sitter::Node<'a>> {
3818 let range = range.start.to_offset(self)..range.end.to_offset(self);
3819 let mut result: Option<tree_sitter::Node<'a>> = None;
3820 for layer in self
3821 .syntax
3822 .layers_for_range(range.clone(), &self.text, true)
3823 {
3824 let mut cursor = layer.node().walk();
3825
3826 // Find the node that both contains the range and is larger than it.
3827 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3828 continue;
3829 }
3830
3831 let left_node = cursor.node();
3832 let mut layer_result = left_node;
3833
3834 // For an empty range, try to find another node immediately to the right of the range.
3835 if left_node.end_byte() == range.start {
3836 let mut right_node = None;
3837 while !cursor.goto_next_sibling() {
3838 if !cursor.goto_parent() {
3839 break;
3840 }
3841 }
3842
3843 while cursor.node().start_byte() == range.start {
3844 right_node = Some(cursor.node());
3845 if !cursor.goto_first_child() {
3846 break;
3847 }
3848 }
3849
3850 // If there is a candidate node on both sides of the (empty) range, then
3851 // decide between the two by favoring a named node over an anonymous token.
3852 // If both nodes are the same in that regard, favor the right one.
3853 if let Some(right_node) = right_node
3854 && (right_node.is_named() || !left_node.is_named())
3855 {
3856 layer_result = right_node;
3857 }
3858 }
3859
3860 if let Some(previous_result) = &result
3861 && previous_result.byte_range().len() < layer_result.byte_range().len()
3862 {
3863 continue;
3864 }
3865 result = Some(layer_result);
3866 }
3867
3868 result
3869 }
3870
3871 /// Find the previous sibling syntax node at the given range.
3872 ///
3873 /// This function locates the syntax node that precedes the node containing
3874 /// the given range. It searches hierarchically by:
3875 /// 1. Finding the node that contains the given range
3876 /// 2. Looking for the previous sibling at the same tree level
3877 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3878 ///
3879 /// Returns `None` if there is no previous sibling at any ancestor level.
3880 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3881 &'a self,
3882 range: Range<T>,
3883 ) -> Option<tree_sitter::Node<'a>> {
3884 let range = range.start.to_offset(self)..range.end.to_offset(self);
3885 let mut result: Option<tree_sitter::Node<'a>> = None;
3886
3887 for layer in self
3888 .syntax
3889 .layers_for_range(range.clone(), &self.text, true)
3890 {
3891 let mut cursor = layer.node().walk();
3892
3893 // Find the node that contains the range
3894 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3895 continue;
3896 }
3897
3898 // Look for the previous sibling, moving up ancestor levels if needed
3899 loop {
3900 if cursor.goto_previous_sibling() {
3901 let layer_result = cursor.node();
3902
3903 if let Some(previous_result) = &result {
3904 if previous_result.byte_range().end < layer_result.byte_range().end {
3905 continue;
3906 }
3907 }
3908 result = Some(layer_result);
3909 break;
3910 }
3911
3912 // No sibling found at this level, try moving up to parent
3913 if !cursor.goto_parent() {
3914 break;
3915 }
3916 }
3917 }
3918
3919 result
3920 }
3921
3922 /// Find the next sibling syntax node at the given range.
3923 ///
3924 /// This function locates the syntax node that follows the node containing
3925 /// the given range. It searches hierarchically by:
3926 /// 1. Finding the node that contains the given range
3927 /// 2. Looking for the next sibling at the same tree level
3928 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3929 ///
3930 /// Returns `None` if there is no next sibling at any ancestor level.
3931 pub fn syntax_next_sibling<'a, T: ToOffset>(
3932 &'a self,
3933 range: Range<T>,
3934 ) -> Option<tree_sitter::Node<'a>> {
3935 let range = range.start.to_offset(self)..range.end.to_offset(self);
3936 let mut result: Option<tree_sitter::Node<'a>> = None;
3937
3938 for layer in self
3939 .syntax
3940 .layers_for_range(range.clone(), &self.text, true)
3941 {
3942 let mut cursor = layer.node().walk();
3943
3944 // Find the node that contains the range
3945 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3946 continue;
3947 }
3948
3949 // Look for the next sibling, moving up ancestor levels if needed
3950 loop {
3951 if cursor.goto_next_sibling() {
3952 let layer_result = cursor.node();
3953
3954 if let Some(previous_result) = &result {
3955 if previous_result.byte_range().start > layer_result.byte_range().start {
3956 continue;
3957 }
3958 }
3959 result = Some(layer_result);
3960 break;
3961 }
3962
3963 // No sibling found at this level, try moving up to parent
3964 if !cursor.goto_parent() {
3965 break;
3966 }
3967 }
3968 }
3969
3970 result
3971 }
3972
    /// Returns the root syntax node within the given row.
3974 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3975 let start_offset = position.to_offset(self);
3976
3977 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3978
3979 let layer = self
3980 .syntax
3981 .layers_for_range(start_offset..start_offset, &self.text, true)
3982 .next()?;
3983
3984 let mut cursor = layer.node().walk();
3985
3986 // Descend to the first leaf that touches the start of the range.
3987 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3988 if cursor.node().end_byte() == start_offset {
3989 cursor.goto_next_sibling();
3990 }
3991 }
3992
3993 // Ascend to the root node within the same row.
3994 while cursor.goto_parent() {
3995 if cursor.node().start_position().row != row {
3996 break;
3997 }
3998 }
3999
4000 Some(cursor.node())
4001 }
4002
4003 /// Returns the outline for the buffer.
4004 ///
4005 /// This method allows passing an optional [`SyntaxTheme`] to
4006 /// syntax-highlight the returned symbols.
4007 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4008 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4009 }
4010
4011 /// Returns all the symbols that contain the given position.
4012 ///
4013 /// This method allows passing an optional [`SyntaxTheme`] to
4014 /// syntax-highlight the returned symbols.
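    ///
    /// A minimal sketch (illustrative only; `cursor_offset` is a hypothetical byte offset):
    ///
    /// ```ignore
    /// for item in snapshot.symbols_containing(cursor_offset, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```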
4015 pub fn symbols_containing<T: ToOffset>(
4016 &self,
4017 position: T,
4018 theme: Option<&SyntaxTheme>,
4019 ) -> Vec<OutlineItem<Anchor>> {
4020 let position = position.to_offset(self);
4021 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4022 let end = self.clip_offset(position + 1, Bias::Right);
4023 let mut items = self.outline_items_containing(start..end, false, theme);
4024 let mut prev_depth = None;
4025 items.retain(|item| {
4026 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4027 prev_depth = Some(item.depth);
4028 result
4029 });
4030 items
4031 }
4032
4033 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4034 let range = range.to_offset(self);
4035 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4036 grammar.outline_config.as_ref().map(|c| &c.query)
4037 });
4038 let configs = matches
4039 .grammars()
4040 .iter()
4041 .map(|g| g.outline_config.as_ref().unwrap())
4042 .collect::<Vec<_>>();
4043
4044 while let Some(mat) = matches.peek() {
4045 let config = &configs[mat.grammar_index];
4046 let containing_item_node = maybe!({
4047 let item_node = mat.captures.iter().find_map(|cap| {
4048 if cap.index == config.item_capture_ix {
4049 Some(cap.node)
4050 } else {
4051 None
4052 }
4053 })?;
4054
4055 let item_byte_range = item_node.byte_range();
4056 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4057 None
4058 } else {
4059 Some(item_node)
4060 }
4061 });
4062
4063 if let Some(item_node) = containing_item_node {
4064 return Some(
4065 Point::from_ts_point(item_node.start_position())
4066 ..Point::from_ts_point(item_node.end_position()),
4067 );
4068 }
4069
4070 matches.advance();
4071 }
4072 None
4073 }
4074
4075 pub fn outline_items_containing<T: ToOffset>(
4076 &self,
4077 range: Range<T>,
4078 include_extra_context: bool,
4079 theme: Option<&SyntaxTheme>,
4080 ) -> Vec<OutlineItem<Anchor>> {
4081 self.outline_items_containing_internal(
4082 range,
4083 include_extra_context,
4084 theme,
4085 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4086 )
4087 }
4088
4089 pub fn outline_items_as_points_containing<T: ToOffset>(
4090 &self,
4091 range: Range<T>,
4092 include_extra_context: bool,
4093 theme: Option<&SyntaxTheme>,
4094 ) -> Vec<OutlineItem<Point>> {
4095 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4096 range
4097 })
4098 }
4099
4100 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4101 &self,
4102 range: Range<T>,
4103 include_extra_context: bool,
4104 theme: Option<&SyntaxTheme>,
4105 ) -> Vec<OutlineItem<usize>> {
4106 self.outline_items_containing_internal(
4107 range,
4108 include_extra_context,
4109 theme,
4110 |buffer, range| range.to_offset(buffer),
4111 )
4112 }
4113
4114 fn outline_items_containing_internal<T: ToOffset, U>(
4115 &self,
4116 range: Range<T>,
4117 include_extra_context: bool,
4118 theme: Option<&SyntaxTheme>,
4119 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4120 ) -> Vec<OutlineItem<U>> {
4121 let range = range.to_offset(self);
4122 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4123 grammar.outline_config.as_ref().map(|c| &c.query)
4124 });
4125
4126 let mut items = Vec::new();
4127 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4128 while let Some(mat) = matches.peek() {
4129 let config = matches.grammars()[mat.grammar_index]
4130 .outline_config
4131 .as_ref()
4132 .unwrap();
4133 if let Some(item) =
4134 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4135 {
4136 items.push(item);
4137 } else if let Some(capture) = mat
4138 .captures
4139 .iter()
4140 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4141 {
4142 let capture_range = capture.node.start_position()..capture.node.end_position();
4143 let mut capture_row_range =
4144 capture_range.start.row as u32..capture_range.end.row as u32;
4145 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4146 {
4147 capture_row_range.end -= 1;
4148 }
4149 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4150 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4151 last_row_range.end = capture_row_range.end;
4152 } else {
4153 annotation_row_ranges.push(capture_row_range);
4154 }
4155 } else {
4156 annotation_row_ranges.push(capture_row_range);
4157 }
4158 }
4159 matches.advance();
4160 }
4161
4162 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4163
        // Assign depths based on containment relationships and convert ranges via the callback.
4165 let mut item_ends_stack = Vec::<Point>::new();
4166 let mut anchor_items = Vec::new();
4167 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4168 for item in items {
4169 while let Some(last_end) = item_ends_stack.last().copied() {
4170 if last_end < item.range.end {
4171 item_ends_stack.pop();
4172 } else {
4173 break;
4174 }
4175 }
4176
4177 let mut annotation_row_range = None;
4178 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4179 let row_preceding_item = item.range.start.row.saturating_sub(1);
4180 if next_annotation_row_range.end < row_preceding_item {
4181 annotation_row_ranges.next();
4182 } else {
4183 if next_annotation_row_range.end == row_preceding_item {
4184 annotation_row_range = Some(next_annotation_row_range.clone());
4185 annotation_row_ranges.next();
4186 }
4187 break;
4188 }
4189 }
4190
4191 anchor_items.push(OutlineItem {
4192 depth: item_ends_stack.len(),
4193 range: range_callback(self, item.range.clone()),
4194 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4195 text: item.text,
4196 highlight_ranges: item.highlight_ranges,
4197 name_ranges: item.name_ranges,
4198 body_range: item.body_range.map(|r| range_callback(self, r)),
4199 annotation_range: annotation_row_range.map(|annotation_range| {
4200 let point_range = Point::new(annotation_range.start, 0)
4201 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4202 range_callback(self, point_range)
4203 }),
4204 });
4205 item_ends_stack.push(item.range.end);
4206 }
4207
4208 anchor_items
4209 }
4210
4211 fn next_outline_item(
4212 &self,
4213 config: &OutlineConfig,
4214 mat: &SyntaxMapMatch,
4215 range: &Range<usize>,
4216 include_extra_context: bool,
4217 theme: Option<&SyntaxTheme>,
4218 ) -> Option<OutlineItem<Point>> {
4219 let item_node = mat.captures.iter().find_map(|cap| {
4220 if cap.index == config.item_capture_ix {
4221 Some(cap.node)
4222 } else {
4223 None
4224 }
4225 })?;
4226
4227 let item_byte_range = item_node.byte_range();
4228 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4229 return None;
4230 }
4231 let item_point_range = Point::from_ts_point(item_node.start_position())
4232 ..Point::from_ts_point(item_node.end_position());
4233
4234 let mut open_point = None;
4235 let mut close_point = None;
4236
4237 let mut buffer_ranges = Vec::new();
4238 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4239 let mut range = node.start_byte()..node.end_byte();
4240 let start = node.start_position();
4241 if node.end_position().row > start.row {
4242 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4243 }
4244
4245 if !range.is_empty() {
4246 buffer_ranges.push((range, node_is_name));
4247 }
4248 };
4249
4250 for capture in mat.captures {
4251 if capture.index == config.name_capture_ix {
4252 add_to_buffer_ranges(capture.node, true);
4253 } else if Some(capture.index) == config.context_capture_ix
4254 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4255 {
4256 add_to_buffer_ranges(capture.node, false);
4257 } else {
4258 if Some(capture.index) == config.open_capture_ix {
4259 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4260 } else if Some(capture.index) == config.close_capture_ix {
4261 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4262 }
4263 }
4264 }
4265
4266 if buffer_ranges.is_empty() {
4267 return None;
4268 }
4269 let source_range_for_text =
4270 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4271
4272 let mut text = String::new();
4273 let mut highlight_ranges = Vec::new();
4274 let mut name_ranges = Vec::new();
4275 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4276 let mut last_buffer_range_end = 0;
4277 for (buffer_range, is_name) in buffer_ranges {
4278 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4279 if space_added {
4280 text.push(' ');
4281 }
4282 let before_append_len = text.len();
4283 let mut offset = buffer_range.start;
4284 chunks.seek(buffer_range.clone());
4285 for mut chunk in chunks.by_ref() {
4286 if chunk.text.len() > buffer_range.end - offset {
4287 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4288 offset = buffer_range.end;
4289 } else {
4290 offset += chunk.text.len();
4291 }
4292 let style = chunk
4293 .syntax_highlight_id
4294 .zip(theme)
4295 .and_then(|(highlight, theme)| highlight.style(theme));
4296 if let Some(style) = style {
4297 let start = text.len();
4298 let end = start + chunk.text.len();
4299 highlight_ranges.push((start..end, style));
4300 }
4301 text.push_str(chunk.text);
4302 if offset >= buffer_range.end {
4303 break;
4304 }
4305 }
4306 if is_name {
4307 let after_append_len = text.len();
4308 let start = if space_added && !name_ranges.is_empty() {
4309 before_append_len - 1
4310 } else {
4311 before_append_len
4312 };
4313 name_ranges.push(start..after_append_len);
4314 }
4315 last_buffer_range_end = buffer_range.end;
4316 }
4317
4318 Some(OutlineItem {
4319 depth: 0, // We'll calculate the depth later
4320 range: item_point_range,
4321 source_range_for_text: source_range_for_text.to_point(self),
4322 text,
4323 highlight_ranges,
4324 name_ranges,
4325 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4326 annotation_range: None,
4327 })
4328 }
4329
4330 pub fn function_body_fold_ranges<T: ToOffset>(
4331 &self,
4332 within: Range<T>,
4333 ) -> impl Iterator<Item = Range<usize>> + '_ {
4334 self.text_object_ranges(within, TreeSitterOptions::default())
4335 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4336 }
4337
4338 /// For each grammar in the language, runs the provided
4339 /// [`tree_sitter::Query`] against the given range.
4340 pub fn matches(
4341 &self,
4342 range: Range<usize>,
4343 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4344 ) -> SyntaxMapMatches<'_> {
4345 self.syntax.matches(range, self, query)
4346 }
4347
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that
    /// intersect with those chunks. As a result, it may return more bracket pairs than the range
    /// itself contains.
    ///
    /// Chunks listed in `known_chunks` are skipped, and the resulting bracket match collections
    /// are not ordered.
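    ///
    /// A minimal sketch (illustrative only; `known` is a hypothetical set of chunk row ranges that
    /// have already been fetched):
    ///
    /// ```ignore
    /// let per_chunk = snapshot.fetch_bracket_ranges(0..snapshot.len(), Some(&known));
    /// for (rows, brackets) in &per_chunk {
    ///     println!("rows {:?}: {} bracket pairs", rows, brackets.len());
    /// }
    /// ```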
4353 pub fn fetch_bracket_ranges(
4354 &self,
4355 range: Range<usize>,
4356 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4357 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4358 let mut all_bracket_matches = HashMap::default();
4359
4360 for chunk in self
4361 .tree_sitter_data
4362 .chunks
4363 .applicable_chunks(&[range.to_point(self)])
4364 {
4365 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4366 continue;
4367 }
4368 let chunk_range = chunk.anchor_range();
4369 let chunk_range = chunk_range.to_offset(&self);
4370
4371 if let Some(cached_brackets) =
4372 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4373 {
4374 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4375 continue;
4376 }
4377
4378 let mut all_brackets = Vec::new();
4379 let mut opens = Vec::new();
4380 let mut color_pairs = Vec::new();
4381
4382 let mut matches = self.syntax.matches_with_options(
4383 chunk_range.clone(),
4384 &self.text,
4385 TreeSitterOptions {
4386 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4387 max_start_depth: None,
4388 },
4389 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4390 );
4391 let configs = matches
4392 .grammars()
4393 .iter()
4394 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4395 .collect::<Vec<_>>();
4396
4397 while let Some(mat) = matches.peek() {
4398 let mut open = None;
4399 let mut close = None;
4400 let syntax_layer_depth = mat.depth;
4401 let config = configs[mat.grammar_index];
4402 let pattern = &config.patterns[mat.pattern_index];
4403 for capture in mat.captures {
4404 if capture.index == config.open_capture_ix {
4405 open = Some(capture.node.byte_range());
4406 } else if capture.index == config.close_capture_ix {
4407 close = Some(capture.node.byte_range());
4408 }
4409 }
4410
4411 matches.advance();
4412
4413 let Some((open_range, close_range)) = open.zip(close) else {
4414 continue;
4415 };
4416
4417 let bracket_range = open_range.start..=close_range.end;
4418 if !bracket_range.overlaps(&chunk_range) {
4419 continue;
4420 }
4421
4422 let index = all_brackets.len();
4423 all_brackets.push(BracketMatch {
4424 open_range: open_range.clone(),
4425 close_range: close_range.clone(),
4426 newline_only: pattern.newline_only,
4427 syntax_layer_depth,
4428 color_index: None,
4429 });
4430
                // Certain languages have "brackets" that are not really brackets, e.g. tags, and
                // such a bracket pair will match the entire tag with all of the text inside it.
                // For now, avoid coloring any pair where both delimiters are longer than a single
                // character. We still need to colorize `<Element/>` bracket pairs, so the check
                // cannot be made stricter than this.
4435 let should_color =
4436 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4437 if should_color {
4438 opens.push(open_range.clone());
4439 color_pairs.push((open_range, close_range, index));
4440 }
4441 }
4442
4443 opens.sort_by_key(|r| (r.start, r.end));
4444 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4445 color_pairs.sort_by_key(|(_, close, _)| close.end);
4446
4447 let mut open_stack = Vec::new();
4448 let mut open_index = 0;
4449 for (open, close, index) in color_pairs {
4450 while open_index < opens.len() && opens[open_index].start < close.start {
4451 open_stack.push(opens[open_index].clone());
4452 open_index += 1;
4453 }
4454
4455 if open_stack.last() == Some(&open) {
4456 let depth_index = open_stack.len() - 1;
4457 all_brackets[index].color_index = Some(depth_index);
4458 open_stack.pop();
4459 }
4460 }
4461
4462 all_brackets.sort_by_key(|bracket_match| {
4463 (bracket_match.open_range.start, bracket_match.open_range.end)
4464 });
4465
4466 if let empty_slot @ None =
4467 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4468 {
4469 *empty_slot = Some(all_brackets.clone());
4470 }
4471 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4472 }
4473
4474 all_bracket_matches
4475 }
4476
4477 pub fn all_bracket_ranges(
4478 &self,
4479 range: Range<usize>,
4480 ) -> impl Iterator<Item = BracketMatch<usize>> {
4481 self.fetch_bracket_ranges(range.clone(), None)
4482 .into_values()
4483 .flatten()
4484 .filter(move |bracket_match| {
4485 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4486 bracket_range.overlaps(&range)
4487 })
4488 }
4489
    /// Returns bracket range pairs overlapping or adjacent to `range`.
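    ///
    /// A minimal sketch (illustrative only; `selection` and `highlight` are hypothetical):
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(selection.start..selection.end) {
    ///     highlight(pair.open_range.clone());
    ///     highlight(pair.close_range.clone());
    /// }
    /// ```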
4491 pub fn bracket_ranges<T: ToOffset>(
4492 &self,
4493 range: Range<T>,
4494 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4495 // Find bracket pairs that *inclusively* contain the given range.
4496 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4497 self.all_bracket_ranges(range)
4498 .filter(|pair| !pair.newline_only)
4499 }
4500
4501 pub fn debug_variables_query<T: ToOffset>(
4502 &self,
4503 range: Range<T>,
4504 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4505 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4506
4507 let mut matches = self.syntax.matches_with_options(
4508 range.clone(),
4509 &self.text,
4510 TreeSitterOptions::default(),
4511 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4512 );
4513
4514 let configs = matches
4515 .grammars()
4516 .iter()
4517 .map(|grammar| grammar.debug_variables_config.as_ref())
4518 .collect::<Vec<_>>();
4519
4520 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4521
4522 iter::from_fn(move || {
4523 loop {
4524 while let Some(capture) = captures.pop() {
4525 if capture.0.overlaps(&range) {
4526 return Some(capture);
4527 }
4528 }
4529
4530 let mat = matches.peek()?;
4531
4532 let Some(config) = configs[mat.grammar_index].as_ref() else {
4533 matches.advance();
4534 continue;
4535 };
4536
4537 for capture in mat.captures {
4538 let Some(ix) = config
4539 .objects_by_capture_ix
4540 .binary_search_by_key(&capture.index, |e| e.0)
4541 .ok()
4542 else {
4543 continue;
4544 };
4545 let text_object = config.objects_by_capture_ix[ix].1;
4546 let byte_range = capture.node.byte_range();
4547
4548 let mut found = false;
4549 for (range, existing) in captures.iter_mut() {
4550 if existing == &text_object {
4551 range.start = range.start.min(byte_range.start);
4552 range.end = range.end.max(byte_range.end);
4553 found = true;
4554 break;
4555 }
4556 }
4557
4558 if !found {
4559 captures.push((byte_range, text_object));
4560 }
4561 }
4562
4563 matches.advance();
4564 }
4565 })
4566 }
4567
4568 pub fn text_object_ranges<T: ToOffset>(
4569 &self,
4570 range: Range<T>,
4571 options: TreeSitterOptions,
4572 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4573 let range =
4574 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4575
4576 let mut matches =
4577 self.syntax
4578 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4579 grammar.text_object_config.as_ref().map(|c| &c.query)
4580 });
4581
4582 let configs = matches
4583 .grammars()
4584 .iter()
4585 .map(|grammar| grammar.text_object_config.as_ref())
4586 .collect::<Vec<_>>();
4587
4588 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4589
4590 iter::from_fn(move || {
4591 loop {
4592 while let Some(capture) = captures.pop() {
4593 if capture.0.overlaps(&range) {
4594 return Some(capture);
4595 }
4596 }
4597
4598 let mat = matches.peek()?;
4599
4600 let Some(config) = configs[mat.grammar_index].as_ref() else {
4601 matches.advance();
4602 continue;
4603 };
4604
4605 for capture in mat.captures {
4606 let Some(ix) = config
4607 .text_objects_by_capture_ix
4608 .binary_search_by_key(&capture.index, |e| e.0)
4609 .ok()
4610 else {
4611 continue;
4612 };
4613 let text_object = config.text_objects_by_capture_ix[ix].1;
4614 let byte_range = capture.node.byte_range();
4615
4616 let mut found = false;
4617 for (range, existing) in captures.iter_mut() {
4618 if existing == &text_object {
4619 range.start = range.start.min(byte_range.start);
4620 range.end = range.end.max(byte_range.end);
4621 found = true;
4622 break;
4623 }
4624 }
4625
4626 if !found {
4627 captures.push((byte_range, text_object));
4628 }
4629 }
4630
4631 matches.advance();
4632 }
4633 })
4634 }
4635
    /// Returns enclosing bracket ranges containing the given range.
4637 pub fn enclosing_bracket_ranges<T: ToOffset>(
4638 &self,
4639 range: Range<T>,
4640 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4641 let range = range.start.to_offset(self)..range.end.to_offset(self);
4642
4643 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4644 let max_depth = result
4645 .iter()
4646 .map(|mat| mat.syntax_layer_depth)
4647 .max()
4648 .unwrap_or(0);
4649 result.into_iter().filter(move |pair| {
4650 pair.open_range.start <= range.start
4651 && pair.close_range.end >= range.end
4652 && pair.syntax_layer_depth == max_depth
4653 })
4654 }
4655
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no
    /// bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
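    ///
    /// A minimal sketch (illustrative only; `cursor` is a hypothetical byte offset, and the filter
    /// keeps only single-character delimiters):
    ///
    /// ```ignore
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(
    ///     cursor..cursor,
    ///     Some(&|open, close| open.len() == 1 && close.len() == 1),
    /// );
    /// ```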
4659 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4660 &self,
4661 range: Range<T>,
4662 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4663 ) -> Option<(Range<usize>, Range<usize>)> {
4664 let range = range.start.to_offset(self)..range.end.to_offset(self);
4665
4666 // Get the ranges of the innermost pair of brackets.
4667 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4668
4669 for pair in self.enclosing_bracket_ranges(range) {
4670 if let Some(range_filter) = range_filter
4671 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4672 {
4673 continue;
4674 }
4675
4676 let len = pair.close_range.end - pair.open_range.start;
4677
4678 if let Some((existing_open, existing_close)) = &result {
4679 let existing_len = existing_close.end - existing_open.start;
4680 if len > existing_len {
4681 continue;
4682 }
4683 }
4684
4685 result = Some((pair.open_range, pair.close_range));
4686 }
4687
4688 result
4689 }
4690
    /// Returns offset ranges for any matches of the redaction query.
4692 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4693 /// will be run on the relevant section of the buffer.
4694 pub fn redacted_ranges<T: ToOffset>(
4695 &self,
4696 range: Range<T>,
4697 ) -> impl Iterator<Item = Range<usize>> + '_ {
4698 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4699 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4700 grammar
4701 .redactions_config
4702 .as_ref()
4703 .map(|config| &config.query)
4704 });
4705
4706 let configs = syntax_matches
4707 .grammars()
4708 .iter()
4709 .map(|grammar| grammar.redactions_config.as_ref())
4710 .collect::<Vec<_>>();
4711
4712 iter::from_fn(move || {
4713 let redacted_range = syntax_matches
4714 .peek()
4715 .and_then(|mat| {
4716 configs[mat.grammar_index].and_then(|config| {
4717 mat.captures
4718 .iter()
4719 .find(|capture| capture.index == config.redaction_capture_ix)
4720 })
4721 })
4722 .map(|mat| mat.node.byte_range());
4723 syntax_matches.advance();
4724 redacted_range
4725 })
4726 }
4727
4728 pub fn injections_intersecting_range<T: ToOffset>(
4729 &self,
4730 range: Range<T>,
4731 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4732 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4733
4734 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4735 grammar
4736 .injection_config
4737 .as_ref()
4738 .map(|config| &config.query)
4739 });
4740
4741 let configs = syntax_matches
4742 .grammars()
4743 .iter()
4744 .map(|grammar| grammar.injection_config.as_ref())
4745 .collect::<Vec<_>>();
4746
4747 iter::from_fn(move || {
4748 let ranges = syntax_matches.peek().and_then(|mat| {
4749 let config = &configs[mat.grammar_index]?;
4750 let content_capture_range = mat.captures.iter().find_map(|capture| {
4751 if capture.index == config.content_capture_ix {
4752 Some(capture.node.byte_range())
4753 } else {
4754 None
4755 }
4756 })?;
4757 let language = self.language_at(content_capture_range.start)?;
4758 Some((content_capture_range, language))
4759 });
4760 syntax_matches.advance();
4761 ranges
4762 })
4763 }
4764
4765 pub fn runnable_ranges(
4766 &self,
4767 offset_range: Range<usize>,
4768 ) -> impl Iterator<Item = RunnableRange> + '_ {
4769 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4770 grammar.runnable_config.as_ref().map(|config| &config.query)
4771 });
4772
4773 let test_configs = syntax_matches
4774 .grammars()
4775 .iter()
4776 .map(|grammar| grammar.runnable_config.as_ref())
4777 .collect::<Vec<_>>();
4778
4779 iter::from_fn(move || {
4780 loop {
4781 let mat = syntax_matches.peek()?;
4782
4783 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4784 let mut run_range = None;
4785 let full_range = mat.captures.iter().fold(
4786 Range {
4787 start: usize::MAX,
4788 end: 0,
4789 },
4790 |mut acc, next| {
4791 let byte_range = next.node.byte_range();
4792 if acc.start > byte_range.start {
4793 acc.start = byte_range.start;
4794 }
4795 if acc.end < byte_range.end {
4796 acc.end = byte_range.end;
4797 }
4798 acc
4799 },
4800 );
4801 if full_range.start > full_range.end {
4802 // We did not find a full spanning range of this match.
4803 return None;
4804 }
4805 let extra_captures: SmallVec<[_; 1]> =
4806 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4807 test_configs
4808 .extra_captures
4809 .get(capture.index as usize)
4810 .cloned()
4811 .and_then(|tag_name| match tag_name {
4812 RunnableCapture::Named(name) => {
4813 Some((capture.node.byte_range(), name))
4814 }
4815 RunnableCapture::Run => {
4816 let _ = run_range.insert(capture.node.byte_range());
4817 None
4818 }
4819 })
4820 }));
4821 let run_range = run_range?;
4822 let tags = test_configs
4823 .query
4824 .property_settings(mat.pattern_index)
4825 .iter()
4826 .filter_map(|property| {
4827 if *property.key == *"tag" {
4828 property
4829 .value
4830 .as_ref()
4831 .map(|value| RunnableTag(value.to_string().into()))
4832 } else {
4833 None
4834 }
4835 })
4836 .collect();
4837 let extra_captures = extra_captures
4838 .into_iter()
4839 .map(|(range, name)| {
4840 (
4841 name.to_string(),
4842 self.text_for_range(range).collect::<String>(),
4843 )
4844 })
4845 .collect();
4846 // All tags should have the same range.
4847 Some(RunnableRange {
4848 run_range,
4849 full_range,
4850 runnable: Runnable {
4851 tags,
4852 language: mat.language,
4853 buffer: self.remote_id(),
4854 },
4855 extra_captures,
4856 buffer_id: self.remote_id(),
4857 })
4858 });
4859
4860 syntax_matches.advance();
4861 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns None. But if a match did
                    // not contain a run marker, we don't want to end the iterator early, so we
                    // loop around and try the next match instead.
4864 return test_range;
4865 }
4866 }
4867 })
4868 }
4869
    /// Returns selections intersecting the given range for each remote peer, and for the local
    /// replica as well when `include_local` is true.
4871 #[allow(clippy::type_complexity)]
4872 pub fn selections_in_range(
4873 &self,
4874 range: Range<Anchor>,
4875 include_local: bool,
4876 ) -> impl Iterator<
4877 Item = (
4878 ReplicaId,
4879 bool,
4880 CursorShape,
4881 impl Iterator<Item = &Selection<Anchor>> + '_,
4882 ),
4883 > + '_ {
4884 self.remote_selections
4885 .iter()
4886 .filter(move |(replica_id, set)| {
4887 (include_local || **replica_id != self.text.replica_id())
4888 && !set.selections.is_empty()
4889 })
4890 .map(move |(replica_id, set)| {
4891 let start_ix = match set.selections.binary_search_by(|probe| {
4892 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4893 }) {
4894 Ok(ix) | Err(ix) => ix,
4895 };
4896 let end_ix = match set.selections.binary_search_by(|probe| {
4897 probe.start.cmp(&range.end, self).then(Ordering::Less)
4898 }) {
4899 Ok(ix) | Err(ix) => ix,
4900 };
4901
4902 (
4903 *replica_id,
4904 set.line_mode,
4905 set.cursor_shape,
4906 set.selections[start_ix..end_ix].iter(),
4907 )
4908 })
4909 }
4910
    /// Returns whether the buffer contains any diagnostics.
4912 pub fn has_diagnostics(&self) -> bool {
4913 !self.diagnostics.is_empty()
4914 }
4915
4916 /// Returns all the diagnostics intersecting the given range.
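    ///
    /// A minimal sketch (illustrative only), resolving each entry's range to byte offsets:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     eprintln!("{:?}: {:?}", entry.range, entry.diagnostic.severity);
    /// }
    /// ```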
4917 pub fn diagnostics_in_range<'a, T, O>(
4918 &'a self,
4919 search_range: Range<T>,
4920 reversed: bool,
4921 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4922 where
4923 T: 'a + Clone + ToOffset,
4924 O: 'a + FromAnchor,
4925 {
4926 let mut iterators: Vec<_> = self
4927 .diagnostics
4928 .iter()
4929 .map(|(_, collection)| {
4930 collection
4931 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4932 .peekable()
4933 })
4934 .collect();
4935
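        // Merge the per-language-server iterators, always yielding the next entry in order of
        // range start, then severity, then group id (reversed when `reversed` is set).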
4936 std::iter::from_fn(move || {
4937 let (next_ix, _) = iterators
4938 .iter_mut()
4939 .enumerate()
4940 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4941 .min_by(|(_, a), (_, b)| {
4942 let cmp = a
4943 .range
4944 .start
4945 .cmp(&b.range.start, self)
4946 // when range is equal, sort by diagnostic severity
4947 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4948 // and stabilize order with group_id
4949 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4950 if reversed { cmp.reverse() } else { cmp }
4951 })?;
4952 iterators[next_ix]
4953 .next()
4954 .map(
4955 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4956 diagnostic,
4957 range: FromAnchor::from_anchor(&range.start, self)
4958 ..FromAnchor::from_anchor(&range.end, self),
4959 },
4960 )
4961 })
4962 }
4963
4964 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4965 /// should be used instead.
4966 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4967 &self.diagnostics
4968 }
4969
4970 /// Returns all the diagnostic groups associated with the given
4971 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4973 pub fn diagnostic_groups(
4974 &self,
4975 language_server_id: Option<LanguageServerId>,
4976 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4977 let mut groups = Vec::new();
4978
4979 if let Some(language_server_id) = language_server_id {
4980 if let Ok(ix) = self
4981 .diagnostics
4982 .binary_search_by_key(&language_server_id, |e| e.0)
4983 {
4984 self.diagnostics[ix]
4985 .1
4986 .groups(language_server_id, &mut groups, self);
4987 }
4988 } else {
4989 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4990 diagnostics.groups(*language_server_id, &mut groups, self);
4991 }
4992 }
4993
4994 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4995 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4996 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4997 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4998 });
4999
5000 groups
5001 }
5002
5003 /// Returns an iterator over the diagnostics for the given group.
5004 pub fn diagnostic_group<O>(
5005 &self,
5006 group_id: usize,
5007 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5008 where
5009 O: FromAnchor + 'static,
5010 {
5011 self.diagnostics
5012 .iter()
5013 .flat_map(move |(_, set)| set.group(group_id, self))
5014 }
5015
5016 /// An integer version number that accounts for all updates besides
5017 /// the buffer's text itself (which is versioned via a version vector).
5018 pub fn non_text_state_update_count(&self) -> usize {
5019 self.non_text_state_update_count
5020 }
5021
5022 /// An integer version that changes when the buffer's syntax changes.
5023 pub fn syntax_update_count(&self) -> usize {
5024 self.syntax.update_count()
5025 }
5026
    /// Returns a snapshot of the underlying file.
5028 pub fn file(&self) -> Option<&Arc<dyn File>> {
5029 self.file.as_ref()
5030 }
5031
5032 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5033 if let Some(file) = self.file() {
5034 if file.path().file_name().is_none() || include_root {
5035 Some(file.full_path(cx).to_string_lossy().into_owned())
5036 } else {
5037 Some(file.path().display(file.path_style(cx)).to_string())
5038 }
5039 } else {
5040 None
5041 }
5042 }
5043
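    /// Collects the distinct words in the given offset range, mapping each word's text to its
    /// anchor range, optionally filtered by [`WordsQuery::fuzzy_contents`].
    ///
    /// A minimal sketch (illustrative only):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, _anchor_range) in &words {
    ///     println!("{word}");
    /// }
    /// ```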
5044 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5045 let query_str = query.fuzzy_contents;
5046 if query_str.is_some_and(|query| query.is_empty()) {
5047 return BTreeMap::default();
5048 }
5049
5050 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5051 language,
5052 override_id: None,
5053 }));
5054
5055 let mut query_ix = 0;
5056 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5057 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5058
5059 let mut words = BTreeMap::default();
5060 let mut current_word_start_ix = None;
5061 let mut chunk_ix = query.range.start;
5062 for chunk in self.chunks(query.range, false) {
5063 for (i, c) in chunk.text.char_indices() {
5064 let ix = chunk_ix + i;
5065 if classifier.is_word(c) {
5066 if current_word_start_ix.is_none() {
5067 current_word_start_ix = Some(ix);
5068 }
5069
5070 if let Some(query_chars) = &query_chars
5071 && query_ix < query_len
5072 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5073 {
5074 query_ix += 1;
5075 }
5076 continue;
5077 } else if let Some(word_start) = current_word_start_ix.take()
5078 && query_ix == query_len
5079 {
5080 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5081 let mut word_text = self.text_for_range(word_start..ix).peekable();
5082 let first_char = word_text
5083 .peek()
5084 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words and "words" that start with a digit.
5086 if !query.skip_digits
5087 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5088 {
5089 words.insert(word_text.collect(), word_range);
5090 }
5091 }
5092 query_ix = 0;
5093 }
5094 chunk_ix += chunk.text.len();
5095 }
5096
5097 words
5098 }
5099}
5100
5101pub struct WordsQuery<'a> {
    /// When set, only returns words that contain all of this string's characters, in order
    /// (matched case-insensitively).
5103 pub fuzzy_contents: Option<&'a str>,
5104 /// Skips words that start with a digit.
5105 pub skip_digits: bool,
    /// Buffer offset range in which to look for words.
5107 pub range: Range<usize>,
5108}
5109
5110fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5111 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5112}
5113
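/// Measures the leading indentation of `text`: the kind (spaces or tabs) is taken from the first
/// whitespace character, and the length counts every leading space or tab.
///
/// A minimal sketch (illustrative only):
///
/// ```ignore
/// let indent = indent_size_for_text("    let x = 1;".chars());
/// assert_eq!(indent.len, 4);
/// ```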
5114fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5115 let mut result = IndentSize::spaces(0);
5116 for c in text {
5117 let kind = match c {
5118 ' ' => IndentKind::Space,
5119 '\t' => IndentKind::Tab,
5120 _ => break,
5121 };
5122 if result.len == 0 {
5123 result.kind = kind;
5124 }
5125 result.len += 1;
5126 }
5127 result
5128}
5129
5130impl Clone for BufferSnapshot {
5131 fn clone(&self) -> Self {
5132 Self {
5133 text: self.text.clone(),
5134 syntax: self.syntax.clone(),
5135 file: self.file.clone(),
5136 remote_selections: self.remote_selections.clone(),
5137 diagnostics: self.diagnostics.clone(),
5138 language: self.language.clone(),
5139 tree_sitter_data: self.tree_sitter_data.clone(),
5140 non_text_state_update_count: self.non_text_state_update_count,
5141 }
5142 }
5143}
5144
5145impl Deref for BufferSnapshot {
5146 type Target = text::BufferSnapshot;
5147
5148 fn deref(&self) -> &Self::Target {
5149 &self.text
5150 }
5151}
5152
5153unsafe impl Send for BufferChunks<'_> {}
5154
5155impl<'a> BufferChunks<'a> {
5156 pub(crate) fn new(
5157 text: &'a Rope,
5158 range: Range<usize>,
5159 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5160 diagnostics: bool,
5161 buffer_snapshot: Option<&'a BufferSnapshot>,
5162 ) -> Self {
5163 let mut highlights = None;
5164 if let Some((captures, highlight_maps)) = syntax {
5165 highlights = Some(BufferChunkHighlights {
5166 captures,
5167 next_capture: None,
5168 stack: Default::default(),
5169 highlight_maps,
5170 })
5171 }
5172
5173 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5174 let chunks = text.chunks_in_range(range.clone());
5175
5176 let mut this = BufferChunks {
5177 range,
5178 buffer_snapshot,
5179 chunks,
5180 diagnostic_endpoints,
5181 error_depth: 0,
5182 warning_depth: 0,
5183 information_depth: 0,
5184 hint_depth: 0,
5185 unnecessary_depth: 0,
5186 underline: true,
5187 highlights,
5188 };
5189 this.initialize_diagnostic_endpoints();
5190 this
5191 }
5192
    /// Seeks to the given byte range in the buffer.
5194 pub fn seek(&mut self, range: Range<usize>) {
5195 let old_range = std::mem::replace(&mut self.range, range.clone());
5196 self.chunks.set_range(self.range.clone());
5197 if let Some(highlights) = self.highlights.as_mut() {
5198 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5199 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5200 highlights
5201 .stack
5202 .retain(|(end_offset, _)| *end_offset > range.start);
5203 if let Some(capture) = &highlights.next_capture
5204 && range.start >= capture.node.start_byte()
5205 {
5206 let next_capture_end = capture.node.end_byte();
5207 if range.start < next_capture_end {
5208 highlights.stack.push((
5209 next_capture_end,
5210 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5211 ));
5212 }
5213 highlights.next_capture.take();
5214 }
5215 } else if let Some(snapshot) = self.buffer_snapshot {
5216 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5217 *highlights = BufferChunkHighlights {
5218 captures,
5219 next_capture: None,
5220 stack: Default::default(),
5221 highlight_maps,
5222 };
5223 } else {
5224 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5225 // Seeking such BufferChunks is not supported.
5226 debug_assert!(
5227 false,
5228 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5229 );
5230 }
5231
5232 highlights.captures.set_byte_range(self.range.clone());
5233 self.initialize_diagnostic_endpoints();
5234 }
5235 }
5236
5237 fn initialize_diagnostic_endpoints(&mut self) {
5238 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5239 && let Some(buffer) = self.buffer_snapshot
5240 {
5241 let mut diagnostic_endpoints = Vec::new();
5242 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5243 diagnostic_endpoints.push(DiagnosticEndpoint {
5244 offset: entry.range.start,
5245 is_start: true,
5246 severity: entry.diagnostic.severity,
5247 is_unnecessary: entry.diagnostic.is_unnecessary,
5248 underline: entry.diagnostic.underline,
5249 });
5250 diagnostic_endpoints.push(DiagnosticEndpoint {
5251 offset: entry.range.end,
5252 is_start: false,
5253 severity: entry.diagnostic.severity,
5254 is_unnecessary: entry.diagnostic.is_unnecessary,
5255 underline: entry.diagnostic.underline,
5256 });
5257 }
5258 diagnostic_endpoints
5259 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5260 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5261 self.hint_depth = 0;
5262 self.error_depth = 0;
5263 self.warning_depth = 0;
5264 self.information_depth = 0;
5265 }
5266 }
5267
5268 /// The current byte offset in the buffer.
5269 pub fn offset(&self) -> usize {
5270 self.range.start
5271 }
5272
5273 pub fn range(&self) -> Range<usize> {
5274 self.range.clone()
5275 }
5276
    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }

        if endpoint.is_unnecessary {
            if endpoint.is_start {
                self.unnecessary_depth += 1;
            } else {
                self.unnecessary_depth -= 1;
            }
        }
    }

    /// Returns the most severe diagnostic level overlapping the current position, if any.
    fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }

    /// Whether the current position lies inside a diagnostic that marks the code as unnecessary.
    fn current_code_is_unnecessary(&self) -> bool {
        self.unnecessary_depth > 0
    }
}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            // Pop highlight scopes that ended at or before the current offset.
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            // Push any captures that begin at or before the current offset onto the
            // highlight stack, and remember where the next pending capture starts.
            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        // Apply diagnostic endpoints at or before the current offset, and remember
        // the offset of the next diagnostic boundary.
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        // Yield the text up to the nearest capture or diagnostic boundary, carrying
        // the tab and character bitmaps for just that slice.
        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes this [`IndentSize`] and returns a new one that has been shrunk
    /// or enlarged by the given size in the given direction. The adjustment only
    /// applies when the two indents are of the same kind, except that enlarging
    /// an empty indent adopts the given size wholesale.
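    ///
    /// An illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    ///
    /// // Growing a 4-space indent by another 4 spaces yields an 8-space indent.
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    ///
    /// // Shrinking a 4-space indent by a tab leaves it unchanged, because the kinds differ.
    /// let unchanged = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::tab());
    /// ```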
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

    /// The number of columns this indent occupies when tabs are expanded to the
    /// given tab size, e.g. two tabs with a tab size of 4 expand to 8 columns.
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

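/// Groups an ascending sequence of `u32` values into contiguous ranges, starting a new
/// range whenever the next value is not exactly one past the previous value or the
/// current range has reached `max_len`.
///
/// An illustrative sketch (not compiled as a doctest):
///
/// ```ignore
/// let rows = [1, 2, 3, 5, 6, 9];
/// let ranges: Vec<_> = contiguous_ranges(rows.into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7, 9..10]);
/// ```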
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                // Extend the current range while values stay contiguous and the cap
                // has not been reached.
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

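/// Classifies characters as word, whitespace, or punctuation characters, taking any
/// language-specific word characters from the configured [`LanguageScope`] into account.
///
/// An illustrative sketch (not compiled as a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert!(classifier.is_word('x'));
/// assert!(classifier.is_word('_'));
/// assert!(classifier.is_whitespace(' '));
/// assert!(classifier.is_punctuation('.'));
///
/// // With `ignore_punctuation`, punctuation is classified as part of a word.
/// assert!(CharClassifier::new(None).ignore_punctuation(true).is_word('.'));
/// ```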
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

    /// Classifies a character using the given `ignore_punctuation` setting. Alphanumerics
    /// and `_` are always word characters; any additional word characters come from the
    /// configured scope and scope context; whitespace is checked next; everything else is
    /// punctuation unless `ignore_punctuation` is set, in which case it counts as a word.
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    /// Classifies a character using this classifier's own `ignore_punctuation` setting.
    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Finds all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
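///
/// An illustrative sketch (not compiled as a doctest; assumes `Rope` implements `From<&str>`):
///
/// ```ignore
/// let rope = Rope::from("foo \nbar\t\t\nbaz");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![3..4, 8..10]);
/// ```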
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            // If this chunk begins with whitespace, it may continue trailing whitespace
            // from the previous chunk, so extend the range back to where that began.
            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}