1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff_with_offsets,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::channel::oneshot;
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TaskLabel, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::{Arc, LazyLock},
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
79/// A label for the background task spawned by the buffer to compute
80/// a diff against the contents of its file.
81pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
82
/// Indicates whether a [`Buffer`] has permission to edit.
84#[derive(PartialEq, Clone, Copy, Debug)]
85pub enum Capability {
86 /// The buffer is a mutable replica.
87 ReadWrite,
88 /// The buffer is a read-only replica.
89 ReadOnly,
90}
91
92pub type BufferRow = u32;
93
94/// An in-memory representation of a source code file, including its text,
95/// syntax trees, git status, and diagnostics.
96pub struct Buffer {
97 text: TextBuffer,
98 branch_state: Option<BufferBranchState>,
99 /// Filesystem state, `None` when there is no path.
100 file: Option<Arc<dyn File>>,
101 /// The mtime of the file when this buffer was last loaded from
102 /// or saved to disk.
103 saved_mtime: Option<MTime>,
104 /// The version vector when this buffer was last loaded from
105 /// or saved to disk.
106 saved_version: clock::Global,
107 preview_version: clock::Global,
108 transaction_depth: usize,
109 was_dirty_before_starting_transaction: Option<bool>,
110 reload_task: Option<Task<Result<()>>>,
111 language: Option<Arc<Language>>,
112 autoindent_requests: Vec<Arc<AutoindentRequest>>,
113 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
114 pending_autoindent: Option<Task<()>>,
115 sync_parse_timeout: Duration,
116 syntax_map: Mutex<SyntaxMap>,
117 reparse: Option<Task<()>>,
118 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
119 non_text_state_update_count: usize,
120 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
121 remote_selections: TreeMap<ReplicaId, SelectionSet>,
122 diagnostics_timestamp: clock::Lamport,
123 completion_triggers: BTreeSet<String>,
124 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
125 completion_triggers_timestamp: clock::Lamport,
126 deferred_ops: OperationQueue<Operation>,
127 capability: Capability,
128 has_conflict: bool,
    /// Memoizes the result of `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the last call.
131 has_unsaved_edits: Cell<(clock::Global, bool)>,
132 change_bits: Vec<rc::Weak<Cell<bool>>>,
133 _subscriptions: Vec<gpui::Subscription>,
134 tree_sitter_data: Arc<TreeSitterData>,
135 encoding: &'static Encoding,
136 has_bom: bool,
137}
138
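/// Cached per-chunk data derived from the buffer's syntax trees.
///
/// The buffer's rows are partitioned into chunks of at most `MAX_ROWS_IN_A_CHUNK`
/// rows, and bracket matches are cached with one `Option` entry per chunk; all
/// entries are reset to `None` whenever the buffer's text changes (see `clear`).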
139#[derive(Debug)]
140pub struct TreeSitterData {
141 chunks: RowChunks,
142 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
143}
144
145const MAX_ROWS_IN_A_CHUNK: u32 = 50;
146
147impl TreeSitterData {
148 fn clear(&mut self, snapshot: text::BufferSnapshot) {
149 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
150 self.brackets_by_chunks.get_mut().clear();
151 self.brackets_by_chunks
152 .get_mut()
153 .resize(self.chunks.len(), None);
154 }
155
156 fn new(snapshot: text::BufferSnapshot) -> Self {
157 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
158 Self {
159 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
160 chunks,
161 }
162 }
163
164 fn version(&self) -> &clock::Global {
165 self.chunks.version()
166 }
167}
168
169#[derive(Copy, Clone, Debug, PartialEq, Eq)]
170pub enum ParseStatus {
171 Idle,
172 Parsing,
173}
174
175struct BufferBranchState {
176 base_buffer: Entity<Buffer>,
177 merged_operations: Vec<Lamport>,
178}
179
180/// An immutable, cheaply cloneable representation of a fixed
181/// state of a buffer.
182pub struct BufferSnapshot {
183 pub text: text::BufferSnapshot,
184 pub syntax: SyntaxSnapshot,
185 file: Option<Arc<dyn File>>,
186 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
187 remote_selections: TreeMap<ReplicaId, SelectionSet>,
188 language: Option<Arc<Language>>,
189 non_text_state_update_count: usize,
190 tree_sitter_data: Arc<TreeSitterData>,
191}
192
193/// The kind and amount of indentation in a particular line. For now,
194/// assumes that indentation is all the same character.
195#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
196pub struct IndentSize {
197 /// The number of bytes that comprise the indentation.
198 pub len: u32,
199 /// The kind of whitespace used for indentation.
200 pub kind: IndentKind,
201}
202
203/// A whitespace character that's used for indentation.
204#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
205pub enum IndentKind {
206 /// An ASCII space character.
207 #[default]
208 Space,
209 /// An ASCII tab character.
210 Tab,
211}
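
// For example (illustrative only): four leading spaces on a line are described
// by `IndentSize { len: 4, kind: IndentKind::Space }`, while a single leading
// tab is `IndentSize { len: 1, kind: IndentKind::Tab }`.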
212
213/// The shape of a selection cursor.
214#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
215pub enum CursorShape {
216 /// A vertical bar
217 #[default]
218 Bar,
219 /// A block that surrounds the following character
220 Block,
221 /// An underline that runs along the following character
222 Underline,
223 /// A box drawn around the following character
224 Hollow,
225}
226
227impl From<settings::CursorShape> for CursorShape {
228 fn from(shape: settings::CursorShape) -> Self {
229 match shape {
230 settings::CursorShape::Bar => CursorShape::Bar,
231 settings::CursorShape::Block => CursorShape::Block,
232 settings::CursorShape::Underline => CursorShape::Underline,
233 settings::CursorShape::Hollow => CursorShape::Hollow,
234 }
235 }
236}
237
238#[derive(Clone, Debug)]
239struct SelectionSet {
240 line_mode: bool,
241 cursor_shape: CursorShape,
242 selections: Arc<[Selection<Anchor>]>,
243 lamport_timestamp: clock::Lamport,
244}
245
246/// A diagnostic associated with a certain range of a buffer.
247#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
248pub struct Diagnostic {
249 /// The name of the service that produced this diagnostic.
250 pub source: Option<String>,
251 /// The ID provided by the dynamic registration that produced this diagnostic.
252 pub registration_id: Option<SharedString>,
253 /// A machine-readable code that identifies this diagnostic.
254 pub code: Option<NumberOrString>,
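    /// A URI that points to more information about this diagnostic's code.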
255 pub code_description: Option<lsp::Uri>,
256 /// Whether this diagnostic is a hint, warning, or error.
257 pub severity: DiagnosticSeverity,
258 /// The human-readable message associated with this diagnostic.
259 pub message: String,
    /// The human-readable message in Markdown format, if available.
261 pub markdown: Option<String>,
262 /// An id that identifies the group to which this diagnostic belongs.
263 ///
264 /// When a language server produces a diagnostic with
265 /// one or more associated diagnostics, those diagnostics are all
266 /// assigned a single group ID.
267 pub group_id: usize,
268 /// Whether this diagnostic is the primary diagnostic for its group.
269 ///
270 /// In a given group, the primary diagnostic is the top-level diagnostic
271 /// returned by the language server. The non-primary diagnostics are the
272 /// associated diagnostics.
273 pub is_primary: bool,
274 /// Whether this diagnostic is considered to originate from an analysis of
275 /// files on disk, as opposed to any unsaved buffer contents. This is a
276 /// property of a given diagnostic source, and is configured for a given
277 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
278 /// for the language server.
279 pub is_disk_based: bool,
280 /// Whether this diagnostic marks unnecessary code.
281 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups based on their source.
283 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for this diagnostic.
285 pub data: Option<Value>,
286 /// Whether to underline the corresponding text range in the editor.
287 pub underline: bool,
288}
289
290#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
291pub enum DiagnosticSourceKind {
292 Pulled,
293 Pushed,
294 Other,
295}
296
297/// An operation used to synchronize this buffer with its other replicas.
298#[derive(Clone, Debug, PartialEq)]
299pub enum Operation {
300 /// A text operation.
301 Buffer(text::Operation),
302
303 /// An update to the buffer's diagnostics.
304 UpdateDiagnostics {
305 /// The id of the language server that produced the new diagnostics.
306 server_id: LanguageServerId,
307 /// The diagnostics.
308 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
309 /// The buffer's lamport timestamp.
310 lamport_timestamp: clock::Lamport,
311 },
312
313 /// An update to the most recent selections in this buffer.
314 UpdateSelections {
315 /// The selections.
316 selections: Arc<[Selection<Anchor>]>,
317 /// The buffer's lamport timestamp.
318 lamport_timestamp: clock::Lamport,
319 /// Whether the selections are in 'line mode'.
320 line_mode: bool,
321 /// The [`CursorShape`] associated with these selections.
322 cursor_shape: CursorShape,
323 },
324
325 /// An update to the characters that should trigger autocompletion
326 /// for this buffer.
327 UpdateCompletionTriggers {
328 /// The characters that trigger autocompletion.
329 triggers: Vec<String>,
330 /// The buffer's lamport timestamp.
331 lamport_timestamp: clock::Lamport,
332 /// The language server ID.
333 server_id: LanguageServerId,
334 },
335
336 /// An update to the line ending type of this buffer.
337 UpdateLineEnding {
338 /// The line ending type.
339 line_ending: LineEnding,
340 /// The buffer's lamport timestamp.
341 lamport_timestamp: clock::Lamport,
342 },
343}
344
345/// An event that occurs in a buffer.
346#[derive(Clone, Debug, PartialEq)]
347pub enum BufferEvent {
348 /// The buffer was changed in a way that must be
349 /// propagated to its other replicas.
350 Operation {
351 operation: Operation,
352 is_local: bool,
353 },
354 /// The buffer was edited.
355 Edited,
356 /// The buffer's `dirty` bit changed.
357 DirtyChanged,
358 /// The buffer was saved.
359 Saved,
360 /// The buffer's file was changed on disk.
361 FileHandleChanged,
362 /// The buffer was reloaded.
363 Reloaded,
    /// The buffer needs to be reloaded.
365 ReloadNeeded,
366 /// The buffer's language was changed.
    /// The boolean is true if the buffer previously had no language (or only plain text) and now has one.
368 LanguageChanged(bool),
369 /// The buffer's syntax trees were updated.
370 Reparsed,
371 /// The buffer's diagnostics were updated.
372 DiagnosticsUpdated,
373 /// The buffer gained or lost editing capabilities.
374 CapabilityChanged,
375}
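
// A minimal sketch of reacting to these events (illustrative only; assumes an
// `event: &BufferEvent` delivered to an observer of the buffer entity):
//
//     match event {
//         BufferEvent::Edited => { /* refresh state derived from the text */ }
//         BufferEvent::Saved | BufferEvent::DirtyChanged => {
//             /* update "unsaved changes" indicators */
//         }
//         BufferEvent::Reparsed | BufferEvent::LanguageChanged(_) => {
//             /* re-query syntax-dependent state */
//         }
//         _ => {}
//     }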
376
377/// The file associated with a buffer.
378pub trait File: Send + Sync + Any {
379 /// Returns the [`LocalFile`] associated with this file, if the
380 /// file is local.
381 fn as_local(&self) -> Option<&dyn LocalFile>;
382
383 /// Returns whether this file is local.
384 fn is_local(&self) -> bool {
385 self.as_local().is_some()
386 }
387
388 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
389 /// only available in some states, such as modification time.
390 fn disk_state(&self) -> DiskState;
391
392 /// Returns the path of this file relative to the worktree's root directory.
393 fn path(&self) -> &Arc<RelPath>;
394
395 /// Returns the path of this file relative to the worktree's parent directory (this means it
396 /// includes the name of the worktree's root folder).
397 fn full_path(&self, cx: &App) -> PathBuf;
398
399 /// Returns the path style of this file.
400 fn path_style(&self, cx: &App) -> PathStyle;
401
402 /// Returns the last component of this handle's absolute path. If this handle refers to the root
403 /// of its worktree, then this method will return the name of the worktree itself.
404 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
405
406 /// Returns the id of the worktree to which this file belongs.
407 ///
408 /// This is needed for looking up project-specific settings.
409 fn worktree_id(&self, cx: &App) -> WorktreeId;
410
411 /// Converts this file into a protobuf message.
412 fn to_proto(&self, cx: &App) -> rpc::proto::File;
413
    /// Returns whether Zed considers this to be a private file.
415 fn is_private(&self) -> bool;
416}
417
418/// The file's storage status - whether it's stored (`Present`), and if so when it was last
419/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
420/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
421/// indicator for new files.
422#[derive(Copy, Clone, Debug, PartialEq)]
423pub enum DiskState {
424 /// File created in Zed that has not been saved.
425 New,
426 /// File present on the filesystem.
427 Present { mtime: MTime },
428 /// Deleted file that was previously present.
429 Deleted,
    /// An old version of a file that was previously present,
    /// usually obtained from a version control system (e.g. a Git blob).
432 Historic { was_deleted: bool },
433}
434
435impl DiskState {
436 /// Returns the file's last known modification time on disk.
437 pub fn mtime(self) -> Option<MTime> {
438 match self {
439 DiskState::New => None,
440 DiskState::Present { mtime } => Some(mtime),
441 DiskState::Deleted => None,
442 DiskState::Historic { .. } => None,
443 }
444 }
445
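    /// Returns whether the file currently exists in storage.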
446 pub fn exists(&self) -> bool {
447 match self {
448 DiskState::New => false,
449 DiskState::Present { .. } => true,
450 DiskState::Deleted => false,
451 DiskState::Historic { .. } => false,
452 }
453 }
454
455 /// Returns true if this state represents a deleted file.
456 pub fn is_deleted(&self) -> bool {
457 match self {
458 DiskState::Deleted => true,
459 DiskState::Historic { was_deleted } => *was_deleted,
460 _ => false,
461 }
462 }
463}
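
// A minimal sketch of how the states relate to the accessors above
// (illustrative only; `mtime` stands in for some `MTime` value):
//
//     assert!(!DiskState::New.exists());
//     assert!(DiskState::Present { mtime }.exists());
//     assert!(DiskState::Deleted.is_deleted());
//     assert!(DiskState::Historic { was_deleted: true }.is_deleted());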
464
465/// The file associated with a buffer, in the case where the file is on the local disk.
466pub trait LocalFile: File {
    /// Returns the absolute path of this file.
468 fn abs_path(&self, cx: &App) -> PathBuf;
469
470 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
471 fn load(&self, cx: &App) -> Task<Result<String>>;
472
473 /// Loads the file's contents from disk.
474 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
475}
476
477/// The auto-indent behavior associated with an editing operation.
478/// For some editing operations, each affected line of text has its
479/// indentation recomputed. For other operations, the entire block
480/// of edited text is adjusted uniformly.
481#[derive(Clone, Debug)]
482pub enum AutoindentMode {
483 /// Indent each line of inserted text.
484 EachLine,
485 /// Apply the same indentation adjustment to all of the lines
486 /// in a given insertion.
487 Block {
488 /// The original indentation column of the first line of each
489 /// insertion, if it has been copied.
490 ///
491 /// Knowing this makes it possible to preserve the relative indentation
492 /// of every line in the insertion from when it was copied.
493 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion has its indentation adjusted by `b - a`.
497 original_indent_columns: Vec<Option<u32>>,
498 },
499}
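
// A worked example of the `Block` adjustment (illustrative only): if the first
// line of a copied block originally sat at column 4 (`a = 4`), and auto-indent
// places that first line at column 8 (`b = 8`), then every line of the insertion
// is shifted right by `b - a = 4` columns, preserving the block's internal
// relative indentation. Such an insertion would be described as:
//
//     let mode = AutoindentMode::Block {
//         original_indent_columns: vec![Some(4)],
//     };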
500
501#[derive(Clone)]
502struct AutoindentRequest {
503 before_edit: BufferSnapshot,
504 entries: Vec<AutoindentRequestEntry>,
505 is_block_mode: bool,
506 ignore_empty_lines: bool,
507}
508
509#[derive(Debug, Clone)]
510struct AutoindentRequestEntry {
511 /// A range of the buffer whose indentation should be adjusted.
512 range: Range<Anchor>,
513 /// Whether or not these lines should be considered brand new, for the
514 /// purpose of auto-indent. When text is not new, its indentation will
515 /// only be adjusted if the suggested indentation level has *changed*
516 /// since the edit was made.
517 first_line_is_new: bool,
518 indent_size: IndentSize,
519 original_indent_column: Option<u32>,
520}
521
522#[derive(Debug)]
523struct IndentSuggestion {
524 basis_row: u32,
525 delta: Ordering,
526 within_error: bool,
527}
528
529struct BufferChunkHighlights<'a> {
530 captures: SyntaxMapCaptures<'a>,
531 next_capture: Option<SyntaxMapCapture<'a>>,
532 stack: Vec<(usize, HighlightId)>,
533 highlight_maps: Vec<HighlightMap>,
534}
535
536/// An iterator that yields chunks of a buffer's text, along with their
537/// syntax highlights and diagnostic status.
538pub struct BufferChunks<'a> {
539 buffer_snapshot: Option<&'a BufferSnapshot>,
540 range: Range<usize>,
541 chunks: text::Chunks<'a>,
542 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
543 error_depth: usize,
544 warning_depth: usize,
545 information_depth: usize,
546 hint_depth: usize,
547 unnecessary_depth: usize,
548 underline: bool,
549 highlights: Option<BufferChunkHighlights<'a>>,
550}
551
552/// A chunk of a buffer's text, along with its syntax highlight and
553/// diagnostic status.
554#[derive(Clone, Debug, Default)]
555pub struct Chunk<'a> {
556 /// The text of the chunk.
557 pub text: &'a str,
558 /// The syntax highlighting style of the chunk.
559 pub syntax_highlight_id: Option<HighlightId>,
560 /// The highlight style that has been applied to this chunk in
561 /// the editor.
562 pub highlight_style: Option<HighlightStyle>,
563 /// The severity of diagnostic associated with this chunk, if any.
564 pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters in this chunk are tabs.
566 pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
568 pub chars: u128,
569 /// Whether this chunk of text is marked as unnecessary.
570 pub is_unnecessary: bool,
571 /// Whether this chunk of text was originally a tab character.
572 pub is_tab: bool,
573 /// Whether this chunk of text was originally an inlay.
574 pub is_inlay: bool,
575 /// Whether to underline the corresponding text range in the editor.
576 pub underline: bool,
577}
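
// A minimal sketch of consuming the `tabs` bitset (illustrative only; assumes
// `chunk` is a `Chunk` and `i` is an index within the chunk, with `i < 128`):
//
//     let char_at_i_is_tab = chunk.tabs & (1 << i) != 0;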
578
579/// A set of edits to a given version of a buffer, computed asynchronously.
580#[derive(Debug)]
581pub struct Diff {
582 pub base_version: clock::Global,
583 pub line_ending: LineEnding,
584 pub edits: Vec<(Range<usize>, Arc<str>)>,
585}
586
587#[derive(Debug, Clone, Copy)]
588pub(crate) struct DiagnosticEndpoint {
589 offset: usize,
590 is_start: bool,
591 underline: bool,
592 severity: DiagnosticSeverity,
593 is_unnecessary: bool,
594}
595
596/// A class of characters, used for characterizing a run of text.
597#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
598pub enum CharKind {
599 /// Whitespace.
600 Whitespace,
601 /// Punctuation.
602 Punctuation,
603 /// Word.
604 Word,
605}
606
607/// Context for character classification within a specific scope.
608#[derive(Copy, Clone, Eq, PartialEq, Debug)]
609pub enum CharScopeContext {
610 /// Character classification for completion queries.
611 ///
612 /// This context treats certain characters as word constituents that would
613 /// normally be considered punctuation, such as '-' in Tailwind classes
614 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
615 Completion,
616 /// Character classification for linked edits.
617 ///
618 /// This context handles characters that should be treated as part of
619 /// identifiers during linked editing operations, such as '.' in JSX
620 /// component names like `<Animated.View>`.
621 LinkedEdit,
622}
623
/// A runnable is a set of data about a buffer region that can be resolved into a task.
625pub struct Runnable {
626 pub tags: SmallVec<[RunnableTag; 1]>,
627 pub language: Arc<Language>,
628 pub buffer: BufferId,
629}
630
631#[derive(Default, Clone, Debug)]
632pub struct HighlightedText {
633 pub text: SharedString,
634 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
635}
636
637#[derive(Default, Debug)]
638struct HighlightedTextBuilder {
639 pub text: String,
640 highlights: Vec<(Range<usize>, HighlightStyle)>,
641}
642
643impl HighlightedText {
644 pub fn from_buffer_range<T: ToOffset>(
645 range: Range<T>,
646 snapshot: &text::BufferSnapshot,
647 syntax_snapshot: &SyntaxSnapshot,
648 override_style: Option<HighlightStyle>,
649 syntax_theme: &SyntaxTheme,
650 ) -> Self {
651 let mut highlighted_text = HighlightedTextBuilder::default();
652 highlighted_text.add_text_from_buffer_range(
653 range,
654 snapshot,
655 syntax_snapshot,
656 override_style,
657 syntax_theme,
658 );
659 highlighted_text.build()
660 }
661
662 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
663 gpui::StyledText::new(self.text.clone())
664 .with_default_highlights(default_style, self.highlights.iter().cloned())
665 }
666
    /// Returns the first line, with leading whitespace trimmed unless a
    /// highlight starts within it, and a boolean indicating whether there
    /// are more lines after it.
669 pub fn first_line_preview(self) -> (Self, bool) {
670 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
671 let first_line = &self.text[..newline_ix];
672
673 // Trim leading whitespace, unless an edit starts prior to it.
674 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
675 if let Some((first_highlight_range, _)) = self.highlights.first() {
676 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
677 }
678
679 let preview_text = &first_line[preview_start_ix..];
680 let preview_highlights = self
681 .highlights
682 .into_iter()
683 .skip_while(|(range, _)| range.end <= preview_start_ix)
684 .take_while(|(range, _)| range.start < newline_ix)
685 .filter_map(|(mut range, highlight)| {
686 range.start = range.start.saturating_sub(preview_start_ix);
687 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
688 if range.is_empty() {
689 None
690 } else {
691 Some((range, highlight))
692 }
693 });
694
695 let preview = Self {
696 text: SharedString::new(preview_text),
697 highlights: preview_highlights.collect(),
698 };
699
700 (preview, self.text.len() > newline_ix)
701 }
702}
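
// A minimal usage sketch of `first_line_preview` (illustrative only):
//
//     let highlighted = HighlightedText {
//         text: "    let x = 1;\nlet y = 2;".into(),
//         highlights: Vec::new(),
//     };
//     let (preview, has_more_lines) = highlighted.first_line_preview();
//     // With no highlights, the leading whitespace is trimmed, so
//     // `preview.text` is "let x = 1;" and `has_more_lines` is true.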
703
704impl HighlightedTextBuilder {
705 pub fn build(self) -> HighlightedText {
706 HighlightedText {
707 text: self.text.into(),
708 highlights: self.highlights,
709 }
710 }
711
712 pub fn add_text_from_buffer_range<T: ToOffset>(
713 &mut self,
714 range: Range<T>,
715 snapshot: &text::BufferSnapshot,
716 syntax_snapshot: &SyntaxSnapshot,
717 override_style: Option<HighlightStyle>,
718 syntax_theme: &SyntaxTheme,
719 ) {
720 let range = range.to_offset(snapshot);
721 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
722 let start = self.text.len();
723 self.text.push_str(chunk.text);
724 let end = self.text.len();
725
726 if let Some(highlight_style) = chunk
727 .syntax_highlight_id
728 .and_then(|id| id.style(syntax_theme))
729 {
730 let highlight_style = override_style.map_or(highlight_style, |override_style| {
731 highlight_style.highlight(override_style)
732 });
733 self.highlights.push((start..end, highlight_style));
734 } else if let Some(override_style) = override_style {
735 self.highlights.push((start..end, override_style));
736 }
737 }
738 }
739
740 fn highlighted_chunks<'a>(
741 range: Range<usize>,
742 snapshot: &'a text::BufferSnapshot,
743 syntax_snapshot: &'a SyntaxSnapshot,
744 ) -> BufferChunks<'a> {
745 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
746 grammar
747 .highlights_config
748 .as_ref()
749 .map(|config| &config.query)
750 });
751
752 let highlight_maps = captures
753 .grammars()
754 .iter()
755 .map(|grammar| grammar.highlight_map())
756 .collect();
757
758 BufferChunks::new(
759 snapshot.as_rope(),
760 range,
761 Some((captures, highlight_maps)),
762 false,
763 None,
764 )
765 }
766}
767
768#[derive(Clone)]
769pub struct EditPreview {
770 old_snapshot: text::BufferSnapshot,
771 applied_edits_snapshot: text::BufferSnapshot,
772 syntax_snapshot: SyntaxSnapshot,
773}
774
775impl EditPreview {
776 pub fn as_unified_diff(
777 &self,
778 file: Option<&Arc<dyn File>>,
779 edits: &[(Range<Anchor>, impl AsRef<str>)],
780 ) -> Option<String> {
781 let (first, _) = edits.first()?;
782 let (last, _) = edits.last()?;
783
784 let start = first.start.to_point(&self.old_snapshot);
785 let old_end = last.end.to_point(&self.old_snapshot);
786 let new_end = last
787 .end
788 .bias_right(&self.old_snapshot)
789 .to_point(&self.applied_edits_snapshot);
790
791 let start = Point::new(start.row.saturating_sub(3), 0);
792 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
793 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
794
795 let diff_body = unified_diff_with_offsets(
796 &self
797 .old_snapshot
798 .text_for_range(start..old_end)
799 .collect::<String>(),
800 &self
801 .applied_edits_snapshot
802 .text_for_range(start..new_end)
803 .collect::<String>(),
804 start.row,
805 start.row,
806 );
807
808 let path = file.map(|f| f.path().as_unix_str());
809 let header = match path {
810 Some(p) => format!("--- a/{}\n+++ b/{}\n", p, p),
811 None => String::new(),
812 };
813
814 Some(format!("{}{}", header, diff_body))
815 }
816
817 pub fn highlight_edits(
818 &self,
819 current_snapshot: &BufferSnapshot,
820 edits: &[(Range<Anchor>, impl AsRef<str>)],
821 include_deletions: bool,
822 cx: &App,
823 ) -> HighlightedText {
824 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
825 return HighlightedText::default();
826 };
827
828 let mut highlighted_text = HighlightedTextBuilder::default();
829
830 let visible_range_in_preview_snapshot =
831 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
832 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
833
834 let insertion_highlight_style = HighlightStyle {
835 background_color: Some(cx.theme().status().created_background),
836 ..Default::default()
837 };
838 let deletion_highlight_style = HighlightStyle {
839 background_color: Some(cx.theme().status().deleted_background),
840 ..Default::default()
841 };
842 let syntax_theme = cx.theme().syntax();
843
844 for (range, edit_text) in edits {
845 let edit_new_end_in_preview_snapshot = range
846 .end
847 .bias_right(&self.old_snapshot)
848 .to_offset(&self.applied_edits_snapshot);
849 let edit_start_in_preview_snapshot =
850 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
851
852 let unchanged_range_in_preview_snapshot =
853 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
854 if !unchanged_range_in_preview_snapshot.is_empty() {
855 highlighted_text.add_text_from_buffer_range(
856 unchanged_range_in_preview_snapshot,
857 &self.applied_edits_snapshot,
858 &self.syntax_snapshot,
859 None,
860 syntax_theme,
861 );
862 }
863
864 let range_in_current_snapshot = range.to_offset(current_snapshot);
865 if include_deletions && !range_in_current_snapshot.is_empty() {
866 highlighted_text.add_text_from_buffer_range(
867 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
870 Some(deletion_highlight_style),
871 syntax_theme,
872 );
873 }
874
875 if !edit_text.as_ref().is_empty() {
876 highlighted_text.add_text_from_buffer_range(
877 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
878 &self.applied_edits_snapshot,
879 &self.syntax_snapshot,
880 Some(insertion_highlight_style),
881 syntax_theme,
882 );
883 }
884
885 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
886 }
887
888 highlighted_text.add_text_from_buffer_range(
889 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
890 &self.applied_edits_snapshot,
891 &self.syntax_snapshot,
892 None,
893 syntax_theme,
894 );
895
896 highlighted_text.build()
897 }
898
899 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
900 cx.new(|cx| {
901 let mut buffer = Buffer::local_normalized(
902 self.applied_edits_snapshot.as_rope().clone(),
903 self.applied_edits_snapshot.line_ending(),
904 cx,
905 );
906 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
907 buffer
908 })
909 }
910
911 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
912 let (first, _) = edits.first()?;
913 let (last, _) = edits.last()?;
914
915 let start = first
916 .start
917 .bias_left(&self.old_snapshot)
918 .to_point(&self.applied_edits_snapshot);
919 let end = last
920 .end
921 .bias_right(&self.old_snapshot)
922 .to_point(&self.applied_edits_snapshot);
923
924 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
925 let range = Point::new(start.row, 0)
926 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
927
928 Some(range)
929 }
930}
931
932#[derive(Clone, Debug, PartialEq, Eq)]
933pub struct BracketMatch<T> {
934 pub open_range: Range<T>,
935 pub close_range: Range<T>,
936 pub newline_only: bool,
937 pub syntax_layer_depth: usize,
938 pub color_index: Option<usize>,
939}
940
941impl<T> BracketMatch<T> {
942 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
943 (self.open_range, self.close_range)
944 }
945}
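
// For example (illustrative only), given some `bracket_match: BracketMatch<usize>`
// pairing a `(` at offset 3 with a `)` at offset 9:
//
//     let (open, close) = bracket_match.bracket_ranges();
//     // open == 3..4, close == 9..10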
946
947impl Buffer {
948 /// Create a new buffer with the given base text.
949 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
950 Self::build(
951 TextBuffer::new(
952 ReplicaId::LOCAL,
953 cx.entity_id().as_non_zero_u64().into(),
954 base_text.into(),
955 ),
956 None,
957 Capability::ReadWrite,
958 )
959 }
960
961 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
962 pub fn local_normalized(
963 base_text_normalized: Rope,
964 line_ending: LineEnding,
965 cx: &Context<Self>,
966 ) -> Self {
967 Self::build(
968 TextBuffer::new_normalized(
969 ReplicaId::LOCAL,
970 cx.entity_id().as_non_zero_u64().into(),
971 line_ending,
972 base_text_normalized,
973 ),
974 None,
975 Capability::ReadWrite,
976 )
977 }
978
979 /// Create a new buffer that is a replica of a remote buffer.
980 pub fn remote(
981 remote_id: BufferId,
982 replica_id: ReplicaId,
983 capability: Capability,
984 base_text: impl Into<String>,
985 ) -> Self {
986 Self::build(
987 TextBuffer::new(replica_id, remote_id, base_text.into()),
988 None,
989 capability,
990 )
991 }
992
993 /// Create a new buffer that is a replica of a remote buffer, populating its
994 /// state from the given protobuf message.
995 pub fn from_proto(
996 replica_id: ReplicaId,
997 capability: Capability,
998 message: proto::BufferState,
999 file: Option<Arc<dyn File>>,
1000 ) -> Result<Self> {
1001 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
1002 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
1003 let mut this = Self::build(buffer, file, capability);
1004 this.text.set_line_ending(proto::deserialize_line_ending(
1005 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
1006 ));
1007 this.saved_version = proto::deserialize_version(&message.saved_version);
1008 this.saved_mtime = message.saved_mtime.map(|time| time.into());
1009 Ok(this)
1010 }
1011
1012 /// Serialize the buffer's state to a protobuf message.
1013 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
1014 proto::BufferState {
1015 id: self.remote_id().into(),
1016 file: self.file.as_ref().map(|f| f.to_proto(cx)),
1017 base_text: self.base_text().to_string(),
1018 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
1019 saved_version: proto::serialize_version(&self.saved_version),
1020 saved_mtime: self.saved_mtime.map(|time| time.into()),
1021 }
1022 }
1023
1024 /// Serialize as protobufs all of the changes to the buffer since the given version.
1025 pub fn serialize_ops(
1026 &self,
1027 since: Option<clock::Global>,
1028 cx: &App,
1029 ) -> Task<Vec<proto::Operation>> {
1030 let mut operations = Vec::new();
1031 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1032
1033 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1034 proto::serialize_operation(&Operation::UpdateSelections {
1035 selections: set.selections.clone(),
1036 lamport_timestamp: set.lamport_timestamp,
1037 line_mode: set.line_mode,
1038 cursor_shape: set.cursor_shape,
1039 })
1040 }));
1041
1042 for (server_id, diagnostics) in &self.diagnostics {
1043 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1044 lamport_timestamp: self.diagnostics_timestamp,
1045 server_id: *server_id,
1046 diagnostics: diagnostics.iter().cloned().collect(),
1047 }));
1048 }
1049
1050 for (server_id, completions) in &self.completion_triggers_per_language_server {
1051 operations.push(proto::serialize_operation(
1052 &Operation::UpdateCompletionTriggers {
1053 triggers: completions.iter().cloned().collect(),
1054 lamport_timestamp: self.completion_triggers_timestamp,
1055 server_id: *server_id,
1056 },
1057 ));
1058 }
1059
1060 let text_operations = self.text.operations().clone();
1061 cx.background_spawn(async move {
1062 let since = since.unwrap_or_default();
1063 operations.extend(
1064 text_operations
1065 .iter()
1066 .filter(|(_, op)| !since.observed(op.timestamp()))
1067 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1068 );
1069 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1070 operations
1071 })
1072 }
1073
1074 /// Assign a language to the buffer, returning the buffer.
1075 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1076 self.set_language_async(Some(language), cx);
1077 self
1078 }
1079
1080 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
1081 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1082 self.set_language(Some(language), cx);
1083 self
1084 }
1085
1086 /// Returns the [`Capability`] of this buffer.
1087 pub fn capability(&self) -> Capability {
1088 self.capability
1089 }
1090
1091 /// Whether this buffer can only be read.
1092 pub fn read_only(&self) -> bool {
1093 self.capability == Capability::ReadOnly
1094 }
1095
1096 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
1097 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1098 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1099 let snapshot = buffer.snapshot();
1100 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1101 let tree_sitter_data = TreeSitterData::new(snapshot);
1102 Self {
1103 saved_mtime,
1104 tree_sitter_data: Arc::new(tree_sitter_data),
1105 saved_version: buffer.version(),
1106 preview_version: buffer.version(),
1107 reload_task: None,
1108 transaction_depth: 0,
1109 was_dirty_before_starting_transaction: None,
1110 has_unsaved_edits: Cell::new((buffer.version(), false)),
1111 text: buffer,
1112 branch_state: None,
1113 file,
1114 capability,
1115 syntax_map,
1116 reparse: None,
1117 non_text_state_update_count: 0,
1118 sync_parse_timeout: Duration::from_millis(1),
1119 parse_status: watch::channel(ParseStatus::Idle),
1120 autoindent_requests: Default::default(),
1121 wait_for_autoindent_txs: Default::default(),
1122 pending_autoindent: Default::default(),
1123 language: None,
1124 remote_selections: Default::default(),
1125 diagnostics: Default::default(),
1126 diagnostics_timestamp: Lamport::MIN,
1127 completion_triggers: Default::default(),
1128 completion_triggers_per_language_server: Default::default(),
1129 completion_triggers_timestamp: Lamport::MIN,
1130 deferred_ops: OperationQueue::new(),
1131 has_conflict: false,
1132 change_bits: Default::default(),
1133 _subscriptions: Vec::new(),
1134 encoding: encoding_rs::UTF_8,
1135 has_bom: false,
1136 }
1137 }
1138
1139 pub fn build_snapshot(
1140 text: Rope,
1141 language: Option<Arc<Language>>,
1142 language_registry: Option<Arc<LanguageRegistry>>,
1143 cx: &mut App,
1144 ) -> impl Future<Output = BufferSnapshot> + use<> {
1145 let entity_id = cx.reserve_entity::<Self>().entity_id();
1146 let buffer_id = entity_id.as_non_zero_u64().into();
1147 async move {
1148 let text =
1149 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1150 .snapshot();
1151 let mut syntax = SyntaxMap::new(&text).snapshot();
1152 if let Some(language) = language.clone() {
1153 let language_registry = language_registry.clone();
1154 syntax.reparse(&text, language_registry, language);
1155 }
1156 let tree_sitter_data = TreeSitterData::new(text.clone());
1157 BufferSnapshot {
1158 text,
1159 syntax,
1160 file: None,
1161 diagnostics: Default::default(),
1162 remote_selections: Default::default(),
1163 tree_sitter_data: Arc::new(tree_sitter_data),
1164 language,
1165 non_text_state_update_count: 0,
1166 }
1167 }
1168 }
1169
1170 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1171 let entity_id = cx.reserve_entity::<Self>().entity_id();
1172 let buffer_id = entity_id.as_non_zero_u64().into();
1173 let text = TextBuffer::new_normalized(
1174 ReplicaId::LOCAL,
1175 buffer_id,
1176 Default::default(),
1177 Rope::new(),
1178 )
1179 .snapshot();
1180 let syntax = SyntaxMap::new(&text).snapshot();
1181 let tree_sitter_data = TreeSitterData::new(text.clone());
1182 BufferSnapshot {
1183 text,
1184 syntax,
1185 tree_sitter_data: Arc::new(tree_sitter_data),
1186 file: None,
1187 diagnostics: Default::default(),
1188 remote_selections: Default::default(),
1189 language: None,
1190 non_text_state_update_count: 0,
1191 }
1192 }
1193
1194 #[cfg(any(test, feature = "test-support"))]
1195 pub fn build_snapshot_sync(
1196 text: Rope,
1197 language: Option<Arc<Language>>,
1198 language_registry: Option<Arc<LanguageRegistry>>,
1199 cx: &mut App,
1200 ) -> BufferSnapshot {
1201 let entity_id = cx.reserve_entity::<Self>().entity_id();
1202 let buffer_id = entity_id.as_non_zero_u64().into();
1203 let text =
1204 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1205 .snapshot();
1206 let mut syntax = SyntaxMap::new(&text).snapshot();
1207 if let Some(language) = language.clone() {
1208 syntax.reparse(&text, language_registry, language);
1209 }
1210 let tree_sitter_data = TreeSitterData::new(text.clone());
1211 BufferSnapshot {
1212 text,
1213 syntax,
1214 tree_sitter_data: Arc::new(tree_sitter_data),
1215 file: None,
1216 diagnostics: Default::default(),
1217 remote_selections: Default::default(),
1218 language,
1219 non_text_state_update_count: 0,
1220 }
1221 }
1222
1223 /// Retrieve a snapshot of the buffer's current state. This is computationally
1224 /// cheap, and allows reading from the buffer on a background thread.
1225 pub fn snapshot(&self) -> BufferSnapshot {
1226 let text = self.text.snapshot();
1227 let mut syntax_map = self.syntax_map.lock();
1228 syntax_map.interpolate(&text);
1229 let syntax = syntax_map.snapshot();
1230
1231 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1232 Arc::new(TreeSitterData::new(text.clone()))
1233 } else {
1234 self.tree_sitter_data.clone()
1235 };
1236
1237 BufferSnapshot {
1238 text,
1239 syntax,
1240 tree_sitter_data,
1241 file: self.file.clone(),
1242 remote_selections: self.remote_selections.clone(),
1243 diagnostics: self.diagnostics.clone(),
1244 language: self.language.clone(),
1245 non_text_state_update_count: self.non_text_state_update_count,
1246 }
1247 }
1248
1249 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1250 let this = cx.entity();
1251 cx.new(|cx| {
1252 let mut branch = Self {
1253 branch_state: Some(BufferBranchState {
1254 base_buffer: this.clone(),
1255 merged_operations: Default::default(),
1256 }),
1257 language: self.language.clone(),
1258 has_conflict: self.has_conflict,
1259 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1260 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1261 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1262 };
1263 if let Some(language_registry) = self.language_registry() {
1264 branch.set_language_registry(language_registry);
1265 }
1266
1267 // Reparse the branch buffer so that we get syntax highlighting immediately.
1268 branch.reparse(cx, true);
1269
1270 branch
1271 })
1272 }
1273
1274 pub fn preview_edits(
1275 &self,
1276 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1277 cx: &App,
1278 ) -> Task<EditPreview> {
1279 let registry = self.language_registry();
1280 let language = self.language().cloned();
1281 let old_snapshot = self.text.snapshot();
1282 let mut branch_buffer = self.text.branch();
1283 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1284 cx.background_spawn(async move {
1285 if !edits.is_empty() {
1286 if let Some(language) = language.clone() {
1287 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1288 }
1289
1290 branch_buffer.edit(edits.iter().cloned());
1291 let snapshot = branch_buffer.snapshot();
1292 syntax_snapshot.interpolate(&snapshot);
1293
1294 if let Some(language) = language {
1295 syntax_snapshot.reparse(&snapshot, registry, language);
1296 }
1297 }
1298 EditPreview {
1299 old_snapshot,
1300 applied_edits_snapshot: branch_buffer.snapshot(),
1301 syntax_snapshot,
1302 }
1303 })
1304 }
1305
1306 /// Applies all of the changes in this buffer that intersect any of the
1307 /// given `ranges` to its base buffer.
1308 ///
1309 /// If `ranges` is empty, then all changes will be applied. This buffer must
1310 /// be a branch buffer to call this method.
1311 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1312 let Some(base_buffer) = self.base_buffer() else {
1313 debug_panic!("not a branch buffer");
1314 return;
1315 };
1316
1317 let mut ranges = if ranges.is_empty() {
1318 &[0..usize::MAX]
1319 } else {
1320 ranges.as_slice()
1321 }
1322 .iter()
1323 .peekable();
1324
1325 let mut edits = Vec::new();
1326 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1327 let mut is_included = false;
1328 while let Some(range) = ranges.peek() {
1329 if range.end < edit.new.start {
1330 ranges.next().unwrap();
1331 } else {
1332 if range.start <= edit.new.end {
1333 is_included = true;
1334 }
1335 break;
1336 }
1337 }
1338
1339 if is_included {
1340 edits.push((
1341 edit.old.clone(),
1342 self.text_for_range(edit.new.clone()).collect::<String>(),
1343 ));
1344 }
1345 }
1346
1347 let operation = base_buffer.update(cx, |base_buffer, cx| {
1348 // cx.emit(BufferEvent::DiffBaseChanged);
1349 base_buffer.edit(edits, None, cx)
1350 });
1351
1352 if let Some(operation) = operation
1353 && let Some(BufferBranchState {
1354 merged_operations, ..
1355 }) = &mut self.branch_state
1356 {
1357 merged_operations.push(operation);
1358 }
1359 }
1360
1361 fn on_base_buffer_event(
1362 &mut self,
1363 _: Entity<Buffer>,
1364 event: &BufferEvent,
1365 cx: &mut Context<Self>,
1366 ) {
1367 let BufferEvent::Operation { operation, .. } = event else {
1368 return;
1369 };
1370 let Some(BufferBranchState {
1371 merged_operations, ..
1372 }) = &mut self.branch_state
1373 else {
1374 return;
1375 };
1376
1377 let mut operation_to_undo = None;
1378 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1379 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1380 {
1381 merged_operations.remove(ix);
1382 operation_to_undo = Some(operation.timestamp);
1383 }
1384
1385 self.apply_ops([operation.clone()], cx);
1386
1387 if let Some(timestamp) = operation_to_undo {
1388 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1389 self.undo_operations(counts, cx);
1390 }
1391 }
1392
1393 #[cfg(test)]
1394 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1395 &self.text
1396 }
1397
1398 /// Retrieve a snapshot of the buffer's raw text, without any
1399 /// language-related state like the syntax tree or diagnostics.
1400 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1401 self.text.snapshot()
1402 }
1403
1404 /// The file associated with the buffer, if any.
1405 pub fn file(&self) -> Option<&Arc<dyn File>> {
1406 self.file.as_ref()
1407 }
1408
1409 /// The version of the buffer that was last saved or reloaded from disk.
1410 pub fn saved_version(&self) -> &clock::Global {
1411 &self.saved_version
1412 }
1413
1414 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1415 pub fn saved_mtime(&self) -> Option<MTime> {
1416 self.saved_mtime
1417 }
1418
1419 /// Returns the character encoding of the buffer's file.
1420 pub fn encoding(&self) -> &'static Encoding {
1421 self.encoding
1422 }
1423
1424 /// Sets the character encoding of the buffer.
1425 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1426 self.encoding = encoding;
1427 }
1428
1429 /// Returns whether the buffer has a Byte Order Mark.
1430 pub fn has_bom(&self) -> bool {
1431 self.has_bom
1432 }
1433
1434 /// Sets whether the buffer has a Byte Order Mark.
1435 pub fn set_has_bom(&mut self, has_bom: bool) {
1436 self.has_bom = has_bom;
1437 }
1438
1439 /// Assign a language to the buffer.
1440 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1441 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1442 }
1443
1444 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1445 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1446 self.set_language_(language, true, cx);
1447 }
1448
1449 fn set_language_(
1450 &mut self,
1451 language: Option<Arc<Language>>,
1452 may_block: bool,
1453 cx: &mut Context<Self>,
1454 ) {
1455 self.non_text_state_update_count += 1;
1456 self.syntax_map.lock().clear(&self.text);
1457 let old_language = std::mem::replace(&mut self.language, language);
1458 self.was_changed();
1459 self.reparse(cx, may_block);
1460 let has_fresh_language =
1461 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1462 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1463 }
1464
1465 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1466 /// other languages if parts of the buffer are written in different languages.
1467 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1468 self.syntax_map
1469 .lock()
1470 .set_language_registry(language_registry);
1471 }
1472
1473 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1474 self.syntax_map.lock().language_registry()
1475 }
1476
1477 /// Assign the line ending type to the buffer.
1478 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1479 self.text.set_line_ending(line_ending);
1480
1481 let lamport_timestamp = self.text.lamport_clock.tick();
1482 self.send_operation(
1483 Operation::UpdateLineEnding {
1484 line_ending,
1485 lamport_timestamp,
1486 },
1487 true,
1488 cx,
1489 );
1490 }
1491
1492 /// Assign the buffer a new [`Capability`].
1493 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1494 if self.capability != capability {
1495 self.capability = capability;
1496 cx.emit(BufferEvent::CapabilityChanged)
1497 }
1498 }
1499
1500 /// This method is called to signal that the buffer has been saved.
1501 pub fn did_save(
1502 &mut self,
1503 version: clock::Global,
1504 mtime: Option<MTime>,
1505 cx: &mut Context<Self>,
1506 ) {
1507 self.saved_version = version.clone();
1508 self.has_unsaved_edits.set((version, false));
1509 self.has_conflict = false;
1510 self.saved_mtime = mtime;
1511 self.was_changed();
1512 cx.emit(BufferEvent::Saved);
1513 cx.notify();
1514 }
1515
1516 /// Reloads the contents of the buffer from disk.
1517 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1518 let (tx, rx) = futures::channel::oneshot::channel();
1519 let prev_version = self.text.version();
1520 self.reload_task = Some(cx.spawn(async move |this, cx| {
1521 let Some((new_mtime, load_bytes_task, encoding)) = this.update(cx, |this, cx| {
1522 let file = this.file.as_ref()?.as_local()?;
1523 Some((
1524 file.disk_state().mtime(),
1525 file.load_bytes(cx),
1526 this.encoding,
1527 ))
1528 })?
1529 else {
1530 return Ok(());
1531 };
1532
1533 let bytes = load_bytes_task.await?;
1534 let (cow, _encoding_used, _has_errors) = encoding.decode(&bytes);
1535 let new_text = cow.into_owned();
1536
1537 let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await;
1538 this.update(cx, |this, cx| {
1539 if this.version() == diff.base_version {
1540 this.finalize_last_transaction();
1541 this.apply_diff(diff, cx);
1542 tx.send(this.finalize_last_transaction().cloned()).ok();
1543 this.has_conflict = false;
1544 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1545 } else {
1546 if !diff.edits.is_empty()
1547 || this
1548 .edits_since::<usize>(&diff.base_version)
1549 .next()
1550 .is_some()
1551 {
1552 this.has_conflict = true;
1553 }
1554
1555 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1556 }
1557
1558 this.reload_task.take();
1559 })
1560 }));
1561 rx
1562 }
1563
1564 /// This method is called to signal that the buffer has been reloaded.
1565 pub fn did_reload(
1566 &mut self,
1567 version: clock::Global,
1568 line_ending: LineEnding,
1569 mtime: Option<MTime>,
1570 cx: &mut Context<Self>,
1571 ) {
1572 self.saved_version = version;
1573 self.has_unsaved_edits
1574 .set((self.saved_version.clone(), false));
1575 self.text.set_line_ending(line_ending);
1576 self.saved_mtime = mtime;
1577 cx.emit(BufferEvent::Reloaded);
1578 cx.notify();
1579 }
1580
1581 /// Updates the [`File`] backing this buffer. This should be called when
1582 /// the file has changed or has been deleted.
1583 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1584 let was_dirty = self.is_dirty();
1585 let mut file_changed = false;
1586
1587 if let Some(old_file) = self.file.as_ref() {
1588 if new_file.path() != old_file.path() {
1589 file_changed = true;
1590 }
1591
1592 let old_state = old_file.disk_state();
1593 let new_state = new_file.disk_state();
1594 if old_state != new_state {
1595 file_changed = true;
1596 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1597 cx.emit(BufferEvent::ReloadNeeded)
1598 }
1599 }
1600 } else {
1601 file_changed = true;
1602 };
1603
1604 self.file = Some(new_file);
1605 if file_changed {
1606 self.was_changed();
1607 self.non_text_state_update_count += 1;
1608 if was_dirty != self.is_dirty() {
1609 cx.emit(BufferEvent::DirtyChanged);
1610 }
1611 cx.emit(BufferEvent::FileHandleChanged);
1612 cx.notify();
1613 }
1614 }
1615
1616 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1617 Some(self.branch_state.as_ref()?.base_buffer.clone())
1618 }
1619
1620 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1621 pub fn language(&self) -> Option<&Arc<Language>> {
1622 self.language.as_ref()
1623 }
1624
1625 /// Returns the [`Language`] at the given location.
1626 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1627 let offset = position.to_offset(self);
1628 let mut is_first = true;
1629 let start_anchor = self.anchor_before(offset);
1630 let end_anchor = self.anchor_after(offset);
1631 self.syntax_map
1632 .lock()
1633 .layers_for_range(offset..offset, &self.text, false)
1634 .filter(|layer| {
1635 if is_first {
1636 is_first = false;
1637 return true;
1638 }
1639
1640 layer
1641 .included_sub_ranges
1642 .map(|sub_ranges| {
1643 sub_ranges.iter().any(|sub_range| {
1644 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1645 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1646 !is_before_start && !is_after_end
1647 })
1648 })
1649 .unwrap_or(true)
1650 })
1651 .last()
1652 .map(|info| info.language.clone())
1653 .or_else(|| self.language.clone())
1654 }
1655
1656 /// Returns each [`Language`] for the active syntax layers at the given location.
1657 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1658 let offset = position.to_offset(self);
1659 let mut languages: Vec<Arc<Language>> = self
1660 .syntax_map
1661 .lock()
1662 .layers_for_range(offset..offset, &self.text, false)
1663 .map(|info| info.language.clone())
1664 .collect();
1665
1666 if languages.is_empty()
1667 && let Some(buffer_language) = self.language()
1668 {
1669 languages.push(buffer_language.clone());
1670 }
1671
1672 languages
1673 }
1674
1675 /// An integer version number that accounts for all updates besides
1676 /// the buffer's text itself (which is versioned via a version vector).
1677 pub fn non_text_state_update_count(&self) -> usize {
1678 self.non_text_state_update_count
1679 }
1680
1681 /// Whether the buffer is being parsed in the background.
1682 #[cfg(any(test, feature = "test-support"))]
1683 pub fn is_parsing(&self) -> bool {
1684 self.reparse.is_some()
1685 }
1686
1687 /// Indicates whether the buffer contains any regions that may be
1688 /// written in a language that hasn't been loaded yet.
1689 pub fn contains_unknown_injections(&self) -> bool {
1690 self.syntax_map.lock().contains_unknown_injections()
1691 }
1692
1693 #[cfg(any(test, feature = "test-support"))]
1694 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1695 self.sync_parse_timeout = timeout;
1696 }
1697
1698 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1699 match Arc::get_mut(&mut self.tree_sitter_data) {
1700 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1701 None => {
1702 let tree_sitter_data = TreeSitterData::new(snapshot);
1703 self.tree_sitter_data = Arc::new(tree_sitter_data)
1704 }
1705 }
1706 }
1707
1708 /// Called after an edit to synchronize the buffer's main parse tree with
1709 /// the buffer's new underlying state.
1710 ///
1711 /// Locks the syntax map and interpolates the edits since the last reparse
1712 /// into the foreground syntax tree.
1713 ///
1714 /// Then takes a stable snapshot of the syntax map before unlocking it.
1715 /// The snapshot with the interpolated edits is sent to a background thread,
1716 /// where we ask Tree-sitter to perform an incremental parse.
1717 ///
    /// Meanwhile, in the foreground, if `may_block` is true, we block the main
    /// thread for up to `sync_parse_timeout` (1ms by default), waiting for the
    /// parse to complete, and proceed synchronously as soon as it does.
    ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, we call
    /// back into the main thread and assign the result to the foreground parse
    /// state.
1726 ///
1727 /// If the buffer or grammar changed since the start of the background parse,
1728 /// initiate an additional reparse recursively. To avoid concurrent parses
1729 /// for the same buffer, we only initiate a new parse if we are not already
1730 /// parsing in the background.
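    ///
    /// A rough usage sketch (not compiled as a doc-test; assumes a test-only
    /// `Entity<Buffer>` named `buffer` and a gpui `TestAppContext` named `cx`,
    /// neither of which is defined here):
    ///
    /// ```ignore
    /// // Kick off a reparse that may block the main thread briefly, then wait
    /// // for any background pass to finish before inspecting the syntax tree.
    /// buffer.update(cx, |buffer, cx| buffer.reparse(cx, true));
    /// buffer.read_with(cx, |buffer, _| buffer.parsing_idle()).await;
    /// ```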
1731 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1732 if self.text.version() != *self.tree_sitter_data.version() {
1733 self.invalidate_tree_sitter_data(self.text.snapshot());
1734 }
1735 if self.reparse.is_some() {
1736 return;
1737 }
1738 let language = if let Some(language) = self.language.clone() {
1739 language
1740 } else {
1741 return;
1742 };
1743
1744 let text = self.text_snapshot();
1745 let parsed_version = self.version();
1746
1747 let mut syntax_map = self.syntax_map.lock();
1748 syntax_map.interpolate(&text);
1749 let language_registry = syntax_map.language_registry();
1750 let mut syntax_snapshot = syntax_map.snapshot();
1751 drop(syntax_map);
1752
1753 let parse_task = cx.background_spawn({
1754 let language = language.clone();
1755 let language_registry = language_registry.clone();
1756 async move {
1757 syntax_snapshot.reparse(&text, language_registry, language);
1758 syntax_snapshot
1759 }
1760 });
1761
1762 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1763 if may_block {
1764 match cx
1765 .background_executor()
1766 .block_with_timeout(self.sync_parse_timeout, parse_task)
1767 {
1768 Ok(new_syntax_snapshot) => {
1769 self.did_finish_parsing(new_syntax_snapshot, cx);
1770 self.reparse = None;
1771 }
1772 Err(parse_task) => {
1773 self.reparse = Some(cx.spawn(async move |this, cx| {
1774 let new_syntax_map = cx.background_spawn(parse_task).await;
1775 this.update(cx, move |this, cx| {
1776 let grammar_changed = || {
1777 this.language.as_ref().is_none_or(|current_language| {
1778 !Arc::ptr_eq(&language, current_language)
1779 })
1780 };
1781 let language_registry_changed = || {
1782 new_syntax_map.contains_unknown_injections()
1783 && language_registry.is_some_and(|registry| {
1784 registry.version()
1785 != new_syntax_map.language_registry_version()
1786 })
1787 };
1788 let parse_again = this.version.changed_since(&parsed_version)
1789 || language_registry_changed()
1790 || grammar_changed();
1791 this.did_finish_parsing(new_syntax_map, cx);
1792 this.reparse = None;
1793 if parse_again {
1794 this.reparse(cx, false);
1795 }
1796 })
1797 .ok();
1798 }));
1799 }
1800 }
1801 } else {
1802 self.reparse = Some(cx.spawn(async move |this, cx| {
1803 let new_syntax_map = cx.background_spawn(parse_task).await;
1804 this.update(cx, move |this, cx| {
1805 let grammar_changed = || {
1806 this.language.as_ref().is_none_or(|current_language| {
1807 !Arc::ptr_eq(&language, current_language)
1808 })
1809 };
1810 let language_registry_changed = || {
1811 new_syntax_map.contains_unknown_injections()
1812 && language_registry.is_some_and(|registry| {
1813 registry.version() != new_syntax_map.language_registry_version()
1814 })
1815 };
1816 let parse_again = this.version.changed_since(&parsed_version)
1817 || language_registry_changed()
1818 || grammar_changed();
1819 this.did_finish_parsing(new_syntax_map, cx);
1820 this.reparse = None;
1821 if parse_again {
1822 this.reparse(cx, false);
1823 }
1824 })
1825 .ok();
1826 }));
1827 }
1828 }
1829
1830 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1831 self.was_changed();
1832 self.non_text_state_update_count += 1;
1833 self.syntax_map.lock().did_parse(syntax_snapshot);
1834 self.request_autoindent(cx);
1835 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1836 self.invalidate_tree_sitter_data(self.text.snapshot());
1837 cx.emit(BufferEvent::Reparsed);
1838 cx.notify();
1839 }
1840
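    /// Returns a receiver that reports the buffer's current [`ParseStatus`] and any
    /// subsequent changes to it.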
1841 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1842 self.parse_status.1.clone()
1843 }
1844
    /// Waits until the buffer is no longer parsing.
1846 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1847 let mut parse_status = self.parse_status();
1848 async move {
1849 while *parse_status.borrow() != ParseStatus::Idle {
1850 if parse_status.changed().await.is_err() {
1851 break;
1852 }
1853 }
1854 }
1855 }
1856
1857 /// Assign to the buffer a set of diagnostics created by a given language server.
1858 pub fn update_diagnostics(
1859 &mut self,
1860 server_id: LanguageServerId,
1861 diagnostics: DiagnosticSet,
1862 cx: &mut Context<Self>,
1863 ) {
1864 let lamport_timestamp = self.text.lamport_clock.tick();
1865 let op = Operation::UpdateDiagnostics {
1866 server_id,
1867 diagnostics: diagnostics.iter().cloned().collect(),
1868 lamport_timestamp,
1869 };
1870
1871 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1872 self.send_operation(op, true, cx);
1873 }
1874
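    /// Returns the diagnostics for the given language server, or for all language
    /// servers if `for_server` is `None`.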
1875 pub fn buffer_diagnostics(
1876 &self,
1877 for_server: Option<LanguageServerId>,
1878 ) -> Vec<&DiagnosticEntry<Anchor>> {
1879 match for_server {
1880 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1881 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1882 Err(_) => Vec::new(),
1883 },
1884 None => self
1885 .diagnostics
1886 .iter()
1887 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1888 .collect(),
1889 }
1890 }
1891
1892 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1893 if let Some(indent_sizes) = self.compute_autoindents() {
1894 let indent_sizes = cx.background_spawn(indent_sizes);
1895 match cx
1896 .background_executor()
1897 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1898 {
1899 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1900 Err(indent_sizes) => {
1901 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1902 let indent_sizes = indent_sizes.await;
1903 this.update(cx, |this, cx| {
1904 this.apply_autoindents(indent_sizes, cx);
1905 })
1906 .ok();
1907 }));
1908 }
1909 }
1910 } else {
1911 self.autoindent_requests.clear();
1912 for tx in self.wait_for_autoindent_txs.drain(..) {
1913 tx.send(()).ok();
1914 }
1915 }
1916 }
1917
1918 fn compute_autoindents(
1919 &self,
1920 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1921 let max_rows_between_yields = 100;
1922 let snapshot = self.snapshot();
1923 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1924 return None;
1925 }
1926
1927 let autoindent_requests = self.autoindent_requests.clone();
1928 Some(async move {
1929 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1930 for request in autoindent_requests {
1931 // Resolve each edited range to its row in the current buffer and in the
1932 // buffer before this batch of edits.
1933 let mut row_ranges = Vec::new();
1934 let mut old_to_new_rows = BTreeMap::new();
1935 let mut language_indent_sizes_by_new_row = Vec::new();
1936 for entry in &request.entries {
1937 let position = entry.range.start;
1938 let new_row = position.to_point(&snapshot).row;
1939 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1940 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1941
1942 if !entry.first_line_is_new {
1943 let old_row = position.to_point(&request.before_edit).row;
1944 old_to_new_rows.insert(old_row, new_row);
1945 }
1946 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1947 }
1948
1949 // Build a map containing the suggested indentation for each of the edited lines
1950 // with respect to the state of the buffer before these edits. This map is keyed
1951 // by the rows for these lines in the current state of the buffer.
1952 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1953 let old_edited_ranges =
1954 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1955 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1956 let mut language_indent_size = IndentSize::default();
1957 for old_edited_range in old_edited_ranges {
1958 let suggestions = request
1959 .before_edit
1960 .suggest_autoindents(old_edited_range.clone())
1961 .into_iter()
1962 .flatten();
1963 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1964 if let Some(suggestion) = suggestion {
1965 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1966
1967 // Find the indent size based on the language for this row.
1968 while let Some((row, size)) = language_indent_sizes.peek() {
1969 if *row > new_row {
1970 break;
1971 }
1972 language_indent_size = *size;
1973 language_indent_sizes.next();
1974 }
1975
1976 let suggested_indent = old_to_new_rows
1977 .get(&suggestion.basis_row)
1978 .and_then(|from_row| {
1979 Some(old_suggestions.get(from_row).copied()?.0)
1980 })
1981 .unwrap_or_else(|| {
1982 request
1983 .before_edit
1984 .indent_size_for_line(suggestion.basis_row)
1985 })
1986 .with_delta(suggestion.delta, language_indent_size);
1987 old_suggestions
1988 .insert(new_row, (suggested_indent, suggestion.within_error));
1989 }
1990 }
1991 yield_now().await;
1992 }
1993
1994 // Compute new suggestions for each line, but only include them in the result
1995 // if they differ from the old suggestion for that line.
1996 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1997 let mut language_indent_size = IndentSize::default();
1998 for (row_range, original_indent_column) in row_ranges {
1999 let new_edited_row_range = if request.is_block_mode {
2000 row_range.start..row_range.start + 1
2001 } else {
2002 row_range.clone()
2003 };
2004
2005 let suggestions = snapshot
2006 .suggest_autoindents(new_edited_row_range.clone())
2007 .into_iter()
2008 .flatten();
2009 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
2010 if let Some(suggestion) = suggestion {
2011 // Find the indent size based on the language for this row.
2012 while let Some((row, size)) = language_indent_sizes.peek() {
2013 if *row > new_row {
2014 break;
2015 }
2016 language_indent_size = *size;
2017 language_indent_sizes.next();
2018 }
2019
2020 let suggested_indent = indent_sizes
2021 .get(&suggestion.basis_row)
2022 .copied()
2023 .map(|e| e.0)
2024 .unwrap_or_else(|| {
2025 snapshot.indent_size_for_line(suggestion.basis_row)
2026 })
2027 .with_delta(suggestion.delta, language_indent_size);
2028
2029 if old_suggestions.get(&new_row).is_none_or(
2030 |(old_indentation, was_within_error)| {
2031 suggested_indent != *old_indentation
2032 && (!suggestion.within_error || *was_within_error)
2033 },
2034 ) {
2035 indent_sizes.insert(
2036 new_row,
2037 (suggested_indent, request.ignore_empty_lines),
2038 );
2039 }
2040 }
2041 }
2042
2043 if let (true, Some(original_indent_column)) =
2044 (request.is_block_mode, original_indent_column)
2045 {
2046 let new_indent =
2047 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2048 *indent
2049 } else {
2050 snapshot.indent_size_for_line(row_range.start)
2051 };
2052 let delta = new_indent.len as i64 - original_indent_column as i64;
2053 if delta != 0 {
2054 for row in row_range.skip(1) {
2055 indent_sizes.entry(row).or_insert_with(|| {
2056 let mut size = snapshot.indent_size_for_line(row);
2057 if size.kind == new_indent.kind {
2058 match delta.cmp(&0) {
2059 Ordering::Greater => size.len += delta as u32,
2060 Ordering::Less => {
2061 size.len = size.len.saturating_sub(-delta as u32)
2062 }
2063 Ordering::Equal => {}
2064 }
2065 }
2066 (size, request.ignore_empty_lines)
2067 });
2068 }
2069 }
2070 }
2071
2072 yield_now().await;
2073 }
2074 }
2075
2076 indent_sizes
2077 .into_iter()
2078 .filter_map(|(row, (indent, ignore_empty_lines))| {
2079 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2080 None
2081 } else {
2082 Some((row, indent))
2083 }
2084 })
2085 .collect()
2086 })
2087 }
2088
2089 fn apply_autoindents(
2090 &mut self,
2091 indent_sizes: BTreeMap<u32, IndentSize>,
2092 cx: &mut Context<Self>,
2093 ) {
2094 self.autoindent_requests.clear();
2095 for tx in self.wait_for_autoindent_txs.drain(..) {
2096 tx.send(()).ok();
2097 }
2098
2099 let edits: Vec<_> = indent_sizes
2100 .into_iter()
2101 .filter_map(|(row, indent_size)| {
2102 let current_size = indent_size_for_line(self, row);
2103 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2104 })
2105 .collect();
2106
2107 let preserve_preview = self.preserve_preview();
2108 self.edit(edits, None, cx);
2109 if preserve_preview {
2110 self.refresh_preview();
2111 }
2112 }
2113
2114 /// Create a minimal edit that will cause the given row to be indented
2115 /// with the given size. After applying this edit, the length of the line
2116 /// will always be at least `new_size.len`.
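    ///
    /// A small illustration (kept as `ignore` since it is not wired up as a doc-test):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces on row 3 yields an insertion of two
    /// // spaces at the start of that row.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```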
2117 pub fn edit_for_indent_size_adjustment(
2118 row: u32,
2119 current_size: IndentSize,
2120 new_size: IndentSize,
2121 ) -> Option<(Range<Point>, String)> {
2122 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
2124 Ordering::Greater => {
2125 let point = Point::new(row, 0);
2126 Some((
2127 point..point,
2128 iter::repeat(new_size.char())
2129 .take((new_size.len - current_size.len) as usize)
2130 .collect::<String>(),
2131 ))
2132 }
2133
2134 Ordering::Less => Some((
2135 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2136 String::new(),
2137 )),
2138
2139 Ordering::Equal => None,
2140 }
2141 } else {
2142 Some((
2143 Point::new(row, 0)..Point::new(row, current_size.len),
2144 iter::repeat(new_size.char())
2145 .take(new_size.len as usize)
2146 .collect::<String>(),
2147 ))
2148 }
2149 }
2150
2151 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2152 /// and the given new text.
2153 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2154 let old_text = self.as_rope().clone();
2155 let base_version = self.version();
2156 cx.background_executor()
2157 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2158 let old_text = old_text.to_string();
2159 let line_ending = LineEnding::detect(&new_text);
2160 LineEnding::normalize(&mut new_text);
2161 let edits = text_diff(&old_text, &new_text);
2162 Diff {
2163 base_version,
2164 line_ending,
2165 edits,
2166 }
2167 })
2168 }
2169
2170 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2172 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2173 let old_text = self.as_rope().clone();
2174 let line_ending = self.line_ending();
2175 let base_version = self.version();
2176 cx.background_spawn(async move {
2177 let ranges = trailing_whitespace_ranges(&old_text);
2178 let empty = Arc::<str>::from("");
2179 Diff {
2180 base_version,
2181 line_ending,
2182 edits: ranges
2183 .into_iter()
2184 .map(|range| (range, empty.clone()))
2185 .collect(),
2186 }
2187 })
2188 }
2189
2190 /// Ensures that the buffer ends with a single newline character, and
    /// no other whitespace. Does nothing if the buffer is empty.
2192 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2193 let len = self.len();
2194 if len == 0 {
2195 return;
2196 }
2197 let mut offset = len;
2198 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2199 let non_whitespace_len = chunk
2200 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2201 .len();
2202 offset -= chunk.len();
2203 offset += non_whitespace_len;
2204 if non_whitespace_len != 0 {
2205 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2206 return;
2207 }
2208 break;
2209 }
2210 }
2211 self.edit([(offset..len, "\n")], None, cx);
2212 }
2213
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of
    /// the diff that conflict with those changes are discarded.
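    ///
    /// A rough sketch of the `diff`/`apply_diff` round trip (not a doc-test; assumes
    /// an `Entity<Buffer>` named `buffer`, a `new_text: String`, and an async gpui
    /// handle `cx` in which `update` returns a `Result`, none of which are defined here):
    ///
    /// ```ignore
    /// // Compute the diff on a background thread...
    /// let diff_task = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx))?;
    /// let diff = diff_task.await;
    /// // ...then apply whatever still applies cleanly to the buffer's current state.
    /// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx))?;
    /// ```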
2217 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2218 let snapshot = self.snapshot();
2219 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2220 let mut delta = 0;
2221 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2222 while let Some(edit_since) = edits_since.peek() {
2223 // If the edit occurs after a diff hunk, then it does not
2224 // affect that hunk.
2225 if edit_since.old.start > range.end {
2226 break;
2227 }
2228 // If the edit precedes the diff hunk, then adjust the hunk
2229 // to reflect the edit.
2230 else if edit_since.old.end < range.start {
2231 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2232 edits_since.next();
2233 }
2234 // If the edit intersects a diff hunk, then discard that hunk.
2235 else {
2236 return None;
2237 }
2238 }
2239
2240 let start = (range.start as i64 + delta) as usize;
2241 let end = (range.end as i64 + delta) as usize;
2242 Some((start..end, new_text))
2243 });
2244
2245 self.start_transaction();
2246 self.text.set_line_ending(diff.line_ending);
2247 self.edit(adjusted_edits, None, cx);
2248 self.end_transaction(cx)
2249 }
2250
2251 pub fn has_unsaved_edits(&self) -> bool {
2252 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2253
2254 if last_version == self.version {
2255 self.has_unsaved_edits
2256 .set((last_version, has_unsaved_edits));
2257 return has_unsaved_edits;
2258 }
2259
2260 let has_edits = self.has_edits_since(&self.saved_version);
2261 self.has_unsaved_edits
2262 .set((self.version.clone(), has_edits));
2263 has_edits
2264 }
2265
2266 /// Checks if the buffer has unsaved changes.
2267 pub fn is_dirty(&self) -> bool {
2268 if self.capability == Capability::ReadOnly {
2269 return false;
2270 }
2271 if self.has_conflict {
2272 return true;
2273 }
2274 match self.file.as_ref().map(|f| f.disk_state()) {
2275 Some(DiskState::New) | Some(DiskState::Deleted) => {
2276 !self.is_empty() && self.has_unsaved_edits()
2277 }
2278 _ => self.has_unsaved_edits(),
2279 }
2280 }
2281
2282 /// Marks the buffer as having a conflict regardless of current buffer state.
2283 pub fn set_conflict(&mut self) {
2284 self.has_conflict = true;
2285 }
2286
2287 /// Checks if the buffer and its file have both changed since the buffer
2288 /// was last saved or reloaded.
2289 pub fn has_conflict(&self) -> bool {
2290 if self.has_conflict {
2291 return true;
2292 }
2293 let Some(file) = self.file.as_ref() else {
2294 return false;
2295 };
2296 match file.disk_state() {
2297 DiskState::New => false,
2298 DiskState::Present { mtime } => match self.saved_mtime {
2299 Some(saved_mtime) => {
2300 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2301 }
2302 None => true,
2303 },
2304 DiskState::Deleted => false,
2305 DiskState::Historic { .. } => false,
2306 }
2307 }
2308
2309 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2310 pub fn subscribe(&mut self) -> Subscription<usize> {
2311 self.text.subscribe()
2312 }
2313
2314 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2315 ///
2316 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
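    ///
    /// A minimal sketch (not a doc-test; assumes a `&mut Buffer` named `buffer`,
    /// which is not defined here):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// // Keep the strong `Rc` alive for as long as the signal matters; the buffer
    /// // only holds a weak reference.
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    ///
    /// // ...later, after some edits have been applied...
    /// if changed.replace(false) {
    ///     // The buffer's text changed since the last check.
    /// }
    /// ```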
2318 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2319 if let Err(ix) = self
2320 .change_bits
2321 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2322 {
2323 self.change_bits.insert(ix, bit);
2324 }
2325 }
2326
2327 /// Set the change bit for all "listeners".
2328 fn was_changed(&mut self) {
2329 self.change_bits.retain(|change_bit| {
2330 change_bit
2331 .upgrade()
2332 .inspect(|bit| {
2333 _ = bit.replace(true);
2334 })
2335 .is_some()
2336 });
2337 }
2338
    /// Starts a transaction, if one is not already in progress. When undoing or
2340 /// redoing edits, all of the edits performed within a transaction are undone
2341 /// or redone together.
2342 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2343 self.start_transaction_at(Instant::now())
2344 }
2345
2346 /// Starts a transaction, providing the current time. Subsequent transactions
2347 /// that occur within a short period of time will be grouped together. This
2348 /// is controlled by the buffer's undo grouping duration.
2349 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2350 self.transaction_depth += 1;
2351 if self.was_dirty_before_starting_transaction.is_none() {
2352 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2353 }
2354 self.text.start_transaction_at(now)
2355 }
2356
2357 /// Terminates the current transaction, if this is the outermost transaction.
2358 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2359 self.end_transaction_at(Instant::now(), cx)
2360 }
2361
2362 /// Terminates the current transaction, providing the current time. Subsequent transactions
2363 /// that occur within a short period of time will be grouped together. This
2364 /// is controlled by the buffer's undo grouping duration.
2365 pub fn end_transaction_at(
2366 &mut self,
2367 now: Instant,
2368 cx: &mut Context<Self>,
2369 ) -> Option<TransactionId> {
2370 assert!(self.transaction_depth > 0);
2371 self.transaction_depth -= 1;
2372 let was_dirty = if self.transaction_depth == 0 {
2373 self.was_dirty_before_starting_transaction.take().unwrap()
2374 } else {
2375 false
2376 };
2377 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2378 self.did_edit(&start_version, was_dirty, cx);
2379 Some(transaction_id)
2380 } else {
2381 None
2382 }
2383 }
2384
2385 /// Manually add a transaction to the buffer's undo history.
2386 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2387 self.text.push_transaction(transaction, now);
2388 }
2389
2390 /// Differs from `push_transaction` in that it does not clear the redo
2391 /// stack. Intended to be used to create a parent transaction to merge
2392 /// potential child transactions into.
2393 ///
2394 /// The caller is responsible for removing it from the undo history using
2395 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2396 /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to do that is to create
    /// transactions with the usual `start_transaction` and `end_transaction`
    /// methods and merge the resulting transactions into the transaction
    /// created by this method.
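    ///
    /// A sketch of that workflow (not a doc-test; assumes a `&mut Buffer` named `buffer`
    /// and a `cx: &mut Context<Buffer>`, neither of which is defined here):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// let end = buffer.len();
    /// buffer.edit([(end..end, "appended text")], None, cx);
    /// match buffer.end_transaction(cx) {
    ///     // The child was created via `start_transaction`/`end_transaction`, so the
    ///     // redo stack has already been cleared; merging it into the parent is safe.
    ///     Some(child) => buffer.merge_transactions(child, parent),
    ///     // Nothing was merged, so drop the placeholder transaction.
    ///     None => {
    ///         buffer.forget_transaction(parent);
    ///     }
    /// }
    /// ```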
2401 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2402 self.text.push_empty_transaction(now)
2403 }
2404
2405 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2407 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2408 self.text.finalize_last_transaction()
2409 }
2410
2411 /// Manually group all changes since a given transaction.
2412 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2413 self.text.group_until_transaction(transaction_id);
2414 }
2415
2416 /// Manually remove a transaction from the buffer's undo history
2417 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2418 self.text.forget_transaction(transaction_id)
2419 }
2420
2421 /// Retrieve a transaction from the buffer's undo history
2422 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2423 self.text.get_transaction(transaction_id)
2424 }
2425
2426 /// Manually merge two transactions in the buffer's undo history.
2427 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2428 self.text.merge_transactions(transaction, destination);
2429 }
2430
2431 /// Waits for the buffer to receive operations with the given timestamps.
2432 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2433 &mut self,
2434 edit_ids: It,
2435 ) -> impl Future<Output = Result<()>> + use<It> {
2436 self.text.wait_for_edits(edit_ids)
2437 }
2438
2439 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2440 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2441 &mut self,
2442 anchors: It,
2443 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2444 self.text.wait_for_anchors(anchors)
2445 }
2446
2447 /// Waits for the buffer to receive operations up to the given version.
2448 pub fn wait_for_version(
2449 &mut self,
2450 version: clock::Global,
2451 ) -> impl Future<Output = Result<()>> + use<> {
2452 self.text.wait_for_version(version)
2453 }
2454
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2457 pub fn give_up_waiting(&mut self) {
2458 self.text.give_up_waiting();
2459 }
2460
2461 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2462 let mut rx = None;
2463 if !self.autoindent_requests.is_empty() {
2464 let channel = oneshot::channel();
2465 self.wait_for_autoindent_txs.push(channel.0);
2466 rx = Some(channel.1);
2467 }
2468 rx
2469 }
2470
2471 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2472 pub fn set_active_selections(
2473 &mut self,
2474 selections: Arc<[Selection<Anchor>]>,
2475 line_mode: bool,
2476 cursor_shape: CursorShape,
2477 cx: &mut Context<Self>,
2478 ) {
2479 let lamport_timestamp = self.text.lamport_clock.tick();
2480 self.remote_selections.insert(
2481 self.text.replica_id(),
2482 SelectionSet {
2483 selections: selections.clone(),
2484 lamport_timestamp,
2485 line_mode,
2486 cursor_shape,
2487 },
2488 );
2489 self.send_operation(
2490 Operation::UpdateSelections {
2491 selections,
2492 line_mode,
2493 lamport_timestamp,
2494 cursor_shape,
2495 },
2496 true,
2497 cx,
2498 );
2499 self.non_text_state_update_count += 1;
2500 cx.notify();
2501 }
2502
2503 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2504 /// this replica.
2505 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2506 if self
2507 .remote_selections
2508 .get(&self.text.replica_id())
2509 .is_none_or(|set| !set.selections.is_empty())
2510 {
2511 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2512 }
2513 }
2514
2515 pub fn set_agent_selections(
2516 &mut self,
2517 selections: Arc<[Selection<Anchor>]>,
2518 line_mode: bool,
2519 cursor_shape: CursorShape,
2520 cx: &mut Context<Self>,
2521 ) {
2522 let lamport_timestamp = self.text.lamport_clock.tick();
2523 self.remote_selections.insert(
2524 ReplicaId::AGENT,
2525 SelectionSet {
2526 selections,
2527 lamport_timestamp,
2528 line_mode,
2529 cursor_shape,
2530 },
2531 );
2532 self.non_text_state_update_count += 1;
2533 cx.notify();
2534 }
2535
2536 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2537 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2538 }
2539
2540 /// Replaces the buffer's entire text.
2541 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2542 where
2543 T: Into<Arc<str>>,
2544 {
2545 self.autoindent_requests.clear();
2546 self.edit([(0..self.len(), text)], None, cx)
2547 }
2548
2549 /// Appends the given text to the end of the buffer.
2550 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2551 where
2552 T: Into<Arc<str>>,
2553 {
2554 self.edit([(self.len()..self.len(), text)], None, cx)
2555 }
2556
2557 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2558 /// delete, and a string of text to insert at that location.
2559 ///
2560 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2561 /// request for the edited ranges, which will be processed when the buffer finishes
2562 /// parsing.
2563 ///
    /// Parsing takes place at the end of a transaction, and may run synchronously
2565 /// or asynchronously, depending on the changes.
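    ///
    /// A minimal sketch (not a doc-test; assumes a `&mut Buffer` named `buffer` with at
    /// least two rows and five columns on row 0, plus a `cx: &mut Context<Buffer>`,
    /// none of which is defined here):
    ///
    /// ```ignore
    /// // Replace the first five characters of row 0 and insert a line at the start
    /// // of row 1, re-indenting each edited line once the buffer reparses.
    /// buffer.edit(
    ///     [
    ///         (Point::new(0, 0)..Point::new(0, 5), "hello"),
    ///         (Point::new(1, 0)..Point::new(1, 0), "world\n"),
    ///     ],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```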
2566 pub fn edit<I, S, T>(
2567 &mut self,
2568 edits_iter: I,
2569 autoindent_mode: Option<AutoindentMode>,
2570 cx: &mut Context<Self>,
2571 ) -> Option<clock::Lamport>
2572 where
2573 I: IntoIterator<Item = (Range<S>, T)>,
2574 S: ToOffset,
2575 T: Into<Arc<str>>,
2576 {
2577 // Skip invalid edits and coalesce contiguous ones.
2578 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2579
2580 for (range, new_text) in edits_iter {
2581 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2582
2583 if range.start > range.end {
2584 mem::swap(&mut range.start, &mut range.end);
2585 }
2586 let new_text = new_text.into();
2587 if !new_text.is_empty() || !range.is_empty() {
2588 if let Some((prev_range, prev_text)) = edits.last_mut()
2589 && prev_range.end >= range.start
2590 {
2591 prev_range.end = cmp::max(prev_range.end, range.end);
2592 *prev_text = format!("{prev_text}{new_text}").into();
2593 } else {
2594 edits.push((range, new_text));
2595 }
2596 }
2597 }
2598 if edits.is_empty() {
2599 return None;
2600 }
2601
2602 self.start_transaction();
2603 self.pending_autoindent.take();
2604 let autoindent_request = autoindent_mode
2605 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2606
2607 let edit_operation = self.text.edit(edits.iter().cloned());
2608 let edit_id = edit_operation.timestamp();
2609
2610 if let Some((before_edit, mode)) = autoindent_request {
2611 let mut delta = 0isize;
2612 let mut previous_setting = None;
2613 let entries: Vec<_> = edits
2614 .into_iter()
2615 .enumerate()
2616 .zip(&edit_operation.as_edit().unwrap().new_text)
2617 .filter(|((_, (range, _)), _)| {
2618 let language = before_edit.language_at(range.start);
2619 let language_id = language.map(|l| l.id());
2620 if let Some((cached_language_id, auto_indent)) = previous_setting
2621 && cached_language_id == language_id
2622 {
2623 auto_indent
2624 } else {
2625 // The auto-indent setting is not present in editorconfigs, hence
2626 // we can avoid passing the file here.
2627 let auto_indent =
2628 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2629 previous_setting = Some((language_id, auto_indent));
2630 auto_indent
2631 }
2632 })
2633 .map(|((ix, (range, _)), new_text)| {
2634 let new_text_length = new_text.len();
2635 let old_start = range.start.to_point(&before_edit);
2636 let new_start = (delta + range.start as isize) as usize;
2637 let range_len = range.end - range.start;
2638 delta += new_text_length as isize - range_len as isize;
2639
2640 // Decide what range of the insertion to auto-indent, and whether
2641 // the first line of the insertion should be considered a newly-inserted line
2642 // or an edit to an existing line.
2643 let mut range_of_insertion_to_indent = 0..new_text_length;
2644 let mut first_line_is_new = true;
2645
2646 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2647 let old_line_end = before_edit.line_len(old_start.row);
2648
2649 if old_start.column > old_line_start {
2650 first_line_is_new = false;
2651 }
2652
2653 if !new_text.contains('\n')
2654 && (old_start.column + (range_len as u32) < old_line_end
2655 || old_line_end == old_line_start)
2656 {
2657 first_line_is_new = false;
2658 }
2659
2660 // When inserting text starting with a newline, avoid auto-indenting the
2661 // previous line.
2662 if new_text.starts_with('\n') {
2663 range_of_insertion_to_indent.start += 1;
2664 first_line_is_new = true;
2665 }
2666
2667 let mut original_indent_column = None;
2668 if let AutoindentMode::Block {
2669 original_indent_columns,
2670 } = &mode
2671 {
2672 original_indent_column = Some(if new_text.starts_with('\n') {
2673 indent_size_for_text(
2674 new_text[range_of_insertion_to_indent.clone()].chars(),
2675 )
2676 .len
2677 } else {
2678 original_indent_columns
2679 .get(ix)
2680 .copied()
2681 .flatten()
2682 .unwrap_or_else(|| {
2683 indent_size_for_text(
2684 new_text[range_of_insertion_to_indent.clone()].chars(),
2685 )
2686 .len
2687 })
2688 });
2689
2690 // Avoid auto-indenting the line after the edit.
2691 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2692 range_of_insertion_to_indent.end -= 1;
2693 }
2694 }
2695
2696 AutoindentRequestEntry {
2697 first_line_is_new,
2698 original_indent_column,
2699 indent_size: before_edit.language_indent_size_at(range.start, cx),
2700 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2701 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2702 }
2703 })
2704 .collect();
2705
2706 if !entries.is_empty() {
2707 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2708 before_edit,
2709 entries,
2710 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2711 ignore_empty_lines: false,
2712 }));
2713 }
2714 }
2715
2716 self.end_transaction(cx);
2717 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2718 Some(edit_id)
2719 }
2720
2721 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2722 self.was_changed();
2723
2724 if self.edits_since::<usize>(old_version).next().is_none() {
2725 return;
2726 }
2727
2728 self.reparse(cx, true);
2729 cx.emit(BufferEvent::Edited);
2730 if was_dirty != self.is_dirty() {
2731 cx.emit(BufferEvent::DirtyChanged);
2732 }
2733 cx.notify();
2734 }
2735
2736 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2737 where
2738 I: IntoIterator<Item = Range<T>>,
2739 T: ToOffset + Copy,
2740 {
2741 let before_edit = self.snapshot();
2742 let entries = ranges
2743 .into_iter()
2744 .map(|range| AutoindentRequestEntry {
2745 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2746 first_line_is_new: true,
2747 indent_size: before_edit.language_indent_size_at(range.start, cx),
2748 original_indent_column: None,
2749 })
2750 .collect();
2751 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2752 before_edit,
2753 entries,
2754 is_block_mode: false,
2755 ignore_empty_lines: true,
2756 }));
2757 self.request_autoindent(cx);
2758 }
2759
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
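    ///
    /// A minimal sketch (not a doc-test; assumes a `&mut Buffer` named `buffer` containing
    /// at least five lines and a `cx: &mut Context<Buffer>`, neither of which is defined here):
    ///
    /// ```ignore
    /// // Open a blank line at row 4, column 2, padded with blank lines above and below.
    /// let new_line_start = buffer.insert_empty_line(Point::new(4, 2), true, true, cx);
    /// ```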
2762 pub fn insert_empty_line(
2763 &mut self,
2764 position: impl ToPoint,
2765 space_above: bool,
2766 space_below: bool,
2767 cx: &mut Context<Self>,
2768 ) -> Point {
2769 let mut position = position.to_point(self);
2770
2771 self.start_transaction();
2772
2773 self.edit(
2774 [(position..position, "\n")],
2775 Some(AutoindentMode::EachLine),
2776 cx,
2777 );
2778
2779 if position.column > 0 {
2780 position += Point::new(1, 0);
2781 }
2782
2783 if !self.is_line_blank(position.row) {
2784 self.edit(
2785 [(position..position, "\n")],
2786 Some(AutoindentMode::EachLine),
2787 cx,
2788 );
2789 }
2790
2791 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2792 self.edit(
2793 [(position..position, "\n")],
2794 Some(AutoindentMode::EachLine),
2795 cx,
2796 );
2797 position.row += 1;
2798 }
2799
2800 if space_below
2801 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2802 {
2803 self.edit(
2804 [(position..position, "\n")],
2805 Some(AutoindentMode::EachLine),
2806 cx,
2807 );
2808 }
2809
2810 self.end_transaction(cx);
2811
2812 position
2813 }
2814
2815 /// Applies the given remote operations to the buffer.
2816 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2817 self.pending_autoindent.take();
2818 let was_dirty = self.is_dirty();
2819 let old_version = self.version.clone();
2820 let mut deferred_ops = Vec::new();
2821 let buffer_ops = ops
2822 .into_iter()
2823 .filter_map(|op| match op {
2824 Operation::Buffer(op) => Some(op),
2825 _ => {
2826 if self.can_apply_op(&op) {
2827 self.apply_op(op, cx);
2828 } else {
2829 deferred_ops.push(op);
2830 }
2831 None
2832 }
2833 })
2834 .collect::<Vec<_>>();
2835 for operation in buffer_ops.iter() {
2836 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2837 }
2838 self.text.apply_ops(buffer_ops);
2839 self.deferred_ops.insert(deferred_ops);
2840 self.flush_deferred_ops(cx);
2841 self.did_edit(&old_version, was_dirty, cx);
        // Notify independently of whether the buffer was edited, as the operations could
        // include a selection update.
2844 cx.notify();
2845 }
2846
2847 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2848 let mut deferred_ops = Vec::new();
2849 for op in self.deferred_ops.drain().iter().cloned() {
2850 if self.can_apply_op(&op) {
2851 self.apply_op(op, cx);
2852 } else {
2853 deferred_ops.push(op);
2854 }
2855 }
2856 self.deferred_ops.insert(deferred_ops);
2857 }
2858
2859 pub fn has_deferred_ops(&self) -> bool {
2860 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2861 }
2862
2863 fn can_apply_op(&self, operation: &Operation) -> bool {
2864 match operation {
2865 Operation::Buffer(_) => {
2866 unreachable!("buffer operations should never be applied at this layer")
2867 }
2868 Operation::UpdateDiagnostics {
2869 diagnostics: diagnostic_set,
2870 ..
2871 } => diagnostic_set.iter().all(|diagnostic| {
2872 self.text.can_resolve(&diagnostic.range.start)
2873 && self.text.can_resolve(&diagnostic.range.end)
2874 }),
2875 Operation::UpdateSelections { selections, .. } => selections
2876 .iter()
2877 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2878 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2879 }
2880 }
2881
2882 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2883 match operation {
2884 Operation::Buffer(_) => {
2885 unreachable!("buffer operations should never be applied at this layer")
2886 }
2887 Operation::UpdateDiagnostics {
2888 server_id,
2889 diagnostics: diagnostic_set,
2890 lamport_timestamp,
2891 } => {
2892 let snapshot = self.snapshot();
2893 self.apply_diagnostic_update(
2894 server_id,
2895 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2896 lamport_timestamp,
2897 cx,
2898 );
2899 }
2900 Operation::UpdateSelections {
2901 selections,
2902 lamport_timestamp,
2903 line_mode,
2904 cursor_shape,
2905 } => {
2906 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2907 && set.lamport_timestamp > lamport_timestamp
2908 {
2909 return;
2910 }
2911
2912 self.remote_selections.insert(
2913 lamport_timestamp.replica_id,
2914 SelectionSet {
2915 selections,
2916 lamport_timestamp,
2917 line_mode,
2918 cursor_shape,
2919 },
2920 );
2921 self.text.lamport_clock.observe(lamport_timestamp);
2922 self.non_text_state_update_count += 1;
2923 }
2924 Operation::UpdateCompletionTriggers {
2925 triggers,
2926 lamport_timestamp,
2927 server_id,
2928 } => {
2929 if triggers.is_empty() {
2930 self.completion_triggers_per_language_server
2931 .remove(&server_id);
2932 self.completion_triggers = self
2933 .completion_triggers_per_language_server
2934 .values()
2935 .flat_map(|triggers| triggers.iter().cloned())
2936 .collect();
2937 } else {
2938 self.completion_triggers_per_language_server
2939 .insert(server_id, triggers.iter().cloned().collect());
2940 self.completion_triggers.extend(triggers);
2941 }
2942 self.text.lamport_clock.observe(lamport_timestamp);
2943 }
2944 Operation::UpdateLineEnding {
2945 line_ending,
2946 lamport_timestamp,
2947 } => {
2948 self.text.set_line_ending(line_ending);
2949 self.text.lamport_clock.observe(lamport_timestamp);
2950 }
2951 }
2952 }
2953
2954 fn apply_diagnostic_update(
2955 &mut self,
2956 server_id: LanguageServerId,
2957 diagnostics: DiagnosticSet,
2958 lamport_timestamp: clock::Lamport,
2959 cx: &mut Context<Self>,
2960 ) {
2961 if lamport_timestamp > self.diagnostics_timestamp {
2962 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2963 if diagnostics.is_empty() {
2964 if let Ok(ix) = ix {
2965 self.diagnostics.remove(ix);
2966 }
2967 } else {
2968 match ix {
2969 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2970 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2971 };
2972 }
2973 self.diagnostics_timestamp = lamport_timestamp;
2974 self.non_text_state_update_count += 1;
2975 self.text.lamport_clock.observe(lamport_timestamp);
2976 cx.notify();
2977 cx.emit(BufferEvent::DiagnosticsUpdated);
2978 }
2979 }
2980
2981 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2982 self.was_changed();
2983 cx.emit(BufferEvent::Operation {
2984 operation,
2985 is_local,
2986 });
2987 }
2988
2989 /// Removes the selections for a given peer.
2990 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2991 self.remote_selections.remove(&replica_id);
2992 cx.notify();
2993 }
2994
2995 /// Undoes the most recent transaction.
2996 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2997 let was_dirty = self.is_dirty();
2998 let old_version = self.version.clone();
2999
3000 if let Some((transaction_id, operation)) = self.text.undo() {
3001 self.send_operation(Operation::Buffer(operation), true, cx);
3002 self.did_edit(&old_version, was_dirty, cx);
3003 Some(transaction_id)
3004 } else {
3005 None
3006 }
3007 }
3008
3009 /// Manually undoes a specific transaction in the buffer's undo history.
3010 pub fn undo_transaction(
3011 &mut self,
3012 transaction_id: TransactionId,
3013 cx: &mut Context<Self>,
3014 ) -> bool {
3015 let was_dirty = self.is_dirty();
3016 let old_version = self.version.clone();
3017 if let Some(operation) = self.text.undo_transaction(transaction_id) {
3018 self.send_operation(Operation::Buffer(operation), true, cx);
3019 self.did_edit(&old_version, was_dirty, cx);
3020 true
3021 } else {
3022 false
3023 }
3024 }
3025
3026 /// Manually undoes all changes after a given transaction in the buffer's undo history.
3027 pub fn undo_to_transaction(
3028 &mut self,
3029 transaction_id: TransactionId,
3030 cx: &mut Context<Self>,
3031 ) -> bool {
3032 let was_dirty = self.is_dirty();
3033 let old_version = self.version.clone();
3034
3035 let operations = self.text.undo_to_transaction(transaction_id);
3036 let undone = !operations.is_empty();
3037 for operation in operations {
3038 self.send_operation(Operation::Buffer(operation), true, cx);
3039 }
3040 if undone {
3041 self.did_edit(&old_version, was_dirty, cx)
3042 }
3043 undone
3044 }
3045
3046 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3047 let was_dirty = self.is_dirty();
3048 let operation = self.text.undo_operations(counts);
3049 let old_version = self.version.clone();
3050 self.send_operation(Operation::Buffer(operation), true, cx);
3051 self.did_edit(&old_version, was_dirty, cx);
3052 }
3053
    /// Redoes the most recent transaction.
3055 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3056 let was_dirty = self.is_dirty();
3057 let old_version = self.version.clone();
3058
3059 if let Some((transaction_id, operation)) = self.text.redo() {
3060 self.send_operation(Operation::Buffer(operation), true, cx);
3061 self.did_edit(&old_version, was_dirty, cx);
3062 Some(transaction_id)
3063 } else {
3064 None
3065 }
3066 }
3067
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3069 pub fn redo_to_transaction(
3070 &mut self,
3071 transaction_id: TransactionId,
3072 cx: &mut Context<Self>,
3073 ) -> bool {
3074 let was_dirty = self.is_dirty();
3075 let old_version = self.version.clone();
3076
3077 let operations = self.text.redo_to_transaction(transaction_id);
3078 let redone = !operations.is_empty();
3079 for operation in operations {
3080 self.send_operation(Operation::Buffer(operation), true, cx);
3081 }
3082 if redone {
3083 self.did_edit(&old_version, was_dirty, cx)
3084 }
3085 redone
3086 }
3087
3088 /// Override current completion triggers with the user-provided completion triggers.
3089 pub fn set_completion_triggers(
3090 &mut self,
3091 server_id: LanguageServerId,
3092 triggers: BTreeSet<String>,
3093 cx: &mut Context<Self>,
3094 ) {
3095 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3096 if triggers.is_empty() {
3097 self.completion_triggers_per_language_server
3098 .remove(&server_id);
3099 self.completion_triggers = self
3100 .completion_triggers_per_language_server
3101 .values()
3102 .flat_map(|triggers| triggers.iter().cloned())
3103 .collect();
3104 } else {
3105 self.completion_triggers_per_language_server
3106 .insert(server_id, triggers.clone());
3107 self.completion_triggers.extend(triggers.iter().cloned());
3108 }
3109 self.send_operation(
3110 Operation::UpdateCompletionTriggers {
3111 triggers: triggers.into_iter().collect(),
3112 lamport_timestamp: self.completion_triggers_timestamp,
3113 server_id,
3114 },
3115 true,
3116 cx,
3117 );
3118 cx.notify();
3119 }
3120
3121 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
3123 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3124 &self.completion_triggers
3125 }
3126
3127 /// Call this directly after performing edits to prevent the preview tab
3128 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3129 /// to return false until there are additional edits.
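    ///
    /// A minimal sketch (not a doc-test; assumes a `&mut Buffer` named `buffer` and a
    /// `cx: &mut Context<Buffer>`, neither of which is defined here):
    ///
    /// ```ignore
    /// // Mirror how autoindent edits preserve preview status: only refresh if the
    /// // buffer was still previewable before our programmatic edit.
    /// let preserve = buffer.preserve_preview();
    /// let end = buffer.len();
    /// buffer.edit([(end..end, "\n// generated footer")], None, cx);
    /// if preserve {
    ///     buffer.refresh_preview();
    /// }
    /// ```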
3130 pub fn refresh_preview(&mut self) {
3131 self.preview_version = self.version.clone();
3132 }
3133
3134 /// Whether we should preserve the preview status of a tab containing this buffer.
3135 pub fn preserve_preview(&self) -> bool {
3136 !self.has_edits_since(&self.preview_version)
3137 }
3138}
3139
3140#[doc(hidden)]
3141#[cfg(any(test, feature = "test-support"))]
3142impl Buffer {
3143 pub fn edit_via_marked_text(
3144 &mut self,
3145 marked_string: &str,
3146 autoindent_mode: Option<AutoindentMode>,
3147 cx: &mut Context<Self>,
3148 ) {
3149 let edits = self.edits_for_marked_text(marked_string);
3150 self.edit(edits, autoindent_mode, cx);
3151 }
3152
3153 pub fn set_group_interval(&mut self, group_interval: Duration) {
3154 self.text.set_group_interval(group_interval);
3155 }
3156
3157 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3158 where
3159 T: rand::Rng,
3160 {
3161 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3162 let mut last_end = None;
3163 for _ in 0..old_range_count {
3164 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3165 break;
3166 }
3167
3168 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3169 let mut range = self.random_byte_range(new_start, rng);
3170 if rng.random_bool(0.2) {
3171 mem::swap(&mut range.start, &mut range.end);
3172 }
3173 last_end = Some(range.end);
3174
3175 let new_text_len = rng.random_range(0..10);
3176 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3177 new_text = new_text.to_uppercase();
3178
3179 edits.push((range, new_text));
3180 }
3181 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3182 self.edit(edits, None, cx);
3183 }
3184
3185 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3186 let was_dirty = self.is_dirty();
3187 let old_version = self.version.clone();
3188
3189 let ops = self.text.randomly_undo_redo(rng);
3190 if !ops.is_empty() {
3191 for op in ops {
3192 self.send_operation(Operation::Buffer(op), true, cx);
3193 self.did_edit(&old_version, was_dirty, cx);
3194 }
3195 }
3196 }
3197}
3198
3199impl EventEmitter<BufferEvent> for Buffer {}
3200
3201impl Deref for Buffer {
3202 type Target = TextBuffer;
3203
3204 fn deref(&self) -> &Self::Target {
3205 &self.text
3206 }
3207}
3208
3209impl BufferSnapshot {
3210 /// Returns [`IndentSize`] for a given line that respects user settings and
3211 /// language preferences.
3212 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3213 indent_size_for_line(self, row)
3214 }
3215
3216 /// Returns [`IndentSize`] for a given position that respects user settings
3217 /// and language preferences.
3218 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3219 let settings = language_settings(
3220 self.language_at(position).map(|l| l.name()),
3221 self.file(),
3222 cx,
3223 );
3224 if settings.hard_tabs {
3225 IndentSize::tab()
3226 } else {
3227 IndentSize::spaces(settings.tab_size.get())
3228 }
3229 }
3230
3231 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3232 /// is passed in as `single_indent_size`.
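    ///
    /// A minimal sketch (not a doc-test; assumes a `BufferSnapshot` named `snapshot`,
    /// which is not defined here):
    ///
    /// ```ignore
    /// // Suggested indents for every row, using a four-space indent unit.
    /// let rows = 0..snapshot.max_point().row + 1;
    /// let suggestions = snapshot.suggested_indents(rows, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent by {} columns", indent.len);
    /// }
    /// ```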
3233 pub fn suggested_indents(
3234 &self,
3235 rows: impl Iterator<Item = u32>,
3236 single_indent_size: IndentSize,
3237 ) -> BTreeMap<u32, IndentSize> {
3238 let mut result = BTreeMap::new();
3239
3240 for row_range in contiguous_ranges(rows, 10) {
3241 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3242 Some(suggestions) => suggestions,
3243 _ => break,
3244 };
3245
3246 for (row, suggestion) in row_range.zip(suggestions) {
3247 let indent_size = if let Some(suggestion) = suggestion {
3248 result
3249 .get(&suggestion.basis_row)
3250 .copied()
3251 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3252 .with_delta(suggestion.delta, single_indent_size)
3253 } else {
3254 self.indent_size_for_line(row)
3255 };
3256
3257 result.insert(row, indent_size);
3258 }
3259 }
3260
3261 result
3262 }
3263
3264 fn suggest_autoindents(
3265 &self,
3266 row_range: Range<u32>,
3267 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3268 let config = &self.language.as_ref()?.config;
3269 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3270
3271 #[derive(Debug, Clone)]
3272 struct StartPosition {
3273 start: Point,
3274 suffix: SharedString,
3275 language: Arc<Language>,
3276 }
3277
3278 // Find the suggested indentation ranges based on the syntax tree.
3279 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3280 let end = Point::new(row_range.end, 0);
3281 let range = (start..end).to_offset(&self.text);
3282 let mut matches = self.syntax.matches_with_options(
3283 range.clone(),
3284 &self.text,
3285 TreeSitterOptions {
3286 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3287 max_start_depth: None,
3288 },
3289 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3290 );
3291 let indent_configs = matches
3292 .grammars()
3293 .iter()
3294 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3295 .collect::<Vec<_>>();
3296
3297 let mut indent_ranges = Vec::<Range<Point>>::new();
3298 let mut start_positions = Vec::<StartPosition>::new();
3299 let mut outdent_positions = Vec::<Point>::new();
3300 while let Some(mat) = matches.peek() {
3301 let mut start: Option<Point> = None;
3302 let mut end: Option<Point> = None;
3303
3304 let config = indent_configs[mat.grammar_index];
3305 for capture in mat.captures {
3306 if capture.index == config.indent_capture_ix {
3307 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3308 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3309 } else if Some(capture.index) == config.start_capture_ix {
3310 start = Some(Point::from_ts_point(capture.node.end_position()));
3311 } else if Some(capture.index) == config.end_capture_ix {
3312 end = Some(Point::from_ts_point(capture.node.start_position()));
3313 } else if Some(capture.index) == config.outdent_capture_ix {
3314 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3315 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3316 start_positions.push(StartPosition {
3317 start: Point::from_ts_point(capture.node.start_position()),
3318 suffix: suffix.clone(),
3319 language: mat.language.clone(),
3320 });
3321 }
3322 }
3323
3324 matches.advance();
3325 if let Some((start, end)) = start.zip(end) {
3326 if start.row == end.row {
3327 continue;
3328 }
3329 let range = start..end;
3330 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3331 Err(ix) => indent_ranges.insert(ix, range),
3332 Ok(ix) => {
3333 let prev_range = &mut indent_ranges[ix];
3334 prev_range.end = prev_range.end.max(range.end);
3335 }
3336 }
3337 }
3338 }
3339
3340 let mut error_ranges = Vec::<Range<Point>>::new();
3341 let mut matches = self
3342 .syntax
3343 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3344 while let Some(mat) = matches.peek() {
3345 let node = mat.captures[0].node;
3346 let start = Point::from_ts_point(node.start_position());
3347 let end = Point::from_ts_point(node.end_position());
3348 let range = start..end;
3349 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3350 Ok(ix) | Err(ix) => ix,
3351 };
3352 let mut end_ix = ix;
3353 while let Some(existing_range) = error_ranges.get(end_ix) {
3354 if existing_range.end < end {
3355 end_ix += 1;
3356 } else {
3357 break;
3358 }
3359 }
3360 error_ranges.splice(ix..end_ix, [range]);
3361 matches.advance();
3362 }
3363
3364 outdent_positions.sort();
3365 for outdent_position in outdent_positions {
                // Find the innermost indent range containing this outdent position
                // and set its end to that position.
3368 if let Some(range_to_truncate) = indent_ranges
3369 .iter_mut()
3370 .rfind(|indent_range| indent_range.contains(&outdent_position))
3371 {
3372 range_to_truncate.end = outdent_position;
3373 }
3374 }
3375
3376 start_positions.sort_by_key(|b| b.start);
3377
        // Find the suggested indentation increases and decreases based on regexes.
3379 let mut regex_outdent_map = HashMap::default();
3380 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3381 let mut start_positions_iter = start_positions.iter().peekable();
3382
3383 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3384 self.for_each_line(
3385 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3386 ..Point::new(row_range.end, 0),
3387 |row, line| {
3388 let indent_len = self.indent_size_for_line(row).len;
3389 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3390 let row_language_config = row_language
3391 .as_ref()
3392 .map(|lang| lang.config())
3393 .unwrap_or(config);
3394
3395 if row_language_config
3396 .decrease_indent_pattern
3397 .as_ref()
3398 .is_some_and(|regex| regex.is_match(line))
3399 {
3400 indent_change_rows.push((row, Ordering::Less));
3401 }
3402 if row_language_config
3403 .increase_indent_pattern
3404 .as_ref()
3405 .is_some_and(|regex| regex.is_match(line))
3406 {
3407 indent_change_rows.push((row + 1, Ordering::Greater));
3408 }
3409 while let Some(pos) = start_positions_iter.peek() {
3410 if pos.start.row < row {
3411 let pos = start_positions_iter.next().unwrap().clone();
3412 last_seen_suffix
3413 .entry(pos.suffix.to_string())
3414 .or_default()
3415 .push(pos);
3416 } else {
3417 break;
3418 }
3419 }
3420 for rule in &row_language_config.decrease_indent_patterns {
3421 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3422 let row_start_column = self.indent_size_for_line(row).len;
3423 let basis_row = rule
3424 .valid_after
3425 .iter()
3426 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3427 .flatten()
3428 .filter(|pos| {
3429 row_language
3430 .as_ref()
3431 .or(self.language.as_ref())
3432 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3433 })
3434 .filter(|pos| pos.start.column <= row_start_column)
3435 .max_by_key(|pos| pos.start.row);
3436 if let Some(outdent_to) = basis_row {
3437 regex_outdent_map.insert(row, outdent_to.start.row);
3438 }
3439 break;
3440 }
3441 }
3442 },
3443 );
3444
3445 let mut indent_changes = indent_change_rows.into_iter().peekable();
3446 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3447 prev_non_blank_row.unwrap_or(0)
3448 } else {
3449 row_range.start.saturating_sub(1)
3450 };
3451
3452 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3453 Some(row_range.map(move |row| {
3454 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3455
3456 let mut indent_from_prev_row = false;
3457 let mut outdent_from_prev_row = false;
3458 let mut outdent_to_row = u32::MAX;
3459 let mut from_regex = false;
3460
3461 while let Some((indent_row, delta)) = indent_changes.peek() {
3462 match indent_row.cmp(&row) {
3463 Ordering::Equal => match delta {
3464 Ordering::Less => {
3465 from_regex = true;
3466 outdent_from_prev_row = true
3467 }
3468 Ordering::Greater => {
3469 indent_from_prev_row = true;
3470 from_regex = true
3471 }
3472 _ => {}
3473 },
3474
3475 Ordering::Greater => break,
3476 Ordering::Less => {}
3477 }
3478
3479 indent_changes.next();
3480 }
3481
3482 for range in &indent_ranges {
3483 if range.start.row >= row {
3484 break;
3485 }
3486 if range.start.row == prev_row && range.end > row_start {
3487 indent_from_prev_row = true;
3488 }
3489 if range.end > prev_row_start && range.end <= row_start {
3490 outdent_to_row = outdent_to_row.min(range.start.row);
3491 }
3492 }
3493
3494 if let Some(basis_row) = regex_outdent_map.get(&row) {
3495 indent_from_prev_row = false;
3496 outdent_to_row = *basis_row;
3497 from_regex = true;
3498 }
3499
3500 let within_error = error_ranges
3501 .iter()
3502 .any(|e| e.start.row < row && e.end > row_start);
3503
3504 let suggestion = if outdent_to_row == prev_row
3505 || (outdent_from_prev_row && indent_from_prev_row)
3506 {
3507 Some(IndentSuggestion {
3508 basis_row: prev_row,
3509 delta: Ordering::Equal,
3510 within_error: within_error && !from_regex,
3511 })
3512 } else if indent_from_prev_row {
3513 Some(IndentSuggestion {
3514 basis_row: prev_row,
3515 delta: Ordering::Greater,
3516 within_error: within_error && !from_regex,
3517 })
3518 } else if outdent_to_row < prev_row {
3519 Some(IndentSuggestion {
3520 basis_row: outdent_to_row,
3521 delta: Ordering::Equal,
3522 within_error: within_error && !from_regex,
3523 })
3524 } else if outdent_from_prev_row {
3525 Some(IndentSuggestion {
3526 basis_row: prev_row,
3527 delta: Ordering::Less,
3528 within_error: within_error && !from_regex,
3529 })
3530 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3531 {
3532 Some(IndentSuggestion {
3533 basis_row: prev_row,
3534 delta: Ordering::Equal,
3535 within_error: within_error && !from_regex,
3536 })
3537 } else {
3538 None
3539 };
3540
3541 prev_row = row;
3542 prev_row_start = row_start;
3543 suggestion
3544 }))
3545 }
3546
3547 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3548 while row > 0 {
3549 row -= 1;
3550 if !self.is_line_blank(row) {
3551 return Some(row);
3552 }
3553 }
3554 None
3555 }
3556
3557 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3558 let captures = self.syntax.captures(range, &self.text, |grammar| {
3559 grammar
3560 .highlights_config
3561 .as_ref()
3562 .map(|config| &config.query)
3563 });
3564 let highlight_maps = captures
3565 .grammars()
3566 .iter()
3567 .map(|grammar| grammar.highlight_map())
3568 .collect();
3569 (captures, highlight_maps)
3570 }
3571
3572 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3573 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3574 /// returned in chunks where each chunk has a single syntax highlighting style and
3575 /// diagnostic status.
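    ///
    /// # Example
    ///
    /// A minimal usage sketch (not a compiled doctest); it assumes `snapshot` is a
    /// `BufferSnapshot` that is in scope:
    ///
    /// ```ignore
    /// // Reassemble the buffer's text while observing per-chunk highlight information.
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries a single `syntax_highlight_id` and diagnostic status.
    ///     text.push_str(chunk.text);
    /// }
    /// ```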
3576 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3577 let range = range.start.to_offset(self)..range.end.to_offset(self);
3578
3579 let mut syntax = None;
3580 if language_aware {
3581 syntax = Some(self.get_highlights(range.clone()));
3582 }
3583 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3584 let diagnostics = language_aware;
3585 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3586 }
3587
3588 pub fn highlighted_text_for_range<T: ToOffset>(
3589 &self,
3590 range: Range<T>,
3591 override_style: Option<HighlightStyle>,
3592 syntax_theme: &SyntaxTheme,
3593 ) -> HighlightedText {
3594 HighlightedText::from_buffer_range(
3595 range,
3596 &self.text,
3597 &self.syntax,
3598 override_style,
3599 syntax_theme,
3600 )
3601 }
3602
3603 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// A single string is reused across lines to avoid allocating a new string for each line.
3605 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3606 let mut line = String::new();
3607 let mut row = range.start.row;
3608 for chunk in self
3609 .as_rope()
3610 .chunks_in_range(range.to_offset(self))
3611 .chain(["\n"])
3612 {
3613 for (newline_ix, text) in chunk.split('\n').enumerate() {
3614 if newline_ix > 0 {
3615 callback(row, &line);
3616 row += 1;
3617 line.clear();
3618 }
3619 line.push_str(text);
3620 }
3621 }
3622 }
3623
3624 /// Iterates over every [`SyntaxLayer`] in the buffer.
3625 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3626 self.syntax_layers_for_range(0..self.len(), true)
3627 }
3628
3629 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3630 let offset = position.to_offset(self);
3631 self.syntax_layers_for_range(offset..offset, false)
3632 .filter(|l| {
3633 if let Some(ranges) = l.included_sub_ranges {
3634 ranges.iter().any(|range| {
3635 let start = range.start.to_offset(self);
3636 start <= offset && {
3637 let end = range.end.to_offset(self);
3638 offset < end
3639 }
3640 })
3641 } else {
3642 l.node().start_byte() <= offset && l.node().end_byte() > offset
3643 }
3644 })
3645 .last()
3646 }
3647
3648 pub fn syntax_layers_for_range<D: ToOffset>(
3649 &self,
3650 range: Range<D>,
3651 include_hidden: bool,
3652 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3653 self.syntax
3654 .layers_for_range(range, &self.text, include_hidden)
3655 }
3656
3657 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3658 &self,
3659 range: Range<D>,
3660 ) -> Option<SyntaxLayer<'_>> {
3661 let range = range.to_offset(self);
3662 self.syntax
3663 .layers_for_range(range, &self.text, false)
3664 .max_by(|a, b| {
3665 if a.depth != b.depth {
3666 a.depth.cmp(&b.depth)
3667 } else if a.offset.0 != b.offset.0 {
3668 a.offset.0.cmp(&b.offset.0)
3669 } else {
3670 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3671 }
3672 })
3673 }
3674
3675 /// Returns the main [`Language`].
3676 pub fn language(&self) -> Option<&Arc<Language>> {
3677 self.language.as_ref()
3678 }
3679
3680 /// Returns the [`Language`] at the given location.
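    ///
    /// For buffers with injected languages (for example, a fenced code block inside Markdown),
    /// this may differ from the buffer's primary language (see [`Self::language`]), falling back
    /// to it when no syntax layer covers the position.
    ///
    /// A usage sketch (not a compiled doctest); `snapshot` and `offset` are assumed to be in scope:
    ///
    /// ```ignore
    /// let name = snapshot.language_at(offset).map(|language| language.name());
    /// ```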
3681 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3682 self.syntax_layer_at(position)
3683 .map(|info| info.language)
3684 .or(self.language.as_ref())
3685 }
3686
3687 /// Returns the settings for the language at the given location.
3688 pub fn settings_at<'a, D: ToOffset>(
3689 &'a self,
3690 position: D,
3691 cx: &'a App,
3692 ) -> Cow<'a, LanguageSettings> {
3693 language_settings(
3694 self.language_at(position).map(|l| l.name()),
3695 self.file.as_ref(),
3696 cx,
3697 )
3698 }
3699
3700 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3701 CharClassifier::new(self.language_scope_at(point))
3702 }
3703
3704 /// Returns the [`LanguageScope`] at the given location.
3705 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3706 let offset = position.to_offset(self);
3707 let mut scope = None;
3708 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3709
3710 // Use the layer that has the smallest node intersecting the given point.
3711 for layer in self
3712 .syntax
3713 .layers_for_range(offset..offset, &self.text, false)
3714 {
3715 let mut cursor = layer.node().walk();
3716
3717 let mut range = None;
3718 loop {
3719 let child_range = cursor.node().byte_range();
3720 if !child_range.contains(&offset) {
3721 break;
3722 }
3723
3724 range = Some(child_range);
3725 if cursor.goto_first_child_for_byte(offset).is_none() {
3726 break;
3727 }
3728 }
3729
3730 if let Some(range) = range
3731 && smallest_range_and_depth.as_ref().is_none_or(
3732 |(smallest_range, smallest_range_depth)| {
3733 if layer.depth > *smallest_range_depth {
3734 true
3735 } else if layer.depth == *smallest_range_depth {
3736 range.len() < smallest_range.len()
3737 } else {
3738 false
3739 }
3740 },
3741 )
3742 {
3743 smallest_range_and_depth = Some((range, layer.depth));
3744 scope = Some(LanguageScope {
3745 language: layer.language.clone(),
3746 override_id: layer.override_id(offset, &self.text),
3747 });
3748 }
3749 }
3750
3751 scope.or_else(|| {
3752 self.language.clone().map(|language| LanguageScope {
3753 language,
3754 override_id: None,
3755 })
3756 })
3757 }
3758
3759 /// Returns a tuple of the range and character kind of the word
3760 /// surrounding the given position.
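    ///
    /// A usage sketch (not a compiled doctest); it assumes `snapshot` contains `"let foo = 1;"`:
    ///
    /// ```ignore
    /// // With the position inside "foo", the whole word is returned.
    /// let (range, kind) = snapshot.surrounding_word(5, None);
    /// // `range` is 4..7 (covering "foo") and `kind` is `Some(CharKind::Word)`.
    /// ```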
3761 pub fn surrounding_word<T: ToOffset>(
3762 &self,
3763 start: T,
3764 scope_context: Option<CharScopeContext>,
3765 ) -> (Range<usize>, Option<CharKind>) {
3766 let mut start = start.to_offset(self);
3767 let mut end = start;
3768 let mut next_chars = self.chars_at(start).take(128).peekable();
3769 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3770
3771 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3772 let word_kind = cmp::max(
3773 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3774 next_chars.peek().copied().map(|c| classifier.kind(c)),
3775 );
3776
3777 for ch in prev_chars {
3778 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3779 start -= ch.len_utf8();
3780 } else {
3781 break;
3782 }
3783 }
3784
3785 for ch in next_chars {
3786 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3787 end += ch.len_utf8();
3788 } else {
3789 break;
3790 }
3791 }
3792
3793 (start..end, word_kind)
3794 }
3795
3796 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3797 /// range. When `require_larger` is true, the node found must be larger than the query range.
3798 ///
3799 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3800 /// be moved to the root of the tree.
3801 fn goto_node_enclosing_range(
3802 cursor: &mut tree_sitter::TreeCursor,
3803 query_range: &Range<usize>,
3804 require_larger: bool,
3805 ) -> bool {
3806 let mut ascending = false;
3807 loop {
3808 let mut range = cursor.node().byte_range();
3809 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3812 if range.start > query_range.start {
3813 cursor.goto_previous_sibling();
3814 range = cursor.node().byte_range();
3815 }
3816 } else {
3817 // When the query range is non-empty and the current node ends exactly at the start,
3818 // move to the next sibling to find a node that extends beyond the start.
3819 if range.end == query_range.start {
3820 cursor.goto_next_sibling();
3821 range = cursor.node().byte_range();
3822 }
3823 }
3824
3825 let encloses = range.contains_inclusive(query_range)
3826 && (!require_larger || range.len() > query_range.len());
3827 if !encloses {
3828 ascending = true;
3829 if !cursor.goto_parent() {
3830 return false;
3831 }
3832 continue;
3833 } else if ascending {
3834 return true;
3835 }
3836
3837 // Descend into the current node.
3838 if cursor
3839 .goto_first_child_for_byte(query_range.start)
3840 .is_none()
3841 {
3842 return true;
3843 }
3844 }
3845 }
3846
3847 pub fn syntax_ancestor<'a, T: ToOffset>(
3848 &'a self,
3849 range: Range<T>,
3850 ) -> Option<tree_sitter::Node<'a>> {
3851 let range = range.start.to_offset(self)..range.end.to_offset(self);
3852 let mut result: Option<tree_sitter::Node<'a>> = None;
3853 for layer in self
3854 .syntax
3855 .layers_for_range(range.clone(), &self.text, true)
3856 {
3857 let mut cursor = layer.node().walk();
3858
3859 // Find the node that both contains the range and is larger than it.
3860 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3861 continue;
3862 }
3863
3864 let left_node = cursor.node();
3865 let mut layer_result = left_node;
3866
3867 // For an empty range, try to find another node immediately to the right of the range.
3868 if left_node.end_byte() == range.start {
3869 let mut right_node = None;
3870 while !cursor.goto_next_sibling() {
3871 if !cursor.goto_parent() {
3872 break;
3873 }
3874 }
3875
3876 while cursor.node().start_byte() == range.start {
3877 right_node = Some(cursor.node());
3878 if !cursor.goto_first_child() {
3879 break;
3880 }
3881 }
3882
3883 // If there is a candidate node on both sides of the (empty) range, then
3884 // decide between the two by favoring a named node over an anonymous token.
3885 // If both nodes are the same in that regard, favor the right one.
3886 if let Some(right_node) = right_node
3887 && (right_node.is_named() || !left_node.is_named())
3888 {
3889 layer_result = right_node;
3890 }
3891 }
3892
3893 if let Some(previous_result) = &result
3894 && previous_result.byte_range().len() < layer_result.byte_range().len()
3895 {
3896 continue;
3897 }
3898 result = Some(layer_result);
3899 }
3900
3901 result
3902 }
3903
3904 /// Find the previous sibling syntax node at the given range.
3905 ///
3906 /// This function locates the syntax node that precedes the node containing
3907 /// the given range. It searches hierarchically by:
3908 /// 1. Finding the node that contains the given range
3909 /// 2. Looking for the previous sibling at the same tree level
3910 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3911 ///
3912 /// Returns `None` if there is no previous sibling at any ancestor level.
3913 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3914 &'a self,
3915 range: Range<T>,
3916 ) -> Option<tree_sitter::Node<'a>> {
3917 let range = range.start.to_offset(self)..range.end.to_offset(self);
3918 let mut result: Option<tree_sitter::Node<'a>> = None;
3919
3920 for layer in self
3921 .syntax
3922 .layers_for_range(range.clone(), &self.text, true)
3923 {
3924 let mut cursor = layer.node().walk();
3925
3926 // Find the node that contains the range
3927 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3928 continue;
3929 }
3930
3931 // Look for the previous sibling, moving up ancestor levels if needed
3932 loop {
3933 if cursor.goto_previous_sibling() {
3934 let layer_result = cursor.node();
3935
3936 if let Some(previous_result) = &result {
3937 if previous_result.byte_range().end < layer_result.byte_range().end {
3938 continue;
3939 }
3940 }
3941 result = Some(layer_result);
3942 break;
3943 }
3944
3945 // No sibling found at this level, try moving up to parent
3946 if !cursor.goto_parent() {
3947 break;
3948 }
3949 }
3950 }
3951
3952 result
3953 }
3954
3955 /// Find the next sibling syntax node at the given range.
3956 ///
3957 /// This function locates the syntax node that follows the node containing
3958 /// the given range. It searches hierarchically by:
3959 /// 1. Finding the node that contains the given range
3960 /// 2. Looking for the next sibling at the same tree level
3961 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3962 ///
3963 /// Returns `None` if there is no next sibling at any ancestor level.
3964 pub fn syntax_next_sibling<'a, T: ToOffset>(
3965 &'a self,
3966 range: Range<T>,
3967 ) -> Option<tree_sitter::Node<'a>> {
3968 let range = range.start.to_offset(self)..range.end.to_offset(self);
3969 let mut result: Option<tree_sitter::Node<'a>> = None;
3970
3971 for layer in self
3972 .syntax
3973 .layers_for_range(range.clone(), &self.text, true)
3974 {
3975 let mut cursor = layer.node().walk();
3976
3977 // Find the node that contains the range
3978 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3979 continue;
3980 }
3981
3982 // Look for the next sibling, moving up ancestor levels if needed
3983 loop {
3984 if cursor.goto_next_sibling() {
3985 let layer_result = cursor.node();
3986
3987 if let Some(previous_result) = &result {
3988 if previous_result.byte_range().start > layer_result.byte_range().start {
3989 continue;
3990 }
3991 }
3992 result = Some(layer_result);
3993 break;
3994 }
3995
3996 // No sibling found at this level, try moving up to parent
3997 if !cursor.goto_parent() {
3998 break;
3999 }
4000 }
4001 }
4002
4003 result
4004 }
4005
    /// Returns the root syntax node within the given row.
4007 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
4008 let start_offset = position.to_offset(self);
4009
4010 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
4011
4012 let layer = self
4013 .syntax
4014 .layers_for_range(start_offset..start_offset, &self.text, true)
4015 .next()?;
4016
4017 let mut cursor = layer.node().walk();
4018
4019 // Descend to the first leaf that touches the start of the range.
4020 while cursor.goto_first_child_for_byte(start_offset).is_some() {
4021 if cursor.node().end_byte() == start_offset {
4022 cursor.goto_next_sibling();
4023 }
4024 }
4025
4026 // Ascend to the root node within the same row.
4027 while cursor.goto_parent() {
4028 if cursor.node().start_position().row != row {
4029 break;
4030 }
4031 }
4032
4033 Some(cursor.node())
4034 }
4035
4036 /// Returns the outline for the buffer.
4037 ///
4038 /// This method allows passing an optional [`SyntaxTheme`] to
4039 /// syntax-highlight the returned symbols.
4040 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4041 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4042 }
4043
4044 /// Returns all the symbols that contain the given position.
4045 ///
4046 /// This method allows passing an optional [`SyntaxTheme`] to
4047 /// syntax-highlight the returned symbols.
4048 pub fn symbols_containing<T: ToOffset>(
4049 &self,
4050 position: T,
4051 theme: Option<&SyntaxTheme>,
4052 ) -> Vec<OutlineItem<Anchor>> {
4053 let position = position.to_offset(self);
4054 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4055 let end = self.clip_offset(position + 1, Bias::Right);
4056 let mut items = self.outline_items_containing(start..end, false, theme);
4057 let mut prev_depth = None;
4058 items.retain(|item| {
4059 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4060 prev_depth = Some(item.depth);
4061 result
4062 });
4063 items
4064 }
4065
4066 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4067 let range = range.to_offset(self);
4068 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4069 grammar.outline_config.as_ref().map(|c| &c.query)
4070 });
4071 let configs = matches
4072 .grammars()
4073 .iter()
4074 .map(|g| g.outline_config.as_ref().unwrap())
4075 .collect::<Vec<_>>();
4076
4077 while let Some(mat) = matches.peek() {
4078 let config = &configs[mat.grammar_index];
4079 let containing_item_node = maybe!({
4080 let item_node = mat.captures.iter().find_map(|cap| {
4081 if cap.index == config.item_capture_ix {
4082 Some(cap.node)
4083 } else {
4084 None
4085 }
4086 })?;
4087
4088 let item_byte_range = item_node.byte_range();
4089 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4090 None
4091 } else {
4092 Some(item_node)
4093 }
4094 });
4095
4096 if let Some(item_node) = containing_item_node {
4097 return Some(
4098 Point::from_ts_point(item_node.start_position())
4099 ..Point::from_ts_point(item_node.end_position()),
4100 );
4101 }
4102
4103 matches.advance();
4104 }
4105 None
4106 }
4107
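    /// Returns the outline items whose ranges intersect the given range, with depths assigned
    /// based on containment.
    ///
    /// A usage sketch (not a compiled doctest); it assumes `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     // `item.depth` reflects nesting; `item.text` is the rendered label.
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```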
4108 pub fn outline_items_containing<T: ToOffset>(
4109 &self,
4110 range: Range<T>,
4111 include_extra_context: bool,
4112 theme: Option<&SyntaxTheme>,
4113 ) -> Vec<OutlineItem<Anchor>> {
4114 self.outline_items_containing_internal(
4115 range,
4116 include_extra_context,
4117 theme,
4118 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4119 )
4120 }
4121
4122 pub fn outline_items_as_points_containing<T: ToOffset>(
4123 &self,
4124 range: Range<T>,
4125 include_extra_context: bool,
4126 theme: Option<&SyntaxTheme>,
4127 ) -> Vec<OutlineItem<Point>> {
4128 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4129 range
4130 })
4131 }
4132
4133 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4134 &self,
4135 range: Range<T>,
4136 include_extra_context: bool,
4137 theme: Option<&SyntaxTheme>,
4138 ) -> Vec<OutlineItem<usize>> {
4139 self.outline_items_containing_internal(
4140 range,
4141 include_extra_context,
4142 theme,
4143 |buffer, range| range.to_offset(buffer),
4144 )
4145 }
4146
4147 fn outline_items_containing_internal<T: ToOffset, U>(
4148 &self,
4149 range: Range<T>,
4150 include_extra_context: bool,
4151 theme: Option<&SyntaxTheme>,
4152 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4153 ) -> Vec<OutlineItem<U>> {
4154 let range = range.to_offset(self);
4155 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4156 grammar.outline_config.as_ref().map(|c| &c.query)
4157 });
4158
4159 let mut items = Vec::new();
4160 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4161 while let Some(mat) = matches.peek() {
4162 let config = matches.grammars()[mat.grammar_index]
4163 .outline_config
4164 .as_ref()
4165 .unwrap();
4166 if let Some(item) =
4167 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4168 {
4169 items.push(item);
4170 } else if let Some(capture) = mat
4171 .captures
4172 .iter()
4173 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4174 {
4175 let capture_range = capture.node.start_position()..capture.node.end_position();
4176 let mut capture_row_range =
4177 capture_range.start.row as u32..capture_range.end.row as u32;
4178 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4179 {
4180 capture_row_range.end -= 1;
4181 }
4182 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4183 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4184 last_row_range.end = capture_row_range.end;
4185 } else {
4186 annotation_row_ranges.push(capture_row_range);
4187 }
4188 } else {
4189 annotation_row_ranges.push(capture_row_range);
4190 }
4191 }
4192 matches.advance();
4193 }
4194
4195 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4196
        // Assign depths based on containment relationships and convert the ranges
        // to the caller's representation via `range_callback`.
4198 let mut item_ends_stack = Vec::<Point>::new();
4199 let mut anchor_items = Vec::new();
4200 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4201 for item in items {
4202 while let Some(last_end) = item_ends_stack.last().copied() {
4203 if last_end < item.range.end {
4204 item_ends_stack.pop();
4205 } else {
4206 break;
4207 }
4208 }
4209
4210 let mut annotation_row_range = None;
4211 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4212 let row_preceding_item = item.range.start.row.saturating_sub(1);
4213 if next_annotation_row_range.end < row_preceding_item {
4214 annotation_row_ranges.next();
4215 } else {
4216 if next_annotation_row_range.end == row_preceding_item {
4217 annotation_row_range = Some(next_annotation_row_range.clone());
4218 annotation_row_ranges.next();
4219 }
4220 break;
4221 }
4222 }
4223
4224 anchor_items.push(OutlineItem {
4225 depth: item_ends_stack.len(),
4226 range: range_callback(self, item.range.clone()),
4227 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4228 text: item.text,
4229 highlight_ranges: item.highlight_ranges,
4230 name_ranges: item.name_ranges,
4231 body_range: item.body_range.map(|r| range_callback(self, r)),
4232 annotation_range: annotation_row_range.map(|annotation_range| {
4233 let point_range = Point::new(annotation_range.start, 0)
4234 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4235 range_callback(self, point_range)
4236 }),
4237 });
4238 item_ends_stack.push(item.range.end);
4239 }
4240
4241 anchor_items
4242 }
4243
4244 fn next_outline_item(
4245 &self,
4246 config: &OutlineConfig,
4247 mat: &SyntaxMapMatch,
4248 range: &Range<usize>,
4249 include_extra_context: bool,
4250 theme: Option<&SyntaxTheme>,
4251 ) -> Option<OutlineItem<Point>> {
4252 let item_node = mat.captures.iter().find_map(|cap| {
4253 if cap.index == config.item_capture_ix {
4254 Some(cap.node)
4255 } else {
4256 None
4257 }
4258 })?;
4259
4260 let item_byte_range = item_node.byte_range();
4261 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4262 return None;
4263 }
4264 let item_point_range = Point::from_ts_point(item_node.start_position())
4265 ..Point::from_ts_point(item_node.end_position());
4266
4267 let mut open_point = None;
4268 let mut close_point = None;
4269
4270 let mut buffer_ranges = Vec::new();
4271 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4272 let mut range = node.start_byte()..node.end_byte();
4273 let start = node.start_position();
4274 if node.end_position().row > start.row {
4275 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4276 }
4277
4278 if !range.is_empty() {
4279 buffer_ranges.push((range, node_is_name));
4280 }
4281 };
4282
4283 for capture in mat.captures {
4284 if capture.index == config.name_capture_ix {
4285 add_to_buffer_ranges(capture.node, true);
4286 } else if Some(capture.index) == config.context_capture_ix
4287 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4288 {
4289 add_to_buffer_ranges(capture.node, false);
4290 } else {
4291 if Some(capture.index) == config.open_capture_ix {
4292 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4293 } else if Some(capture.index) == config.close_capture_ix {
4294 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4295 }
4296 }
4297 }
4298
4299 if buffer_ranges.is_empty() {
4300 return None;
4301 }
4302 let source_range_for_text =
4303 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4304
4305 let mut text = String::new();
4306 let mut highlight_ranges = Vec::new();
4307 let mut name_ranges = Vec::new();
4308 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4309 let mut last_buffer_range_end = 0;
4310 for (buffer_range, is_name) in buffer_ranges {
4311 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4312 if space_added {
4313 text.push(' ');
4314 }
4315 let before_append_len = text.len();
4316 let mut offset = buffer_range.start;
4317 chunks.seek(buffer_range.clone());
4318 for mut chunk in chunks.by_ref() {
4319 if chunk.text.len() > buffer_range.end - offset {
4320 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4321 offset = buffer_range.end;
4322 } else {
4323 offset += chunk.text.len();
4324 }
4325 let style = chunk
4326 .syntax_highlight_id
4327 .zip(theme)
4328 .and_then(|(highlight, theme)| highlight.style(theme));
4329 if let Some(style) = style {
4330 let start = text.len();
4331 let end = start + chunk.text.len();
4332 highlight_ranges.push((start..end, style));
4333 }
4334 text.push_str(chunk.text);
4335 if offset >= buffer_range.end {
4336 break;
4337 }
4338 }
4339 if is_name {
4340 let after_append_len = text.len();
4341 let start = if space_added && !name_ranges.is_empty() {
4342 before_append_len - 1
4343 } else {
4344 before_append_len
4345 };
4346 name_ranges.push(start..after_append_len);
4347 }
4348 last_buffer_range_end = buffer_range.end;
4349 }
4350
4351 Some(OutlineItem {
4352 depth: 0, // We'll calculate the depth later
4353 range: item_point_range,
4354 source_range_for_text: source_range_for_text.to_point(self),
4355 text,
4356 highlight_ranges,
4357 name_ranges,
4358 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4359 annotation_range: None,
4360 })
4361 }
4362
4363 pub fn function_body_fold_ranges<T: ToOffset>(
4364 &self,
4365 within: Range<T>,
4366 ) -> impl Iterator<Item = Range<usize>> + '_ {
4367 self.text_object_ranges(within, TreeSitterOptions::default())
4368 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4369 }
4370
4371 /// For each grammar in the language, runs the provided
4372 /// [`tree_sitter::Query`] against the given range.
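    ///
    /// A usage sketch (not a compiled doctest), using the outline query as the selector:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here, then move to the next match.
    ///     matches.advance();
    /// }
    /// ```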
4373 pub fn matches(
4374 &self,
4375 range: Range<usize>,
4376 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4377 ) -> SyntaxMapMatches<'_> {
4378 self.syntax.matches(range, self, query)
4379 }
4380
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect those chunks.
    /// Hence, it may return more bracket pairs than the given range contains.
    ///
    /// Chunks present in `known_chunks` are omitted.
    /// The resulting bracket match collections are not ordered.
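    ///
    /// A usage sketch (not a compiled doctest); it assumes `snapshot` and `range` are in scope:
    ///
    /// ```ignore
    /// let brackets_by_chunk = snapshot.fetch_bracket_ranges(range, None);
    /// for (row_range, brackets) in brackets_by_chunk {
    ///     // `brackets` are the `BracketMatch`es for the chunk covering `row_range`.
    /// }
    /// ```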
4386 pub fn fetch_bracket_ranges(
4387 &self,
4388 range: Range<usize>,
4389 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4390 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4391 let mut all_bracket_matches = HashMap::default();
4392
4393 for chunk in self
4394 .tree_sitter_data
4395 .chunks
4396 .applicable_chunks(&[range.to_point(self)])
4397 {
4398 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4399 continue;
4400 }
4401 let chunk_range = chunk.anchor_range();
4402 let chunk_range = chunk_range.to_offset(&self);
4403
4404 if let Some(cached_brackets) =
4405 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4406 {
4407 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4408 continue;
4409 }
4410
4411 let mut all_brackets = Vec::new();
4412 let mut opens = Vec::new();
4413 let mut color_pairs = Vec::new();
4414
4415 let mut matches = self.syntax.matches_with_options(
4416 chunk_range.clone(),
4417 &self.text,
4418 TreeSitterOptions {
4419 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4420 max_start_depth: None,
4421 },
4422 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4423 );
4424 let configs = matches
4425 .grammars()
4426 .iter()
4427 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4428 .collect::<Vec<_>>();
4429
4430 while let Some(mat) = matches.peek() {
4431 let mut open = None;
4432 let mut close = None;
4433 let syntax_layer_depth = mat.depth;
4434 let config = configs[mat.grammar_index];
4435 let pattern = &config.patterns[mat.pattern_index];
4436 for capture in mat.captures {
4437 if capture.index == config.open_capture_ix {
4438 open = Some(capture.node.byte_range());
4439 } else if capture.index == config.close_capture_ix {
4440 close = Some(capture.node.byte_range());
4441 }
4442 }
4443
4444 matches.advance();
4445
4446 let Some((open_range, close_range)) = open.zip(close) else {
4447 continue;
4448 };
4449
4450 let bracket_range = open_range.start..=close_range.end;
4451 if !bracket_range.overlaps(&chunk_range) {
4452 continue;
4453 }
4454
4455 let index = all_brackets.len();
4456 all_brackets.push(BracketMatch {
4457 open_range: open_range.clone(),
4458 close_range: close_range.clone(),
4459 newline_only: pattern.newline_only,
4460 syntax_layer_depth,
4461 color_index: None,
4462 });
4463
                // Certain languages have "brackets" that are not really brackets, e.g. tags, and
                // such a bracket pair will match the entire tag with all of the text inside.
                // For now, avoid highlighting any pair in which both brackets are longer than a
                // single character. We still need to colorize `<Element/>` bracket pairs, so this
                // check cannot be made stricter.
4468 let should_color =
4469 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4470 if should_color {
4471 opens.push(open_range.clone());
4472 color_pairs.push((open_range, close_range, index));
4473 }
4474 }
4475
4476 opens.sort_by_key(|r| (r.start, r.end));
4477 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4478 color_pairs.sort_by_key(|(_, close, _)| close.end);
4479
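            // Pair each colorizable close bracket with the most recently seen unmatched open
            // bracket; the depth of the open-bracket stack at that point becomes the color index.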
4480 let mut open_stack = Vec::new();
4481 let mut open_index = 0;
4482 for (open, close, index) in color_pairs {
4483 while open_index < opens.len() && opens[open_index].start < close.start {
4484 open_stack.push(opens[open_index].clone());
4485 open_index += 1;
4486 }
4487
4488 if open_stack.last() == Some(&open) {
4489 let depth_index = open_stack.len() - 1;
4490 all_brackets[index].color_index = Some(depth_index);
4491 open_stack.pop();
4492 }
4493 }
4494
4495 all_brackets.sort_by_key(|bracket_match| {
4496 (bracket_match.open_range.start, bracket_match.open_range.end)
4497 });
4498
4499 if let empty_slot @ None =
4500 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4501 {
4502 *empty_slot = Some(all_brackets.clone());
4503 }
4504 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4505 }
4506
4507 all_bracket_matches
4508 }
4509
4510 pub fn all_bracket_ranges(
4511 &self,
4512 range: Range<usize>,
4513 ) -> impl Iterator<Item = BracketMatch<usize>> {
4514 self.fetch_bracket_ranges(range.clone(), None)
4515 .into_values()
4516 .flatten()
4517 .filter(move |bracket_match| {
4518 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4519 bracket_range.overlaps(&range)
4520 })
4521 }
4522
    /// Returns bracket range pairs overlapping or adjacent to `range`.
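    ///
    /// A usage sketch (not a compiled doctest); it assumes `snapshot`, `selection_start`, and
    /// `selection_end` are in scope:
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(selection_start..selection_end) {
    ///     // `pair.open_range` and `pair.close_range` are the byte ranges of the two brackets.
    /// }
    /// ```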
4524 pub fn bracket_ranges<T: ToOffset>(
4525 &self,
4526 range: Range<T>,
4527 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4528 // Find bracket pairs that *inclusively* contain the given range.
4529 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4530 self.all_bracket_ranges(range)
4531 .filter(|pair| !pair.newline_only)
4532 }
4533
4534 pub fn debug_variables_query<T: ToOffset>(
4535 &self,
4536 range: Range<T>,
4537 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4538 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4539
4540 let mut matches = self.syntax.matches_with_options(
4541 range.clone(),
4542 &self.text,
4543 TreeSitterOptions::default(),
4544 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4545 );
4546
4547 let configs = matches
4548 .grammars()
4549 .iter()
4550 .map(|grammar| grammar.debug_variables_config.as_ref())
4551 .collect::<Vec<_>>();
4552
4553 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4554
4555 iter::from_fn(move || {
4556 loop {
4557 while let Some(capture) = captures.pop() {
4558 if capture.0.overlaps(&range) {
4559 return Some(capture);
4560 }
4561 }
4562
4563 let mat = matches.peek()?;
4564
4565 let Some(config) = configs[mat.grammar_index].as_ref() else {
4566 matches.advance();
4567 continue;
4568 };
4569
4570 for capture in mat.captures {
4571 let Some(ix) = config
4572 .objects_by_capture_ix
4573 .binary_search_by_key(&capture.index, |e| e.0)
4574 .ok()
4575 else {
4576 continue;
4577 };
4578 let text_object = config.objects_by_capture_ix[ix].1;
4579 let byte_range = capture.node.byte_range();
4580
4581 let mut found = false;
4582 for (range, existing) in captures.iter_mut() {
4583 if existing == &text_object {
4584 range.start = range.start.min(byte_range.start);
4585 range.end = range.end.max(byte_range.end);
4586 found = true;
4587 break;
4588 }
4589 }
4590
4591 if !found {
4592 captures.push((byte_range, text_object));
4593 }
4594 }
4595
4596 matches.advance();
4597 }
4598 })
4599 }
4600
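    /// Returns the text object ranges (such as `TextObject::InsideFunction`) that overlap the
    /// given range, as captured by each language's text-objects query.
    ///
    /// A usage sketch (not a compiled doctest); it assumes `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// for (range, object) in snapshot.text_object_ranges(0..snapshot.len(), TreeSitterOptions::default()) {
    ///     if object == TextObject::InsideFunction {
    ///         // `range` covers a function body.
    ///     }
    /// }
    /// ```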
4601 pub fn text_object_ranges<T: ToOffset>(
4602 &self,
4603 range: Range<T>,
4604 options: TreeSitterOptions,
4605 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4606 let range =
4607 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4608
4609 let mut matches =
4610 self.syntax
4611 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4612 grammar.text_object_config.as_ref().map(|c| &c.query)
4613 });
4614
4615 let configs = matches
4616 .grammars()
4617 .iter()
4618 .map(|grammar| grammar.text_object_config.as_ref())
4619 .collect::<Vec<_>>();
4620
4621 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4622
4623 iter::from_fn(move || {
4624 loop {
4625 while let Some(capture) = captures.pop() {
4626 if capture.0.overlaps(&range) {
4627 return Some(capture);
4628 }
4629 }
4630
4631 let mat = matches.peek()?;
4632
4633 let Some(config) = configs[mat.grammar_index].as_ref() else {
4634 matches.advance();
4635 continue;
4636 };
4637
4638 for capture in mat.captures {
4639 let Some(ix) = config
4640 .text_objects_by_capture_ix
4641 .binary_search_by_key(&capture.index, |e| e.0)
4642 .ok()
4643 else {
4644 continue;
4645 };
4646 let text_object = config.text_objects_by_capture_ix[ix].1;
4647 let byte_range = capture.node.byte_range();
4648
4649 let mut found = false;
4650 for (range, existing) in captures.iter_mut() {
4651 if existing == &text_object {
4652 range.start = range.start.min(byte_range.start);
4653 range.end = range.end.max(byte_range.end);
4654 found = true;
4655 break;
4656 }
4657 }
4658
4659 if !found {
4660 captures.push((byte_range, text_object));
4661 }
4662 }
4663
4664 matches.advance();
4665 }
4666 })
4667 }
4668
    /// Returns the enclosing bracket ranges containing the given range.
4670 pub fn enclosing_bracket_ranges<T: ToOffset>(
4671 &self,
4672 range: Range<T>,
4673 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4674 let range = range.start.to_offset(self)..range.end.to_offset(self);
4675
4676 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4677 let max_depth = result
4678 .iter()
4679 .map(|mat| mat.syntax_layer_depth)
4680 .max()
4681 .unwrap_or(0);
4682 result.into_iter().filter(move |pair| {
4683 pair.open_range.start <= range.start
4684 && pair.close_range.end >= range.end
4685 && pair.syntax_layer_depth == max_depth
4686 })
4687 }
4688
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
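    ///
    /// A usage sketch (not a compiled doctest); it assumes `snapshot` and `offset` are in scope:
    ///
    /// ```ignore
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(offset..offset, None) {
    ///     // `open` and `close` are the byte ranges of the innermost surrounding bracket pair.
    /// }
    /// ```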
4692 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4693 &self,
4694 range: Range<T>,
4695 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4696 ) -> Option<(Range<usize>, Range<usize>)> {
4697 let range = range.start.to_offset(self)..range.end.to_offset(self);
4698
4699 // Get the ranges of the innermost pair of brackets.
4700 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4701
4702 for pair in self.enclosing_bracket_ranges(range) {
4703 if let Some(range_filter) = range_filter
4704 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4705 {
4706 continue;
4707 }
4708
4709 let len = pair.close_range.end - pair.open_range.start;
4710
4711 if let Some((existing_open, existing_close)) = &result {
4712 let existing_len = existing_close.end - existing_open.start;
4713 if len > existing_len {
4714 continue;
4715 }
4716 }
4717
4718 result = Some((pair.open_range, pair.close_range));
4719 }
4720
4721 result
4722 }
4723
    /// Returns the offset ranges of any matches of the redaction query.
    /// The buffer can be associated with multiple languages, and the redaction query associated with each
    /// will be run on the relevant sections of the buffer.
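    ///
    /// A usage sketch (not a compiled doctest); it assumes `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// // Byte ranges whose contents should be hidden from display (e.g. secret values).
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```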
4727 pub fn redacted_ranges<T: ToOffset>(
4728 &self,
4729 range: Range<T>,
4730 ) -> impl Iterator<Item = Range<usize>> + '_ {
4731 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4732 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4733 grammar
4734 .redactions_config
4735 .as_ref()
4736 .map(|config| &config.query)
4737 });
4738
4739 let configs = syntax_matches
4740 .grammars()
4741 .iter()
4742 .map(|grammar| grammar.redactions_config.as_ref())
4743 .collect::<Vec<_>>();
4744
4745 iter::from_fn(move || {
4746 let redacted_range = syntax_matches
4747 .peek()
4748 .and_then(|mat| {
4749 configs[mat.grammar_index].and_then(|config| {
4750 mat.captures
4751 .iter()
4752 .find(|capture| capture.index == config.redaction_capture_ix)
4753 })
4754 })
4755 .map(|mat| mat.node.byte_range());
4756 syntax_matches.advance();
4757 redacted_range
4758 })
4759 }
4760
4761 pub fn injections_intersecting_range<T: ToOffset>(
4762 &self,
4763 range: Range<T>,
4764 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4765 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4766
4767 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4768 grammar
4769 .injection_config
4770 .as_ref()
4771 .map(|config| &config.query)
4772 });
4773
4774 let configs = syntax_matches
4775 .grammars()
4776 .iter()
4777 .map(|grammar| grammar.injection_config.as_ref())
4778 .collect::<Vec<_>>();
4779
4780 iter::from_fn(move || {
4781 let ranges = syntax_matches.peek().and_then(|mat| {
4782 let config = &configs[mat.grammar_index]?;
4783 let content_capture_range = mat.captures.iter().find_map(|capture| {
4784 if capture.index == config.content_capture_ix {
4785 Some(capture.node.byte_range())
4786 } else {
4787 None
4788 }
4789 })?;
4790 let language = self.language_at(content_capture_range.start)?;
4791 Some((content_capture_range, language))
4792 });
4793 syntax_matches.advance();
4794 ranges
4795 })
4796 }
4797
4798 pub fn runnable_ranges(
4799 &self,
4800 offset_range: Range<usize>,
4801 ) -> impl Iterator<Item = RunnableRange> + '_ {
4802 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4803 grammar.runnable_config.as_ref().map(|config| &config.query)
4804 });
4805
4806 let test_configs = syntax_matches
4807 .grammars()
4808 .iter()
4809 .map(|grammar| grammar.runnable_config.as_ref())
4810 .collect::<Vec<_>>();
4811
4812 iter::from_fn(move || {
4813 loop {
4814 let mat = syntax_matches.peek()?;
4815
4816 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4817 let mut run_range = None;
4818 let full_range = mat.captures.iter().fold(
4819 Range {
4820 start: usize::MAX,
4821 end: 0,
4822 },
4823 |mut acc, next| {
4824 let byte_range = next.node.byte_range();
4825 if acc.start > byte_range.start {
4826 acc.start = byte_range.start;
4827 }
4828 if acc.end < byte_range.end {
4829 acc.end = byte_range.end;
4830 }
4831 acc
4832 },
4833 );
4834 if full_range.start > full_range.end {
4835 // We did not find a full spanning range of this match.
4836 return None;
4837 }
4838 let extra_captures: SmallVec<[_; 1]> =
4839 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4840 test_configs
4841 .extra_captures
4842 .get(capture.index as usize)
4843 .cloned()
4844 .and_then(|tag_name| match tag_name {
4845 RunnableCapture::Named(name) => {
4846 Some((capture.node.byte_range(), name))
4847 }
4848 RunnableCapture::Run => {
4849 let _ = run_range.insert(capture.node.byte_range());
4850 None
4851 }
4852 })
4853 }));
4854 let run_range = run_range?;
4855 let tags = test_configs
4856 .query
4857 .property_settings(mat.pattern_index)
4858 .iter()
4859 .filter_map(|property| {
4860 if *property.key == *"tag" {
4861 property
4862 .value
4863 .as_ref()
4864 .map(|value| RunnableTag(value.to_string().into()))
4865 } else {
4866 None
4867 }
4868 })
4869 .collect();
4870 let extra_captures = extra_captures
4871 .into_iter()
4872 .map(|(range, name)| {
4873 (
4874 name.to_string(),
4875 self.text_for_range(range).collect::<String>(),
4876 )
4877 })
4878 .collect();
4879 // All tags should have the same range.
4880 Some(RunnableRange {
4881 run_range,
4882 full_range,
4883 runnable: Runnable {
4884 tags,
4885 language: mat.language,
4886 buffer: self.remote_id(),
4887 },
4888 extra_captures,
4889 buffer_id: self.remote_id(),
4890 })
4891 });
4892
4893 syntax_matches.advance();
4894 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. But we don't want
                    // to return `None` from this iterator just because a match lacked a run marker,
                    // so in that case we loop around to the next match instead.
4897 return test_range;
4898 }
4899 }
4900 })
4901 }
4902
    /// Returns the selections of remote peers (and, when `include_local` is true, of the local
    /// replica as well) that intersect the given range.
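    ///
    /// A usage sketch (not a compiled doctest); it assumes `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     // `selections` iterates this peer's `Selection<Anchor>`s within the range.
    /// }
    /// ```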
4904 #[allow(clippy::type_complexity)]
4905 pub fn selections_in_range(
4906 &self,
4907 range: Range<Anchor>,
4908 include_local: bool,
4909 ) -> impl Iterator<
4910 Item = (
4911 ReplicaId,
4912 bool,
4913 CursorShape,
4914 impl Iterator<Item = &Selection<Anchor>> + '_,
4915 ),
4916 > + '_ {
4917 self.remote_selections
4918 .iter()
4919 .filter(move |(replica_id, set)| {
4920 (include_local || **replica_id != self.text.replica_id())
4921 && !set.selections.is_empty()
4922 })
4923 .map(move |(replica_id, set)| {
4924 let start_ix = match set.selections.binary_search_by(|probe| {
4925 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4926 }) {
4927 Ok(ix) | Err(ix) => ix,
4928 };
4929 let end_ix = match set.selections.binary_search_by(|probe| {
4930 probe.start.cmp(&range.end, self).then(Ordering::Less)
4931 }) {
4932 Ok(ix) | Err(ix) => ix,
4933 };
4934
4935 (
4936 *replica_id,
4937 set.line_mode,
4938 set.cursor_shape,
4939 set.selections[start_ix..end_ix].iter(),
4940 )
4941 })
4942 }
4943
    /// Returns whether the buffer contains any diagnostics.
4945 pub fn has_diagnostics(&self) -> bool {
4946 !self.diagnostics.is_empty()
4947 }
4948
4949 /// Returns all the diagnostics intersecting the given range.
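    ///
    /// A usage sketch (not a compiled doctest); it assumes `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// // Walk every diagnostic in the buffer, resolving ranges to byte offsets.
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!("{:?}: {:?}", entry.range, entry.diagnostic.severity);
    /// }
    /// ```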
4950 pub fn diagnostics_in_range<'a, T, O>(
4951 &'a self,
4952 search_range: Range<T>,
4953 reversed: bool,
4954 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4955 where
4956 T: 'a + Clone + ToOffset,
4957 O: 'a + FromAnchor,
4958 {
4959 let mut iterators: Vec<_> = self
4960 .diagnostics
4961 .iter()
4962 .map(|(_, collection)| {
4963 collection
4964 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4965 .peekable()
4966 })
4967 .collect();
4968
4969 std::iter::from_fn(move || {
4970 let (next_ix, _) = iterators
4971 .iter_mut()
4972 .enumerate()
4973 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4974 .min_by(|(_, a), (_, b)| {
4975 let cmp = a
4976 .range
4977 .start
4978 .cmp(&b.range.start, self)
4979 // when range is equal, sort by diagnostic severity
4980 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4981 // and stabilize order with group_id
4982 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4983 if reversed { cmp.reverse() } else { cmp }
4984 })?;
4985 iterators[next_ix]
4986 .next()
4987 .map(
4988 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4989 diagnostic,
4990 range: FromAnchor::from_anchor(&range.start, self)
4991 ..FromAnchor::from_anchor(&range.end, self),
4992 },
4993 )
4994 })
4995 }
4996
4997 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4998 /// should be used instead.
4999 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
5000 &self.diagnostics
5001 }
5002
5003 /// Returns all the diagnostic groups associated with the given
5004 /// language server ID. If no language server ID is provided,
5005 /// all diagnostics groups are returned.
5006 pub fn diagnostic_groups(
5007 &self,
5008 language_server_id: Option<LanguageServerId>,
5009 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
5010 let mut groups = Vec::new();
5011
5012 if let Some(language_server_id) = language_server_id {
5013 if let Ok(ix) = self
5014 .diagnostics
5015 .binary_search_by_key(&language_server_id, |e| e.0)
5016 {
5017 self.diagnostics[ix]
5018 .1
5019 .groups(language_server_id, &mut groups, self);
5020 }
5021 } else {
5022 for (language_server_id, diagnostics) in self.diagnostics.iter() {
5023 diagnostics.groups(*language_server_id, &mut groups, self);
5024 }
5025 }
5026
5027 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
5028 let a_start = &group_a.entries[group_a.primary_ix].range.start;
5029 let b_start = &group_b.entries[group_b.primary_ix].range.start;
5030 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5031 });
5032
5033 groups
5034 }
5035
5036 /// Returns an iterator over the diagnostics for the given group.
5037 pub fn diagnostic_group<O>(
5038 &self,
5039 group_id: usize,
5040 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5041 where
5042 O: FromAnchor + 'static,
5043 {
5044 self.diagnostics
5045 .iter()
5046 .flat_map(move |(_, set)| set.group(group_id, self))
5047 }
5048
5049 /// An integer version number that accounts for all updates besides
5050 /// the buffer's text itself (which is versioned via a version vector).
5051 pub fn non_text_state_update_count(&self) -> usize {
5052 self.non_text_state_update_count
5053 }
5054
5055 /// An integer version that changes when the buffer's syntax changes.
5056 pub fn syntax_update_count(&self) -> usize {
5057 self.syntax.update_count()
5058 }
5059
    /// Returns a snapshot of the underlying file.
5061 pub fn file(&self) -> Option<&Arc<dyn File>> {
5062 self.file.as_ref()
5063 }
5064
5065 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5066 if let Some(file) = self.file() {
5067 if file.path().file_name().is_none() || include_root {
5068 Some(file.full_path(cx).to_string_lossy().into_owned())
5069 } else {
5070 Some(file.path().display(file.path_style(cx)).to_string())
5071 }
5072 } else {
5073 None
5074 }
5075 }
5076
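    /// Returns the distinct words within the queried range, keyed by their text and mapped to
    /// anchor ranges covering them in the buffer.
    ///
    /// A usage sketch (not a compiled doctest); it assumes `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, _anchor_range) in &words {
    ///     println!("{word}");
    /// }
    /// ```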
5077 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5078 let query_str = query.fuzzy_contents;
5079 if query_str.is_some_and(|query| query.is_empty()) {
5080 return BTreeMap::default();
5081 }
5082
5083 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5084 language,
5085 override_id: None,
5086 }));
5087
5088 let mut query_ix = 0;
5089 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5090 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5091
5092 let mut words = BTreeMap::default();
5093 let mut current_word_start_ix = None;
5094 let mut chunk_ix = query.range.start;
5095 for chunk in self.chunks(query.range, false) {
5096 for (i, c) in chunk.text.char_indices() {
5097 let ix = chunk_ix + i;
5098 if classifier.is_word(c) {
5099 if current_word_start_ix.is_none() {
5100 current_word_start_ix = Some(ix);
5101 }
5102
5103 if let Some(query_chars) = &query_chars
5104 && query_ix < query_len
5105 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5106 {
5107 query_ix += 1;
5108 }
5109 continue;
5110 } else if let Some(word_start) = current_word_start_ix.take()
5111 && query_ix == query_len
5112 {
5113 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5114 let mut word_text = self.text_for_range(word_start..ix).peekable();
5115 let first_char = word_text
5116 .peek()
5117 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words and "words"
                    // that start with a digit.
5119 if !query.skip_digits
5120 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5121 {
5122 words.insert(word_text.collect(), word_range);
5123 }
5124 }
5125 query_ix = 0;
5126 }
5127 chunk_ix += chunk.text.len();
5128 }
5129
5130 words
5131 }
5132}
5133
5134pub struct WordsQuery<'a> {
    /// Only return words that contain all characters of this fuzzy string, in order (case-insensitively).
5136 pub fuzzy_contents: Option<&'a str>,
5137 /// Skips words that start with a digit.
5138 pub skip_digits: bool,
    /// The buffer offset range to search for words.
5140 pub range: Range<usize>,
5141}
5142
5143fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5144 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5145}
5146
5147fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5148 let mut result = IndentSize::spaces(0);
5149 for c in text {
5150 let kind = match c {
5151 ' ' => IndentKind::Space,
5152 '\t' => IndentKind::Tab,
5153 _ => break,
5154 };
5155 if result.len == 0 {
5156 result.kind = kind;
5157 }
5158 result.len += 1;
5159 }
5160 result
5161}
5162
5163impl Clone for BufferSnapshot {
5164 fn clone(&self) -> Self {
5165 Self {
5166 text: self.text.clone(),
5167 syntax: self.syntax.clone(),
5168 file: self.file.clone(),
5169 remote_selections: self.remote_selections.clone(),
5170 diagnostics: self.diagnostics.clone(),
5171 language: self.language.clone(),
5172 tree_sitter_data: self.tree_sitter_data.clone(),
5173 non_text_state_update_count: self.non_text_state_update_count,
5174 }
5175 }
5176}
5177
5178impl Deref for BufferSnapshot {
5179 type Target = text::BufferSnapshot;
5180
5181 fn deref(&self) -> &Self::Target {
5182 &self.text
5183 }
5184}
5185
5186unsafe impl Send for BufferChunks<'_> {}
5187
5188impl<'a> BufferChunks<'a> {
5189 pub(crate) fn new(
5190 text: &'a Rope,
5191 range: Range<usize>,
5192 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5193 diagnostics: bool,
5194 buffer_snapshot: Option<&'a BufferSnapshot>,
5195 ) -> Self {
5196 let mut highlights = None;
5197 if let Some((captures, highlight_maps)) = syntax {
5198 highlights = Some(BufferChunkHighlights {
5199 captures,
5200 next_capture: None,
5201 stack: Default::default(),
5202 highlight_maps,
5203 })
5204 }
5205
5206 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5207 let chunks = text.chunks_in_range(range.clone());
5208
5209 let mut this = BufferChunks {
5210 range,
5211 buffer_snapshot,
5212 chunks,
5213 diagnostic_endpoints,
5214 error_depth: 0,
5215 warning_depth: 0,
5216 information_depth: 0,
5217 hint_depth: 0,
5218 unnecessary_depth: 0,
5219 underline: true,
5220 highlights,
5221 };
5222 this.initialize_diagnostic_endpoints();
5223 this
5224 }
5225
    /// Seeks to the given byte range in the buffer.
5227 pub fn seek(&mut self, range: Range<usize>) {
5228 let old_range = std::mem::replace(&mut self.range, range.clone());
5229 self.chunks.set_range(self.range.clone());
5230 if let Some(highlights) = self.highlights.as_mut() {
5231 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5232 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5233 highlights
5234 .stack
5235 .retain(|(end_offset, _)| *end_offset > range.start);
5236 if let Some(capture) = &highlights.next_capture
5237 && range.start >= capture.node.start_byte()
5238 {
5239 let next_capture_end = capture.node.end_byte();
5240 if range.start < next_capture_end {
5241 highlights.stack.push((
5242 next_capture_end,
5243 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5244 ));
5245 }
5246 highlights.next_capture.take();
5247 }
5248 } else if let Some(snapshot) = self.buffer_snapshot {
5249 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5250 *highlights = BufferChunkHighlights {
5251 captures,
5252 next_capture: None,
5253 stack: Default::default(),
5254 highlight_maps,
5255 };
5256 } else {
5257 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5258 // Seeking such BufferChunks is not supported.
5259 debug_assert!(
5260 false,
5261 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5262 );
5263 }
5264
5265 highlights.captures.set_byte_range(self.range.clone());
5266 self.initialize_diagnostic_endpoints();
5267 }
5268 }
5269
5270 fn initialize_diagnostic_endpoints(&mut self) {
5271 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5272 && let Some(buffer) = self.buffer_snapshot
5273 {
5274 let mut diagnostic_endpoints = Vec::new();
5275 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5276 diagnostic_endpoints.push(DiagnosticEndpoint {
5277 offset: entry.range.start,
5278 is_start: true,
5279 severity: entry.diagnostic.severity,
5280 is_unnecessary: entry.diagnostic.is_unnecessary,
5281 underline: entry.diagnostic.underline,
5282 });
5283 diagnostic_endpoints.push(DiagnosticEndpoint {
5284 offset: entry.range.end,
5285 is_start: false,
5286 severity: entry.diagnostic.severity,
5287 is_unnecessary: entry.diagnostic.is_unnecessary,
5288 underline: entry.diagnostic.underline,
5289 });
5290 }
5291 diagnostic_endpoints
5292 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5293 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5294 self.hint_depth = 0;
5295 self.error_depth = 0;
5296 self.warning_depth = 0;
5297 self.information_depth = 0;
5298 }
5299 }
5300
5301 /// The current byte offset in the buffer.
5302 pub fn offset(&self) -> usize {
5303 self.range.start
5304 }
5305
5306 pub fn range(&self) -> Range<usize> {
5307 self.range.clone()
5308 }
5309
    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }

        if endpoint.is_unnecessary {
            if endpoint.is_start {
                self.unnecessary_depth += 1;
            } else {
                self.unnecessary_depth -= 1;
            }
        }
    }

    /// The most severe diagnostic kind containing the iterator's current position, if any.
    fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }

    fn current_code_is_unnecessary(&self) -> bool {
        self.unnecessary_depth > 0
    }
}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }

            // Re-base the tab and char bitmaps so that bit 0 corresponds to the start of the
            // slice being emitted; `mask` keeps only bits below `bit_end`.
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a single tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been shrunk or enlarged by the given size in the given direction.
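    ///
    /// For example (illustrative), growing `IndentSize::spaces(4)` by `IndentSize::spaces(2)`
    /// with [`Ordering::Greater`] yields six spaces, while shrinking it by `IndentSize::tab()`
    /// with [`Ordering::Less`] leaves it unchanged because the indent kinds differ.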
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

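    /// The width of this indent in columns, with tabs expanded to the given tab size.
    /// For example (illustrative), a single tab with a tab size of 4 reports a width of 4,
    /// while four spaces report 4 regardless of the tab size.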
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

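/// Coalesces a sequence of `u32` values into contiguous half-open ranges, starting a new
/// range whenever a value is not contiguous with the previous one or the current range has
/// already reached `max_len`.
///
/// For example (illustrative), `contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100)` yields
/// `1..4` followed by `5..7`.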
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

/// Classifies characters as word, whitespace, or punctuation characters, optionally taking
/// a language scope's word-character sets and punctuation handling into account.
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

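    /// Classifies `c`, optionally treating punctuation as part of a word.
    ///
    /// For example, with no language scope configured, `'_'` and alphanumeric characters are
    /// `CharKind::Word`, `' '` is `CharKind::Whitespace`, and `'-'` is `CharKind::Punctuation`
    /// unless `ignore_punctuation` is true, in which case it too is treated as `CharKind::Word`.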
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
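///
/// For example (illustrative), for a rope containing `"a  \nb\t\nc"` this returns the ranges
/// `[1..3, 5..6]`, covering the two trailing spaces on the first line and the trailing tab
/// on the second.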
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}