1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use fs::MTime;
29use futures::channel::oneshot;
30use gpui::{
31 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
32 Task, TaskLabel, TextStyle,
33};
34
35use lsp::{LanguageServerId, NumberOrString};
36use parking_lot::Mutex;
37use serde::{Deserialize, Serialize};
38use serde_json::Value;
39use settings::WorktreeId;
40use smallvec::SmallVec;
41use smol::future::yield_now;
42use std::{
43 any::Any,
44 borrow::Cow,
45 cell::Cell,
46 cmp::{self, Ordering, Reverse},
47 collections::{BTreeMap, BTreeSet},
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, Range},
53 path::PathBuf,
54 rc,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
65 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
66 ToPointUtf16, Transaction, TransactionId, Unclipped,
67};
68use theme::{ActiveTheme as _, SyntaxTheme};
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
/// Indicates whether a [`Buffer`] has permission to edit.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
113 pending_autoindent: Option<Task<()>>,
114 sync_parse_timeout: Duration,
115 syntax_map: Mutex<SyntaxMap>,
116 reparse: Option<Task<()>>,
117 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
118 non_text_state_update_count: usize,
119 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
120 remote_selections: TreeMap<ReplicaId, SelectionSet>,
121 diagnostics_timestamp: clock::Lamport,
122 completion_triggers: BTreeSet<String>,
123 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
124 completion_triggers_timestamp: clock::Lamport,
125 deferred_ops: OperationQueue<Operation>,
126 capability: Capability,
127 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` as of the last call.
130 has_unsaved_edits: Cell<(clock::Global, bool)>,
131 change_bits: Vec<rc::Weak<Cell<bool>>>,
132 _subscriptions: Vec<gpui::Subscription>,
133 tree_sitter_data: Arc<TreeSitterData>,
134}
135
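/// Lazily computed data derived from the buffer's syntax, cached per chunk of
/// rows (currently bracket matches) and rebuilt whenever the buffer's text
/// version changes.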
136#[derive(Debug)]
137pub struct TreeSitterData {
138 chunks: RowChunks,
139 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
140}
141
142const MAX_ROWS_IN_A_CHUNK: u32 = 50;
143
144impl TreeSitterData {
145 fn clear(&mut self, snapshot: text::BufferSnapshot) {
146 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
147 self.brackets_by_chunks.get_mut().clear();
148 self.brackets_by_chunks
149 .get_mut()
150 .resize(self.chunks.len(), None);
151 }
152
153 fn new(snapshot: text::BufferSnapshot) -> Self {
154 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
155 Self {
156 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
157 chunks,
158 }
159 }
160
161 fn version(&self) -> &clock::Global {
162 self.chunks.version()
163 }
164}
165
166#[derive(Copy, Clone, Debug, PartialEq, Eq)]
167pub enum ParseStatus {
168 Idle,
169 Parsing,
170}
171
172struct BufferBranchState {
173 base_buffer: Entity<Buffer>,
174 merged_operations: Vec<Lamport>,
175}
176
177/// An immutable, cheaply cloneable representation of a fixed
178/// state of a buffer.
179pub struct BufferSnapshot {
180 pub text: text::BufferSnapshot,
181 pub syntax: SyntaxSnapshot,
182 file: Option<Arc<dyn File>>,
183 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
184 remote_selections: TreeMap<ReplicaId, SelectionSet>,
185 language: Option<Arc<Language>>,
186 non_text_state_update_count: usize,
187 tree_sitter_data: Arc<TreeSitterData>,
188}
189
190/// The kind and amount of indentation in a particular line. For now,
191/// assumes that indentation is all the same character.
192#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
193pub struct IndentSize {
194 /// The number of bytes that comprise the indentation.
195 pub len: u32,
196 /// The kind of whitespace used for indentation.
197 pub kind: IndentKind,
198}
199
200/// A whitespace character that's used for indentation.
201#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
202pub enum IndentKind {
203 /// An ASCII space character.
204 #[default]
205 Space,
206 /// An ASCII tab character.
207 Tab,
208}
209
210/// The shape of a selection cursor.
211#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
212pub enum CursorShape {
213 /// A vertical bar
214 #[default]
215 Bar,
216 /// A block that surrounds the following character
217 Block,
218 /// An underline that runs along the following character
219 Underline,
220 /// A box drawn around the following character
221 Hollow,
222}
223
224impl From<settings::CursorShape> for CursorShape {
225 fn from(shape: settings::CursorShape) -> Self {
226 match shape {
227 settings::CursorShape::Bar => CursorShape::Bar,
228 settings::CursorShape::Block => CursorShape::Block,
229 settings::CursorShape::Underline => CursorShape::Underline,
230 settings::CursorShape::Hollow => CursorShape::Hollow,
231 }
232 }
233}
234
235#[derive(Clone, Debug)]
236struct SelectionSet {
237 line_mode: bool,
238 cursor_shape: CursorShape,
239 selections: Arc<[Selection<Anchor>]>,
240 lamport_timestamp: clock::Lamport,
241}
242
243/// A diagnostic associated with a certain range of a buffer.
244#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
245pub struct Diagnostic {
246 /// The name of the service that produced this diagnostic.
247 pub source: Option<String>,
248 /// A machine-readable code that identifies this diagnostic.
249 pub code: Option<NumberOrString>,
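    /// A URI to open with more information about this diagnostic's code.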
250 pub code_description: Option<lsp::Uri>,
251 /// Whether this diagnostic is a hint, warning, or error.
252 pub severity: DiagnosticSeverity,
253 /// The human-readable message associated with this diagnostic.
254 pub message: String,
    /// The human-readable message, in Markdown format, if available.
256 pub markdown: Option<String>,
257 /// An id that identifies the group to which this diagnostic belongs.
258 ///
259 /// When a language server produces a diagnostic with
260 /// one or more associated diagnostics, those diagnostics are all
261 /// assigned a single group ID.
262 pub group_id: usize,
263 /// Whether this diagnostic is the primary diagnostic for its group.
264 ///
265 /// In a given group, the primary diagnostic is the top-level diagnostic
266 /// returned by the language server. The non-primary diagnostics are the
267 /// associated diagnostics.
268 pub is_primary: bool,
269 /// Whether this diagnostic is considered to originate from an analysis of
270 /// files on disk, as opposed to any unsaved buffer contents. This is a
271 /// property of a given diagnostic source, and is configured for a given
272 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
273 /// for the language server.
274 pub is_disk_based: bool,
275 /// Whether this diagnostic marks unnecessary code.
276 pub is_unnecessary: bool,
    /// A quick way to separate diagnostic groups by their source.
278 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for this diagnostic.
280 pub data: Option<Value>,
281 /// Whether to underline the corresponding text range in the editor.
282 pub underline: bool,
283}
284
285#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
286pub enum DiagnosticSourceKind {
287 Pulled,
288 Pushed,
289 Other,
290}
291
292/// An operation used to synchronize this buffer with its other replicas.
293#[derive(Clone, Debug, PartialEq)]
294pub enum Operation {
295 /// A text operation.
296 Buffer(text::Operation),
297
298 /// An update to the buffer's diagnostics.
299 UpdateDiagnostics {
300 /// The id of the language server that produced the new diagnostics.
301 server_id: LanguageServerId,
302 /// The diagnostics.
303 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
304 /// The buffer's lamport timestamp.
305 lamport_timestamp: clock::Lamport,
306 },
307
308 /// An update to the most recent selections in this buffer.
309 UpdateSelections {
310 /// The selections.
311 selections: Arc<[Selection<Anchor>]>,
312 /// The buffer's lamport timestamp.
313 lamport_timestamp: clock::Lamport,
314 /// Whether the selections are in 'line mode'.
315 line_mode: bool,
316 /// The [`CursorShape`] associated with these selections.
317 cursor_shape: CursorShape,
318 },
319
320 /// An update to the characters that should trigger autocompletion
321 /// for this buffer.
322 UpdateCompletionTriggers {
323 /// The characters that trigger autocompletion.
324 triggers: Vec<String>,
325 /// The buffer's lamport timestamp.
326 lamport_timestamp: clock::Lamport,
327 /// The language server ID.
328 server_id: LanguageServerId,
329 },
330
331 /// An update to the line ending type of this buffer.
332 UpdateLineEnding {
333 /// The line ending type.
334 line_ending: LineEnding,
335 /// The buffer's lamport timestamp.
336 lamport_timestamp: clock::Lamport,
337 },
338}
339
340/// An event that occurs in a buffer.
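///
/// A minimal sketch of reacting to these events from another entity (`buffer`
/// assumed to be an `Entity<Buffer>` and `cx` a `&mut Context<SomeEntity>`;
/// not compiled as a doc-test):
///
/// ```ignore
/// cx.subscribe(&buffer, |_this, _buffer, event, _cx| {
///     if matches!(event, BufferEvent::Edited) {
///         // React to buffer edits here.
///     }
/// })
/// .detach();
/// ```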
341#[derive(Clone, Debug, PartialEq)]
342pub enum BufferEvent {
343 /// The buffer was changed in a way that must be
344 /// propagated to its other replicas.
345 Operation {
346 operation: Operation,
347 is_local: bool,
348 },
349 /// The buffer was edited.
350 Edited,
351 /// The buffer's `dirty` bit changed.
352 DirtyChanged,
353 /// The buffer was saved.
354 Saved,
355 /// The buffer's file was changed on disk.
356 FileHandleChanged,
357 /// The buffer was reloaded.
358 Reloaded,
    /// The buffer is in need of a reload.
360 ReloadNeeded,
361 /// The buffer's language was changed.
362 /// The boolean indicates whether this buffer did not have a language before, but does now.
363 LanguageChanged(bool),
364 /// The buffer's syntax trees were updated.
365 Reparsed,
366 /// The buffer's diagnostics were updated.
367 DiagnosticsUpdated,
368 /// The buffer gained or lost editing capabilities.
369 CapabilityChanged,
370}
371
372/// The file associated with a buffer.
373pub trait File: Send + Sync + Any {
374 /// Returns the [`LocalFile`] associated with this file, if the
375 /// file is local.
376 fn as_local(&self) -> Option<&dyn LocalFile>;
377
378 /// Returns whether this file is local.
379 fn is_local(&self) -> bool {
380 self.as_local().is_some()
381 }
382
383 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
384 /// only available in some states, such as modification time.
385 fn disk_state(&self) -> DiskState;
386
387 /// Returns the path of this file relative to the worktree's root directory.
388 fn path(&self) -> &Arc<RelPath>;
389
390 /// Returns the path of this file relative to the worktree's parent directory (this means it
391 /// includes the name of the worktree's root folder).
392 fn full_path(&self, cx: &App) -> PathBuf;
393
394 /// Returns the path style of this file.
395 fn path_style(&self, cx: &App) -> PathStyle;
396
397 /// Returns the last component of this handle's absolute path. If this handle refers to the root
398 /// of its worktree, then this method will return the name of the worktree itself.
399 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
400
401 /// Returns the id of the worktree to which this file belongs.
402 ///
403 /// This is needed for looking up project-specific settings.
404 fn worktree_id(&self, cx: &App) -> WorktreeId;
405
406 /// Converts this file into a protobuf message.
407 fn to_proto(&self, cx: &App) -> rpc::proto::File;
408
    /// Returns whether Zed considers this to be a private file.
410 fn is_private(&self) -> bool;
411}
412
413/// The file's storage status - whether it's stored (`Present`), and if so when it was last
414/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
415/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
416/// indicator for new files.
417#[derive(Copy, Clone, Debug, PartialEq)]
418pub enum DiskState {
419 /// File created in Zed that has not been saved.
420 New,
421 /// File present on the filesystem.
422 Present { mtime: MTime },
423 /// Deleted file that was previously present.
424 Deleted,
425}
426
427impl DiskState {
428 /// Returns the file's last known modification time on disk.
429 pub fn mtime(self) -> Option<MTime> {
430 match self {
431 DiskState::New => None,
432 DiskState::Present { mtime } => Some(mtime),
433 DiskState::Deleted => None,
434 }
435 }
436
437 pub fn exists(&self) -> bool {
438 match self {
439 DiskState::New => false,
440 DiskState::Present { .. } => true,
441 DiskState::Deleted => false,
442 }
443 }
444}
445
446/// The file associated with a buffer, in the case where the file is on the local disk.
447pub trait LocalFile: File {
    /// Returns the absolute path of this file.
449 fn abs_path(&self, cx: &App) -> PathBuf;
450
451 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
452 fn load(&self, cx: &App) -> Task<Result<String>>;
453
454 /// Loads the file's contents from disk.
455 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
456}
457
458/// The auto-indent behavior associated with an editing operation.
459/// For some editing operations, each affected line of text has its
460/// indentation recomputed. For other operations, the entire block
461/// of edited text is adjusted uniformly.
462#[derive(Clone, Debug)]
463pub enum AutoindentMode {
464 /// Indent each line of inserted text.
465 EachLine,
466 /// Apply the same indentation adjustment to all of the lines
467 /// in a given insertion.
468 Block {
469 /// The original indentation column of the first line of each
470 /// insertion, if it has been copied.
471 ///
472 /// Knowing this makes it possible to preserve the relative indentation
473 /// of every line in the insertion from when it was copied.
474 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion is shifted by the difference `b - a`.
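        ///
        /// For example, if a block was copied at indent column 4 and its first
        /// line is auto-indented to column 8, every other line of that
        /// insertion is shifted right by 4 columns, preserving the block's
        /// internal relative indentation.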
478 original_indent_columns: Vec<Option<u32>>,
479 },
480}
481
482#[derive(Clone)]
483struct AutoindentRequest {
484 before_edit: BufferSnapshot,
485 entries: Vec<AutoindentRequestEntry>,
486 is_block_mode: bool,
487 ignore_empty_lines: bool,
488}
489
490#[derive(Debug, Clone)]
491struct AutoindentRequestEntry {
492 /// A range of the buffer whose indentation should be adjusted.
493 range: Range<Anchor>,
494 /// Whether or not these lines should be considered brand new, for the
495 /// purpose of auto-indent. When text is not new, its indentation will
496 /// only be adjusted if the suggested indentation level has *changed*
497 /// since the edit was made.
498 first_line_is_new: bool,
499 indent_size: IndentSize,
500 original_indent_column: Option<u32>,
501}
502
503#[derive(Debug)]
504struct IndentSuggestion {
505 basis_row: u32,
506 delta: Ordering,
507 within_error: bool,
508}
509
510struct BufferChunkHighlights<'a> {
511 captures: SyntaxMapCaptures<'a>,
512 next_capture: Option<SyntaxMapCapture<'a>>,
513 stack: Vec<(usize, HighlightId)>,
514 highlight_maps: Vec<HighlightMap>,
515}
516
517/// An iterator that yields chunks of a buffer's text, along with their
518/// syntax highlights and diagnostic status.
519pub struct BufferChunks<'a> {
520 buffer_snapshot: Option<&'a BufferSnapshot>,
521 range: Range<usize>,
522 chunks: text::Chunks<'a>,
523 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
524 error_depth: usize,
525 warning_depth: usize,
526 information_depth: usize,
527 hint_depth: usize,
528 unnecessary_depth: usize,
529 underline: bool,
530 highlights: Option<BufferChunkHighlights<'a>>,
531}
532
533/// A chunk of a buffer's text, along with its syntax highlight and
534/// diagnostic status.
535#[derive(Clone, Debug, Default)]
536pub struct Chunk<'a> {
537 /// The text of the chunk.
538 pub text: &'a str,
539 /// The syntax highlighting style of the chunk.
540 pub syntax_highlight_id: Option<HighlightId>,
541 /// The highlight style that has been applied to this chunk in
542 /// the editor.
543 pub highlight_style: Option<HighlightStyle>,
544 /// The severity of diagnostic associated with this chunk, if any.
545 pub diagnostic_severity: Option<DiagnosticSeverity>,
546 /// A bitset of which characters are tabs in this string.
547 pub tabs: u128,
    /// A bitmap of character indices in this chunk.
    pub chars: u128,
550 /// Whether this chunk of text is marked as unnecessary.
551 pub is_unnecessary: bool,
552 /// Whether this chunk of text was originally a tab character.
553 pub is_tab: bool,
554 /// Whether this chunk of text was originally an inlay.
555 pub is_inlay: bool,
556 /// Whether to underline the corresponding text range in the editor.
557 pub underline: bool,
558}
559
560/// A set of edits to a given version of a buffer, computed asynchronously.
561#[derive(Debug)]
562pub struct Diff {
563 pub base_version: clock::Global,
564 pub line_ending: LineEnding,
565 pub edits: Vec<(Range<usize>, Arc<str>)>,
566}
567
568#[derive(Debug, Clone, Copy)]
569pub(crate) struct DiagnosticEndpoint {
570 offset: usize,
571 is_start: bool,
572 underline: bool,
573 severity: DiagnosticSeverity,
574 is_unnecessary: bool,
575}
576
577/// A class of characters, used for characterizing a run of text.
578#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
579pub enum CharKind {
580 /// Whitespace.
581 Whitespace,
582 /// Punctuation.
583 Punctuation,
584 /// Word.
585 Word,
586}
587
588/// Context for character classification within a specific scope.
589#[derive(Copy, Clone, Eq, PartialEq, Debug)]
590pub enum CharScopeContext {
591 /// Character classification for completion queries.
592 ///
593 /// This context treats certain characters as word constituents that would
594 /// normally be considered punctuation, such as '-' in Tailwind classes
595 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
596 Completion,
597 /// Character classification for linked edits.
598 ///
599 /// This context handles characters that should be treated as part of
600 /// identifiers during linked editing operations, such as '.' in JSX
601 /// component names like `<Animated.View>`.
602 LinkedEdit,
603}
604
/// A runnable is a set of data about a buffer region that can be resolved into a task.
606pub struct Runnable {
607 pub tags: SmallVec<[RunnableTag; 1]>,
608 pub language: Arc<Language>,
609 pub buffer: BufferId,
610}
611
612#[derive(Default, Clone, Debug)]
613pub struct HighlightedText {
614 pub text: SharedString,
615 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
616}
617
618#[derive(Default, Debug)]
619struct HighlightedTextBuilder {
620 pub text: String,
621 highlights: Vec<(Range<usize>, HighlightStyle)>,
622}
623
624impl HighlightedText {
625 pub fn from_buffer_range<T: ToOffset>(
626 range: Range<T>,
627 snapshot: &text::BufferSnapshot,
628 syntax_snapshot: &SyntaxSnapshot,
629 override_style: Option<HighlightStyle>,
630 syntax_theme: &SyntaxTheme,
631 ) -> Self {
632 let mut highlighted_text = HighlightedTextBuilder::default();
633 highlighted_text.add_text_from_buffer_range(
634 range,
635 snapshot,
636 syntax_snapshot,
637 override_style,
638 syntax_theme,
639 );
640 highlighted_text.build()
641 }
642
643 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
644 gpui::StyledText::new(self.text.clone())
645 .with_default_highlights(default_style, self.highlights.iter().cloned())
646 }
647
    /// Returns the first line, with leading whitespace trimmed unless a
    /// highlight starts within it, and a boolean indicating whether more
    /// lines follow.
650 pub fn first_line_preview(self) -> (Self, bool) {
651 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
652 let first_line = &self.text[..newline_ix];
653
        // Trim leading whitespace, unless a highlight starts within it.
655 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
656 if let Some((first_highlight_range, _)) = self.highlights.first() {
657 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
658 }
659
660 let preview_text = &first_line[preview_start_ix..];
661 let preview_highlights = self
662 .highlights
663 .into_iter()
664 .skip_while(|(range, _)| range.end <= preview_start_ix)
665 .take_while(|(range, _)| range.start < newline_ix)
666 .filter_map(|(mut range, highlight)| {
667 range.start = range.start.saturating_sub(preview_start_ix);
668 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
669 if range.is_empty() {
670 None
671 } else {
672 Some((range, highlight))
673 }
674 });
675
676 let preview = Self {
677 text: SharedString::new(preview_text),
678 highlights: preview_highlights.collect(),
679 };
680
681 (preview, self.text.len() > newline_ix)
682 }
683}
684
685impl HighlightedTextBuilder {
686 pub fn build(self) -> HighlightedText {
687 HighlightedText {
688 text: self.text.into(),
689 highlights: self.highlights,
690 }
691 }
692
693 pub fn add_text_from_buffer_range<T: ToOffset>(
694 &mut self,
695 range: Range<T>,
696 snapshot: &text::BufferSnapshot,
697 syntax_snapshot: &SyntaxSnapshot,
698 override_style: Option<HighlightStyle>,
699 syntax_theme: &SyntaxTheme,
700 ) {
701 let range = range.to_offset(snapshot);
702 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
703 let start = self.text.len();
704 self.text.push_str(chunk.text);
705 let end = self.text.len();
706
707 if let Some(highlight_style) = chunk
708 .syntax_highlight_id
709 .and_then(|id| id.style(syntax_theme))
710 {
711 let highlight_style = override_style.map_or(highlight_style, |override_style| {
712 highlight_style.highlight(override_style)
713 });
714 self.highlights.push((start..end, highlight_style));
715 } else if let Some(override_style) = override_style {
716 self.highlights.push((start..end, override_style));
717 }
718 }
719 }
720
721 fn highlighted_chunks<'a>(
722 range: Range<usize>,
723 snapshot: &'a text::BufferSnapshot,
724 syntax_snapshot: &'a SyntaxSnapshot,
725 ) -> BufferChunks<'a> {
726 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
727 grammar
728 .highlights_config
729 .as_ref()
730 .map(|config| &config.query)
731 });
732
733 let highlight_maps = captures
734 .grammars()
735 .iter()
736 .map(|grammar| grammar.highlight_map())
737 .collect();
738
739 BufferChunks::new(
740 snapshot.as_rope(),
741 range,
742 Some((captures, highlight_maps)),
743 false,
744 None,
745 )
746 }
747}
748
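/// A preview of a set of edits applied to a buffer, produced by
/// [`Buffer::preview_edits`]. Holds the text snapshot from before the edits, a
/// snapshot with the edits applied, and a syntax snapshot reparsed against the
/// edited text.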
749#[derive(Clone)]
750pub struct EditPreview {
751 old_snapshot: text::BufferSnapshot,
752 applied_edits_snapshot: text::BufferSnapshot,
753 syntax_snapshot: SyntaxSnapshot,
754}
755
756impl EditPreview {
757 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
758 let (first, _) = edits.first()?;
759 let (last, _) = edits.last()?;
760
761 let start = first.start.to_point(&self.old_snapshot);
762 let old_end = last.end.to_point(&self.old_snapshot);
763 let new_end = last
764 .end
765 .bias_right(&self.old_snapshot)
766 .to_point(&self.applied_edits_snapshot);
767
768 let start = Point::new(start.row.saturating_sub(3), 0);
769 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
770 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
771
772 Some(unified_diff(
773 &self
774 .old_snapshot
775 .text_for_range(start..old_end)
776 .collect::<String>(),
777 &self
778 .applied_edits_snapshot
779 .text_for_range(start..new_end)
780 .collect::<String>(),
781 ))
782 }
783
784 pub fn highlight_edits(
785 &self,
786 current_snapshot: &BufferSnapshot,
787 edits: &[(Range<Anchor>, impl AsRef<str>)],
788 include_deletions: bool,
789 cx: &App,
790 ) -> HighlightedText {
791 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
792 return HighlightedText::default();
793 };
794
795 let mut highlighted_text = HighlightedTextBuilder::default();
796
797 let visible_range_in_preview_snapshot =
798 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
799 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
800
801 let insertion_highlight_style = HighlightStyle {
802 background_color: Some(cx.theme().status().created_background),
803 ..Default::default()
804 };
805 let deletion_highlight_style = HighlightStyle {
806 background_color: Some(cx.theme().status().deleted_background),
807 ..Default::default()
808 };
809 let syntax_theme = cx.theme().syntax();
810
811 for (range, edit_text) in edits {
812 let edit_new_end_in_preview_snapshot = range
813 .end
814 .bias_right(&self.old_snapshot)
815 .to_offset(&self.applied_edits_snapshot);
816 let edit_start_in_preview_snapshot =
817 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
818
819 let unchanged_range_in_preview_snapshot =
820 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
821 if !unchanged_range_in_preview_snapshot.is_empty() {
822 highlighted_text.add_text_from_buffer_range(
823 unchanged_range_in_preview_snapshot,
824 &self.applied_edits_snapshot,
825 &self.syntax_snapshot,
826 None,
827 syntax_theme,
828 );
829 }
830
831 let range_in_current_snapshot = range.to_offset(current_snapshot);
832 if include_deletions && !range_in_current_snapshot.is_empty() {
833 highlighted_text.add_text_from_buffer_range(
834 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
837 Some(deletion_highlight_style),
838 syntax_theme,
839 );
840 }
841
842 if !edit_text.as_ref().is_empty() {
843 highlighted_text.add_text_from_buffer_range(
844 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
845 &self.applied_edits_snapshot,
846 &self.syntax_snapshot,
847 Some(insertion_highlight_style),
848 syntax_theme,
849 );
850 }
851
852 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
853 }
854
855 highlighted_text.add_text_from_buffer_range(
856 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
857 &self.applied_edits_snapshot,
858 &self.syntax_snapshot,
859 None,
860 syntax_theme,
861 );
862
863 highlighted_text.build()
864 }
865
866 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
867 cx.new(|cx| {
868 let mut buffer = Buffer::local_normalized(
869 self.applied_edits_snapshot.as_rope().clone(),
870 self.applied_edits_snapshot.line_ending(),
871 cx,
872 );
873 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
874 buffer
875 })
876 }
877
878 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
879 let (first, _) = edits.first()?;
880 let (last, _) = edits.last()?;
881
882 let start = first
883 .start
884 .bias_left(&self.old_snapshot)
885 .to_point(&self.applied_edits_snapshot);
886 let end = last
887 .end
888 .bias_right(&self.old_snapshot)
889 .to_point(&self.applied_edits_snapshot);
890
891 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
892 let range = Point::new(start.row, 0)
893 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
894
895 Some(range)
896 }
897}
898
899#[derive(Clone, Debug, PartialEq, Eq)]
900pub struct BracketMatch<T> {
901 pub open_range: Range<T>,
902 pub close_range: Range<T>,
903 pub newline_only: bool,
904 pub syntax_layer_depth: usize,
905 pub color_index: Option<usize>,
906}
907
908impl<T> BracketMatch<T> {
909 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
910 (self.open_range, self.close_range)
911 }
912}
913
914impl Buffer {
915 /// Create a new buffer with the given base text.
916 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
917 Self::build(
918 TextBuffer::new(
919 ReplicaId::LOCAL,
920 cx.entity_id().as_non_zero_u64().into(),
921 base_text.into(),
922 ),
923 None,
924 Capability::ReadWrite,
925 )
926 }
927
928 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
929 pub fn local_normalized(
930 base_text_normalized: Rope,
931 line_ending: LineEnding,
932 cx: &Context<Self>,
933 ) -> Self {
934 Self::build(
935 TextBuffer::new_normalized(
936 ReplicaId::LOCAL,
937 cx.entity_id().as_non_zero_u64().into(),
938 line_ending,
939 base_text_normalized,
940 ),
941 None,
942 Capability::ReadWrite,
943 )
944 }
945
946 /// Create a new buffer that is a replica of a remote buffer.
947 pub fn remote(
948 remote_id: BufferId,
949 replica_id: ReplicaId,
950 capability: Capability,
951 base_text: impl Into<String>,
952 ) -> Self {
953 Self::build(
954 TextBuffer::new(replica_id, remote_id, base_text.into()),
955 None,
956 capability,
957 )
958 }
959
960 /// Create a new buffer that is a replica of a remote buffer, populating its
961 /// state from the given protobuf message.
962 pub fn from_proto(
963 replica_id: ReplicaId,
964 capability: Capability,
965 message: proto::BufferState,
966 file: Option<Arc<dyn File>>,
967 ) -> Result<Self> {
968 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
969 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
970 let mut this = Self::build(buffer, file, capability);
971 this.text.set_line_ending(proto::deserialize_line_ending(
972 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
973 ));
974 this.saved_version = proto::deserialize_version(&message.saved_version);
975 this.saved_mtime = message.saved_mtime.map(|time| time.into());
976 Ok(this)
977 }
978
979 /// Serialize the buffer's state to a protobuf message.
980 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
981 proto::BufferState {
982 id: self.remote_id().into(),
983 file: self.file.as_ref().map(|f| f.to_proto(cx)),
984 base_text: self.base_text().to_string(),
985 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
986 saved_version: proto::serialize_version(&self.saved_version),
987 saved_mtime: self.saved_mtime.map(|time| time.into()),
988 }
989 }
990
991 /// Serialize as protobufs all of the changes to the buffer since the given version.
992 pub fn serialize_ops(
993 &self,
994 since: Option<clock::Global>,
995 cx: &App,
996 ) -> Task<Vec<proto::Operation>> {
997 let mut operations = Vec::new();
998 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
999
1000 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1001 proto::serialize_operation(&Operation::UpdateSelections {
1002 selections: set.selections.clone(),
1003 lamport_timestamp: set.lamport_timestamp,
1004 line_mode: set.line_mode,
1005 cursor_shape: set.cursor_shape,
1006 })
1007 }));
1008
1009 for (server_id, diagnostics) in &self.diagnostics {
1010 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1011 lamport_timestamp: self.diagnostics_timestamp,
1012 server_id: *server_id,
1013 diagnostics: diagnostics.iter().cloned().collect(),
1014 }));
1015 }
1016
1017 for (server_id, completions) in &self.completion_triggers_per_language_server {
1018 operations.push(proto::serialize_operation(
1019 &Operation::UpdateCompletionTriggers {
1020 triggers: completions.iter().cloned().collect(),
1021 lamport_timestamp: self.completion_triggers_timestamp,
1022 server_id: *server_id,
1023 },
1024 ));
1025 }
1026
1027 let text_operations = self.text.operations().clone();
1028 cx.background_spawn(async move {
1029 let since = since.unwrap_or_default();
1030 operations.extend(
1031 text_operations
1032 .iter()
1033 .filter(|(_, op)| !since.observed(op.timestamp()))
1034 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1035 );
1036 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1037 operations
1038 })
1039 }
1040
1041 /// Assign a language to the buffer, returning the buffer.
1042 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1043 self.set_language_async(Some(language), cx);
1044 self
1045 }
1046
1047 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
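    ///
    /// A minimal usage sketch (`language` assumed to be an `Arc<Language>`;
    /// not compiled as a doc-test):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx).with_language(language, cx));
    /// ```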
1048 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1049 self.set_language(Some(language), cx);
1050 self
1051 }
1052
1053 /// Returns the [`Capability`] of this buffer.
1054 pub fn capability(&self) -> Capability {
1055 self.capability
1056 }
1057
1058 /// Whether this buffer can only be read.
1059 pub fn read_only(&self) -> bool {
1060 self.capability == Capability::ReadOnly
1061 }
1062
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1064 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1065 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1066 let snapshot = buffer.snapshot();
1067 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1068 let tree_sitter_data = TreeSitterData::new(snapshot);
1069 Self {
1070 saved_mtime,
1071 tree_sitter_data: Arc::new(tree_sitter_data),
1072 saved_version: buffer.version(),
1073 preview_version: buffer.version(),
1074 reload_task: None,
1075 transaction_depth: 0,
1076 was_dirty_before_starting_transaction: None,
1077 has_unsaved_edits: Cell::new((buffer.version(), false)),
1078 text: buffer,
1079 branch_state: None,
1080 file,
1081 capability,
1082 syntax_map,
1083 reparse: None,
1084 non_text_state_update_count: 0,
1085 sync_parse_timeout: Duration::from_millis(1),
1086 parse_status: watch::channel(ParseStatus::Idle),
1087 autoindent_requests: Default::default(),
1088 wait_for_autoindent_txs: Default::default(),
1089 pending_autoindent: Default::default(),
1090 language: None,
1091 remote_selections: Default::default(),
1092 diagnostics: Default::default(),
1093 diagnostics_timestamp: Lamport::MIN,
1094 completion_triggers: Default::default(),
1095 completion_triggers_per_language_server: Default::default(),
1096 completion_triggers_timestamp: Lamport::MIN,
1097 deferred_ops: OperationQueue::new(),
1098 has_conflict: false,
1099 change_bits: Default::default(),
1100 _subscriptions: Vec::new(),
1101 }
1102 }
1103
1104 pub fn build_snapshot(
1105 text: Rope,
1106 language: Option<Arc<Language>>,
1107 language_registry: Option<Arc<LanguageRegistry>>,
1108 cx: &mut App,
1109 ) -> impl Future<Output = BufferSnapshot> + use<> {
1110 let entity_id = cx.reserve_entity::<Self>().entity_id();
1111 let buffer_id = entity_id.as_non_zero_u64().into();
1112 async move {
1113 let text =
1114 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1115 .snapshot();
1116 let mut syntax = SyntaxMap::new(&text).snapshot();
1117 if let Some(language) = language.clone() {
1118 let language_registry = language_registry.clone();
1119 syntax.reparse(&text, language_registry, language);
1120 }
1121 let tree_sitter_data = TreeSitterData::new(text.clone());
1122 BufferSnapshot {
1123 text,
1124 syntax,
1125 file: None,
1126 diagnostics: Default::default(),
1127 remote_selections: Default::default(),
1128 tree_sitter_data: Arc::new(tree_sitter_data),
1129 language,
1130 non_text_state_update_count: 0,
1131 }
1132 }
1133 }
1134
1135 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1136 let entity_id = cx.reserve_entity::<Self>().entity_id();
1137 let buffer_id = entity_id.as_non_zero_u64().into();
1138 let text = TextBuffer::new_normalized(
1139 ReplicaId::LOCAL,
1140 buffer_id,
1141 Default::default(),
1142 Rope::new(),
1143 )
1144 .snapshot();
1145 let syntax = SyntaxMap::new(&text).snapshot();
1146 let tree_sitter_data = TreeSitterData::new(text.clone());
1147 BufferSnapshot {
1148 text,
1149 syntax,
1150 tree_sitter_data: Arc::new(tree_sitter_data),
1151 file: None,
1152 diagnostics: Default::default(),
1153 remote_selections: Default::default(),
1154 language: None,
1155 non_text_state_update_count: 0,
1156 }
1157 }
1158
1159 #[cfg(any(test, feature = "test-support"))]
1160 pub fn build_snapshot_sync(
1161 text: Rope,
1162 language: Option<Arc<Language>>,
1163 language_registry: Option<Arc<LanguageRegistry>>,
1164 cx: &mut App,
1165 ) -> BufferSnapshot {
1166 let entity_id = cx.reserve_entity::<Self>().entity_id();
1167 let buffer_id = entity_id.as_non_zero_u64().into();
1168 let text =
1169 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1170 .snapshot();
1171 let mut syntax = SyntaxMap::new(&text).snapshot();
1172 if let Some(language) = language.clone() {
1173 syntax.reparse(&text, language_registry, language);
1174 }
1175 let tree_sitter_data = TreeSitterData::new(text.clone());
1176 BufferSnapshot {
1177 text,
1178 syntax,
1179 tree_sitter_data: Arc::new(tree_sitter_data),
1180 file: None,
1181 diagnostics: Default::default(),
1182 remote_selections: Default::default(),
1183 language,
1184 non_text_state_update_count: 0,
1185 }
1186 }
1187
1188 /// Retrieve a snapshot of the buffer's current state. This is computationally
1189 /// cheap, and allows reading from the buffer on a background thread.
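    ///
    /// A minimal sketch of reading from a snapshot on a background thread
    /// (`buffer` assumed to be a `&Buffer` and `cx` an `&App`; not compiled as
    /// a doc-test):
    ///
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot can be read without blocking the main thread.
    ///     let _last_row = snapshot.text.max_point().row;
    /// })
    /// .detach();
    /// ```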
1190 pub fn snapshot(&self) -> BufferSnapshot {
1191 let text = self.text.snapshot();
1192 let mut syntax_map = self.syntax_map.lock();
1193 syntax_map.interpolate(&text);
1194 let syntax = syntax_map.snapshot();
1195
1196 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1197 Arc::new(TreeSitterData::new(text.clone()))
1198 } else {
1199 self.tree_sitter_data.clone()
1200 };
1201
1202 BufferSnapshot {
1203 text,
1204 syntax,
1205 tree_sitter_data,
1206 file: self.file.clone(),
1207 remote_selections: self.remote_selections.clone(),
1208 diagnostics: self.diagnostics.clone(),
1209 language: self.language.clone(),
1210 non_text_state_update_count: self.non_text_state_update_count,
1211 }
1212 }
1213
1214 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1215 let this = cx.entity();
1216 cx.new(|cx| {
1217 let mut branch = Self {
1218 branch_state: Some(BufferBranchState {
1219 base_buffer: this.clone(),
1220 merged_operations: Default::default(),
1221 }),
1222 language: self.language.clone(),
1223 has_conflict: self.has_conflict,
1224 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1225 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1226 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1227 };
1228 if let Some(language_registry) = self.language_registry() {
1229 branch.set_language_registry(language_registry);
1230 }
1231
1232 // Reparse the branch buffer so that we get syntax highlighting immediately.
1233 branch.reparse(cx, true);
1234
1235 branch
1236 })
1237 }
1238
1239 pub fn preview_edits(
1240 &self,
1241 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1242 cx: &App,
1243 ) -> Task<EditPreview> {
1244 let registry = self.language_registry();
1245 let language = self.language().cloned();
1246 let old_snapshot = self.text.snapshot();
1247 let mut branch_buffer = self.text.branch();
1248 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1249 cx.background_spawn(async move {
1250 if !edits.is_empty() {
1251 if let Some(language) = language.clone() {
1252 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1253 }
1254
1255 branch_buffer.edit(edits.iter().cloned());
1256 let snapshot = branch_buffer.snapshot();
1257 syntax_snapshot.interpolate(&snapshot);
1258
1259 if let Some(language) = language {
1260 syntax_snapshot.reparse(&snapshot, registry, language);
1261 }
1262 }
1263 EditPreview {
1264 old_snapshot,
1265 applied_edits_snapshot: branch_buffer.snapshot(),
1266 syntax_snapshot,
1267 }
1268 })
1269 }
1270
1271 /// Applies all of the changes in this buffer that intersect any of the
1272 /// given `ranges` to its base buffer.
1273 ///
1274 /// If `ranges` is empty, then all changes will be applied. This buffer must
1275 /// be a branch buffer to call this method.
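    ///
    /// A minimal sketch of the branch-and-merge flow (`buffer` assumed to be
    /// an `Entity<Buffer>` and `cx` a `&mut Context<SomeEntity>`; not compiled
    /// as a doc-test):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// prepended comment\n")], None, cx);
    ///     // Apply every branch edit back onto the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```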
1276 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1277 let Some(base_buffer) = self.base_buffer() else {
1278 debug_panic!("not a branch buffer");
1279 return;
1280 };
1281
1282 let mut ranges = if ranges.is_empty() {
1283 &[0..usize::MAX]
1284 } else {
1285 ranges.as_slice()
1286 }
1287 .iter()
1288 .peekable();
1289
1290 let mut edits = Vec::new();
1291 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1292 let mut is_included = false;
1293 while let Some(range) = ranges.peek() {
1294 if range.end < edit.new.start {
1295 ranges.next().unwrap();
1296 } else {
1297 if range.start <= edit.new.end {
1298 is_included = true;
1299 }
1300 break;
1301 }
1302 }
1303
1304 if is_included {
1305 edits.push((
1306 edit.old.clone(),
1307 self.text_for_range(edit.new.clone()).collect::<String>(),
1308 ));
1309 }
1310 }
1311
1312 let operation = base_buffer.update(cx, |base_buffer, cx| {
1313 // cx.emit(BufferEvent::DiffBaseChanged);
1314 base_buffer.edit(edits, None, cx)
1315 });
1316
1317 if let Some(operation) = operation
1318 && let Some(BufferBranchState {
1319 merged_operations, ..
1320 }) = &mut self.branch_state
1321 {
1322 merged_operations.push(operation);
1323 }
1324 }
1325
1326 fn on_base_buffer_event(
1327 &mut self,
1328 _: Entity<Buffer>,
1329 event: &BufferEvent,
1330 cx: &mut Context<Self>,
1331 ) {
1332 let BufferEvent::Operation { operation, .. } = event else {
1333 return;
1334 };
1335 let Some(BufferBranchState {
1336 merged_operations, ..
1337 }) = &mut self.branch_state
1338 else {
1339 return;
1340 };
1341
1342 let mut operation_to_undo = None;
1343 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1344 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1345 {
1346 merged_operations.remove(ix);
1347 operation_to_undo = Some(operation.timestamp);
1348 }
1349
1350 self.apply_ops([operation.clone()], cx);
1351
1352 if let Some(timestamp) = operation_to_undo {
1353 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1354 self.undo_operations(counts, cx);
1355 }
1356 }
1357
1358 #[cfg(test)]
1359 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1360 &self.text
1361 }
1362
1363 /// Retrieve a snapshot of the buffer's raw text, without any
1364 /// language-related state like the syntax tree or diagnostics.
1365 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1366 self.text.snapshot()
1367 }
1368
1369 /// The file associated with the buffer, if any.
1370 pub fn file(&self) -> Option<&Arc<dyn File>> {
1371 self.file.as_ref()
1372 }
1373
1374 /// The version of the buffer that was last saved or reloaded from disk.
1375 pub fn saved_version(&self) -> &clock::Global {
1376 &self.saved_version
1377 }
1378
1379 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1380 pub fn saved_mtime(&self) -> Option<MTime> {
1381 self.saved_mtime
1382 }
1383
1384 /// Assign a language to the buffer.
1385 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1386 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1387 }
1388
1389 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1390 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1391 self.set_language_(language, true, cx);
1392 }
1393
1394 fn set_language_(
1395 &mut self,
1396 language: Option<Arc<Language>>,
1397 may_block: bool,
1398 cx: &mut Context<Self>,
1399 ) {
1400 self.non_text_state_update_count += 1;
1401 self.syntax_map.lock().clear(&self.text);
1402 let old_language = std::mem::replace(&mut self.language, language);
1403 self.was_changed();
1404 self.reparse(cx, may_block);
1405 let has_fresh_language =
1406 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1407 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1408 }
1409
1410 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1411 /// other languages if parts of the buffer are written in different languages.
1412 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1413 self.syntax_map
1414 .lock()
1415 .set_language_registry(language_registry);
1416 }
1417
1418 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1419 self.syntax_map.lock().language_registry()
1420 }
1421
1422 /// Assign the line ending type to the buffer.
1423 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1424 self.text.set_line_ending(line_ending);
1425
1426 let lamport_timestamp = self.text.lamport_clock.tick();
1427 self.send_operation(
1428 Operation::UpdateLineEnding {
1429 line_ending,
1430 lamport_timestamp,
1431 },
1432 true,
1433 cx,
1434 );
1435 }
1436
1437 /// Assign the buffer a new [`Capability`].
1438 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1439 if self.capability != capability {
1440 self.capability = capability;
1441 cx.emit(BufferEvent::CapabilityChanged)
1442 }
1443 }
1444
1445 /// This method is called to signal that the buffer has been saved.
1446 pub fn did_save(
1447 &mut self,
1448 version: clock::Global,
1449 mtime: Option<MTime>,
1450 cx: &mut Context<Self>,
1451 ) {
1452 self.saved_version = version.clone();
1453 self.has_unsaved_edits.set((version, false));
1454 self.has_conflict = false;
1455 self.saved_mtime = mtime;
1456 self.was_changed();
1457 cx.emit(BufferEvent::Saved);
1458 cx.notify();
1459 }
1460
1461 /// Reloads the contents of the buffer from disk.
1462 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1463 let (tx, rx) = futures::channel::oneshot::channel();
1464 let prev_version = self.text.version();
1465 self.reload_task = Some(cx.spawn(async move |this, cx| {
1466 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1467 let file = this.file.as_ref()?.as_local()?;
1468
1469 Some((file.disk_state().mtime(), file.load(cx)))
1470 })?
1471 else {
1472 return Ok(());
1473 };
1474
1475 let new_text = new_text.await?;
1476 let diff = this
1477 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1478 .await;
1479 this.update(cx, |this, cx| {
1480 if this.version() == diff.base_version {
1481 this.finalize_last_transaction();
1482 this.apply_diff(diff, cx);
1483 tx.send(this.finalize_last_transaction().cloned()).ok();
1484 this.has_conflict = false;
1485 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1486 } else {
1487 if !diff.edits.is_empty()
1488 || this
1489 .edits_since::<usize>(&diff.base_version)
1490 .next()
1491 .is_some()
1492 {
1493 this.has_conflict = true;
1494 }
1495
1496 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1497 }
1498
1499 this.reload_task.take();
1500 })
1501 }));
1502 rx
1503 }
1504
1505 /// This method is called to signal that the buffer has been reloaded.
1506 pub fn did_reload(
1507 &mut self,
1508 version: clock::Global,
1509 line_ending: LineEnding,
1510 mtime: Option<MTime>,
1511 cx: &mut Context<Self>,
1512 ) {
1513 self.saved_version = version;
1514 self.has_unsaved_edits
1515 .set((self.saved_version.clone(), false));
1516 self.text.set_line_ending(line_ending);
1517 self.saved_mtime = mtime;
1518 cx.emit(BufferEvent::Reloaded);
1519 cx.notify();
1520 }
1521
1522 /// Updates the [`File`] backing this buffer. This should be called when
1523 /// the file has changed or has been deleted.
1524 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1525 let was_dirty = self.is_dirty();
1526 let mut file_changed = false;
1527
1528 if let Some(old_file) = self.file.as_ref() {
1529 if new_file.path() != old_file.path() {
1530 file_changed = true;
1531 }
1532
1533 let old_state = old_file.disk_state();
1534 let new_state = new_file.disk_state();
1535 if old_state != new_state {
1536 file_changed = true;
1537 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1538 cx.emit(BufferEvent::ReloadNeeded)
1539 }
1540 }
1541 } else {
1542 file_changed = true;
1543 };
1544
1545 self.file = Some(new_file);
1546 if file_changed {
1547 self.was_changed();
1548 self.non_text_state_update_count += 1;
1549 if was_dirty != self.is_dirty() {
1550 cx.emit(BufferEvent::DirtyChanged);
1551 }
1552 cx.emit(BufferEvent::FileHandleChanged);
1553 cx.notify();
1554 }
1555 }
1556
1557 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1558 Some(self.branch_state.as_ref()?.base_buffer.clone())
1559 }
1560
1561 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1562 pub fn language(&self) -> Option<&Arc<Language>> {
1563 self.language.as_ref()
1564 }
1565
1566 /// Returns the [`Language`] at the given location.
1567 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1568 let offset = position.to_offset(self);
1569 let mut is_first = true;
1570 let start_anchor = self.anchor_before(offset);
1571 let end_anchor = self.anchor_after(offset);
1572 self.syntax_map
1573 .lock()
1574 .layers_for_range(offset..offset, &self.text, false)
1575 .filter(|layer| {
1576 if is_first {
1577 is_first = false;
1578 return true;
1579 }
1580
1581 layer
1582 .included_sub_ranges
1583 .map(|sub_ranges| {
1584 sub_ranges.iter().any(|sub_range| {
1585 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1586 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1587 !is_before_start && !is_after_end
1588 })
1589 })
1590 .unwrap_or(true)
1591 })
1592 .last()
1593 .map(|info| info.language.clone())
1594 .or_else(|| self.language.clone())
1595 }
1596
1597 /// Returns each [`Language`] for the active syntax layers at the given location.
1598 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1599 let offset = position.to_offset(self);
1600 let mut languages: Vec<Arc<Language>> = self
1601 .syntax_map
1602 .lock()
1603 .layers_for_range(offset..offset, &self.text, false)
1604 .map(|info| info.language.clone())
1605 .collect();
1606
1607 if languages.is_empty()
1608 && let Some(buffer_language) = self.language()
1609 {
1610 languages.push(buffer_language.clone());
1611 }
1612
1613 languages
1614 }
1615
1616 /// An integer version number that accounts for all updates besides
1617 /// the buffer's text itself (which is versioned via a version vector).
1618 pub fn non_text_state_update_count(&self) -> usize {
1619 self.non_text_state_update_count
1620 }
1621
1622 /// Whether the buffer is being parsed in the background.
1623 #[cfg(any(test, feature = "test-support"))]
1624 pub fn is_parsing(&self) -> bool {
1625 self.reparse.is_some()
1626 }
1627
1628 /// Indicates whether the buffer contains any regions that may be
1629 /// written in a language that hasn't been loaded yet.
1630 pub fn contains_unknown_injections(&self) -> bool {
1631 self.syntax_map.lock().contains_unknown_injections()
1632 }
1633
1634 #[cfg(any(test, feature = "test-support"))]
1635 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1636 self.sync_parse_timeout = timeout;
1637 }
1638
1639 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1640 match Arc::get_mut(&mut self.tree_sitter_data) {
1641 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1642 None => {
1643 let tree_sitter_data = TreeSitterData::new(snapshot);
1644 self.tree_sitter_data = Arc::new(tree_sitter_data)
1645 }
1646 }
1647 }
1648
1649 /// Called after an edit to synchronize the buffer's main parse tree with
1650 /// the buffer's new underlying state.
1651 ///
1652 /// Locks the syntax map and interpolates the edits since the last reparse
1653 /// into the foreground syntax tree.
1654 ///
1655 /// Then takes a stable snapshot of the syntax map before unlocking it.
1656 /// The snapshot with the interpolated edits is sent to a background thread,
1657 /// where we ask Tree-sitter to perform an incremental parse.
1658 ///
    /// Meanwhile, if `may_block` is true, the main thread blocks for up to
    /// `sync_parse_timeout` (1ms by default) waiting for the parse to
    /// complete, and proceeds synchronously if it finishes in time.
    ///
    /// If the timeout elapses, we return with the interpolated tree still in
    /// the foreground and spawn a second task that waits for the parse to
    /// complete. When the background parse finishes, it calls back into the
    /// main thread and assigns the new parse state.
1667 ///
1668 /// If the buffer or grammar changed since the start of the background parse,
1669 /// initiate an additional reparse recursively. To avoid concurrent parses
1670 /// for the same buffer, we only initiate a new parse if we are not already
1671 /// parsing in the background.
1672 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1673 if self.reparse.is_some() {
1674 return;
1675 }
1676 let language = if let Some(language) = self.language.clone() {
1677 language
1678 } else {
1679 return;
1680 };
1681
1682 let text = self.text_snapshot();
1683 let parsed_version = self.version();
1684
1685 let mut syntax_map = self.syntax_map.lock();
1686 syntax_map.interpolate(&text);
1687 let language_registry = syntax_map.language_registry();
1688 let mut syntax_snapshot = syntax_map.snapshot();
1689 drop(syntax_map);
1690
1691 let parse_task = cx.background_spawn({
1692 let language = language.clone();
1693 let language_registry = language_registry.clone();
1694 async move {
1695 syntax_snapshot.reparse(&text, language_registry, language);
1696 syntax_snapshot
1697 }
1698 });
1699
1700 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1701 if may_block {
1702 match cx
1703 .background_executor()
1704 .block_with_timeout(self.sync_parse_timeout, parse_task)
1705 {
1706 Ok(new_syntax_snapshot) => {
1707 self.did_finish_parsing(new_syntax_snapshot, cx);
1708 self.reparse = None;
1709 }
1710 Err(parse_task) => {
1711 self.reparse = Some(cx.spawn(async move |this, cx| {
1712 let new_syntax_map = cx.background_spawn(parse_task).await;
1713 this.update(cx, move |this, cx| {
1714 let grammar_changed = || {
1715 this.language.as_ref().is_none_or(|current_language| {
1716 !Arc::ptr_eq(&language, current_language)
1717 })
1718 };
1719 let language_registry_changed = || {
1720 new_syntax_map.contains_unknown_injections()
1721 && language_registry.is_some_and(|registry| {
1722 registry.version()
1723 != new_syntax_map.language_registry_version()
1724 })
1725 };
1726 let parse_again = this.version.changed_since(&parsed_version)
1727 || language_registry_changed()
1728 || grammar_changed();
1729 this.did_finish_parsing(new_syntax_map, cx);
1730 this.reparse = None;
1731 if parse_again {
1732 this.reparse(cx, false);
1733 }
1734 })
1735 .ok();
1736 }));
1737 }
1738 }
1739 } else {
1740 self.reparse = Some(cx.spawn(async move |this, cx| {
1741 let new_syntax_map = cx.background_spawn(parse_task).await;
1742 this.update(cx, move |this, cx| {
1743 let grammar_changed = || {
1744 this.language.as_ref().is_none_or(|current_language| {
1745 !Arc::ptr_eq(&language, current_language)
1746 })
1747 };
1748 let language_registry_changed = || {
1749 new_syntax_map.contains_unknown_injections()
1750 && language_registry.is_some_and(|registry| {
1751 registry.version() != new_syntax_map.language_registry_version()
1752 })
1753 };
1754 let parse_again = this.version.changed_since(&parsed_version)
1755 || language_registry_changed()
1756 || grammar_changed();
1757 this.did_finish_parsing(new_syntax_map, cx);
1758 this.reparse = None;
1759 if parse_again {
1760 this.reparse(cx, false);
1761 }
1762 })
1763 .ok();
1764 }));
1765 }
1766 }
1767
1768 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1769 self.was_changed();
1770 self.non_text_state_update_count += 1;
1771 self.syntax_map.lock().did_parse(syntax_snapshot);
1772 self.request_autoindent(cx);
1773 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1774 self.invalidate_tree_sitter_data(self.text.snapshot());
1775 cx.emit(BufferEvent::Reparsed);
1776 cx.notify();
1777 }
1778
1779 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1780 self.parse_status.1.clone()
1781 }
1782
    /// Waits until the buffer is no longer being parsed.
1784 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1785 let mut parse_status = self.parse_status();
1786 async move {
1787 while *parse_status.borrow() != ParseStatus::Idle {
1788 if parse_status.changed().await.is_err() {
1789 break;
1790 }
1791 }
1792 }
1793 }
1794
1795 /// Assign to the buffer a set of diagnostics created by a given language server.
1796 pub fn update_diagnostics(
1797 &mut self,
1798 server_id: LanguageServerId,
1799 diagnostics: DiagnosticSet,
1800 cx: &mut Context<Self>,
1801 ) {
1802 let lamport_timestamp = self.text.lamport_clock.tick();
1803 let op = Operation::UpdateDiagnostics {
1804 server_id,
1805 diagnostics: diagnostics.iter().cloned().collect(),
1806 lamport_timestamp,
1807 };
1808
1809 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1810 self.send_operation(op, true, cx);
1811 }
1812
1813 pub fn buffer_diagnostics(
1814 &self,
1815 for_server: Option<LanguageServerId>,
1816 ) -> Vec<&DiagnosticEntry<Anchor>> {
1817 match for_server {
1818 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1819 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1820 Err(_) => Vec::new(),
1821 },
1822 None => self
1823 .diagnostics
1824 .iter()
1825 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1826 .collect(),
1827 }
1828 }
1829
1830 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1831 if let Some(indent_sizes) = self.compute_autoindents() {
1832 let indent_sizes = cx.background_spawn(indent_sizes);
1833 match cx
1834 .background_executor()
1835 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1836 {
1837 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1838 Err(indent_sizes) => {
1839 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1840 let indent_sizes = indent_sizes.await;
1841 this.update(cx, |this, cx| {
1842 this.apply_autoindents(indent_sizes, cx);
1843 })
1844 .ok();
1845 }));
1846 }
1847 }
1848 } else {
1849 self.autoindent_requests.clear();
1850 for tx in self.wait_for_autoindent_txs.drain(..) {
1851 tx.send(()).ok();
1852 }
1853 }
1854 }
1855
1856 fn compute_autoindents(
1857 &self,
1858 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1859 let max_rows_between_yields = 100;
1860 let snapshot = self.snapshot();
1861 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1862 return None;
1863 }
1864
1865 let autoindent_requests = self.autoindent_requests.clone();
1866 Some(async move {
1867 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1868 for request in autoindent_requests {
1869 // Resolve each edited range to its row in the current buffer and in the
1870 // buffer before this batch of edits.
1871 let mut row_ranges = Vec::new();
1872 let mut old_to_new_rows = BTreeMap::new();
1873 let mut language_indent_sizes_by_new_row = Vec::new();
1874 for entry in &request.entries {
1875 let position = entry.range.start;
1876 let new_row = position.to_point(&snapshot).row;
1877 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1878 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1879
1880 if !entry.first_line_is_new {
1881 let old_row = position.to_point(&request.before_edit).row;
1882 old_to_new_rows.insert(old_row, new_row);
1883 }
1884 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1885 }
1886
1887 // Build a map containing the suggested indentation for each of the edited lines
1888 // with respect to the state of the buffer before these edits. This map is keyed
1889 // by the rows for these lines in the current state of the buffer.
1890 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1891 let old_edited_ranges =
1892 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1893 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1894 let mut language_indent_size = IndentSize::default();
1895 for old_edited_range in old_edited_ranges {
1896 let suggestions = request
1897 .before_edit
1898 .suggest_autoindents(old_edited_range.clone())
1899 .into_iter()
1900 .flatten();
1901 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1902 if let Some(suggestion) = suggestion {
1903 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1904
1905 // Find the indent size based on the language for this row.
1906 while let Some((row, size)) = language_indent_sizes.peek() {
1907 if *row > new_row {
1908 break;
1909 }
1910 language_indent_size = *size;
1911 language_indent_sizes.next();
1912 }
1913
1914 let suggested_indent = old_to_new_rows
1915 .get(&suggestion.basis_row)
1916 .and_then(|from_row| {
1917 Some(old_suggestions.get(from_row).copied()?.0)
1918 })
1919 .unwrap_or_else(|| {
1920 request
1921 .before_edit
1922 .indent_size_for_line(suggestion.basis_row)
1923 })
1924 .with_delta(suggestion.delta, language_indent_size);
1925 old_suggestions
1926 .insert(new_row, (suggested_indent, suggestion.within_error));
1927 }
1928 }
1929 yield_now().await;
1930 }
1931
1932 // Compute new suggestions for each line, but only include them in the result
1933 // if they differ from the old suggestion for that line.
1934 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1935 let mut language_indent_size = IndentSize::default();
1936 for (row_range, original_indent_column) in row_ranges {
1937 let new_edited_row_range = if request.is_block_mode {
1938 row_range.start..row_range.start + 1
1939 } else {
1940 row_range.clone()
1941 };
1942
1943 let suggestions = snapshot
1944 .suggest_autoindents(new_edited_row_range.clone())
1945 .into_iter()
1946 .flatten();
1947 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1948 if let Some(suggestion) = suggestion {
1949 // Find the indent size based on the language for this row.
1950 while let Some((row, size)) = language_indent_sizes.peek() {
1951 if *row > new_row {
1952 break;
1953 }
1954 language_indent_size = *size;
1955 language_indent_sizes.next();
1956 }
1957
1958 let suggested_indent = indent_sizes
1959 .get(&suggestion.basis_row)
1960 .copied()
1961 .map(|e| e.0)
1962 .unwrap_or_else(|| {
1963 snapshot.indent_size_for_line(suggestion.basis_row)
1964 })
1965 .with_delta(suggestion.delta, language_indent_size);
1966
1967 if old_suggestions.get(&new_row).is_none_or(
1968 |(old_indentation, was_within_error)| {
1969 suggested_indent != *old_indentation
1970 && (!suggestion.within_error || *was_within_error)
1971 },
1972 ) {
1973 indent_sizes.insert(
1974 new_row,
1975 (suggested_indent, request.ignore_empty_lines),
1976 );
1977 }
1978 }
1979 }
1980
1981 if let (true, Some(original_indent_column)) =
1982 (request.is_block_mode, original_indent_column)
1983 {
1984 let new_indent =
1985 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1986 *indent
1987 } else {
1988 snapshot.indent_size_for_line(row_range.start)
1989 };
1990 let delta = new_indent.len as i64 - original_indent_column as i64;
1991 if delta != 0 {
1992 for row in row_range.skip(1) {
1993 indent_sizes.entry(row).or_insert_with(|| {
1994 let mut size = snapshot.indent_size_for_line(row);
1995 if size.kind == new_indent.kind {
1996 match delta.cmp(&0) {
1997 Ordering::Greater => size.len += delta as u32,
1998 Ordering::Less => {
1999 size.len = size.len.saturating_sub(-delta as u32)
2000 }
2001 Ordering::Equal => {}
2002 }
2003 }
2004 (size, request.ignore_empty_lines)
2005 });
2006 }
2007 }
2008 }
2009
2010 yield_now().await;
2011 }
2012 }
2013
2014 indent_sizes
2015 .into_iter()
2016 .filter_map(|(row, (indent, ignore_empty_lines))| {
2017 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2018 None
2019 } else {
2020 Some((row, indent))
2021 }
2022 })
2023 .collect()
2024 })
2025 }
2026
2027 fn apply_autoindents(
2028 &mut self,
2029 indent_sizes: BTreeMap<u32, IndentSize>,
2030 cx: &mut Context<Self>,
2031 ) {
2032 self.autoindent_requests.clear();
2033 for tx in self.wait_for_autoindent_txs.drain(..) {
2034 tx.send(()).ok();
2035 }
2036
2037 let edits: Vec<_> = indent_sizes
2038 .into_iter()
2039 .filter_map(|(row, indent_size)| {
2040 let current_size = indent_size_for_line(self, row);
2041 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2042 })
2043 .collect();
2044
2045 let preserve_preview = self.preserve_preview();
2046 self.edit(edits, None, cx);
2047 if preserve_preview {
2048 self.refresh_preview();
2049 }
2050 }
2051
2052 /// Create a minimal edit that will cause the given row to be indented
2053 /// with the given size. After applying this edit, the length of the line
2054 /// will always be at least `new_size.len`.
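    ///
    /// A small illustrative sketch (hypothetical values, not compiled as a doctest):
    ///
    /// ```ignore
    /// // Growing a 2-space indent on row 3 to 4 spaces yields an insertion of
    /// // two spaces at the start of that line.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert!(edit.is_some());
    /// ```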
2055 pub fn edit_for_indent_size_adjustment(
2056 row: u32,
2057 current_size: IndentSize,
2058 new_size: IndentSize,
2059 ) -> Option<(Range<Point>, String)> {
2060 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
2062 Ordering::Greater => {
2063 let point = Point::new(row, 0);
2064 Some((
2065 point..point,
2066 iter::repeat(new_size.char())
2067 .take((new_size.len - current_size.len) as usize)
2068 .collect::<String>(),
2069 ))
2070 }
2071
2072 Ordering::Less => Some((
2073 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2074 String::new(),
2075 )),
2076
2077 Ordering::Equal => None,
2078 }
2079 } else {
2080 Some((
2081 Point::new(row, 0)..Point::new(row, current_size.len),
2082 iter::repeat(new_size.char())
2083 .take(new_size.len as usize)
2084 .collect::<String>(),
2085 ))
2086 }
2087 }
2088
2089 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2090 /// and the given new text.
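    ///
    /// A usage sketch (not compiled; assumes an existing `buffer: Entity<Buffer>`,
    /// an `&App` named `cx`, and a `new_text: String`):
    ///
    /// ```ignore
    /// // Computes the diff in the background; the buffer itself is unchanged.
    /// let diff_task = buffer.read(cx).diff(new_text, cx);
    /// ```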
2091 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2092 let old_text = self.as_rope().clone();
2093 let base_version = self.version();
2094 cx.background_executor()
2095 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2096 let old_text = old_text.to_string();
2097 let line_ending = LineEnding::detect(&new_text);
2098 LineEnding::normalize(&mut new_text);
2099 let edits = text_diff(&old_text, &new_text);
2100 Diff {
2101 base_version,
2102 line_ending,
2103 edits,
2104 }
2105 })
2106 }
2107
2108 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2110 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2111 let old_text = self.as_rope().clone();
2112 let line_ending = self.line_ending();
2113 let base_version = self.version();
2114 cx.background_spawn(async move {
2115 let ranges = trailing_whitespace_ranges(&old_text);
2116 let empty = Arc::<str>::from("");
2117 Diff {
2118 base_version,
2119 line_ending,
2120 edits: ranges
2121 .into_iter()
2122 .map(|range| (range, empty.clone()))
2123 .collect(),
2124 }
2125 })
2126 }
2127
2128 /// Ensures that the buffer ends with a single newline character, and
2129 /// no other whitespace. Skips if the buffer is empty.
2130 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2131 let len = self.len();
2132 if len == 0 {
2133 return;
2134 }
2135 let mut offset = len;
2136 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2137 let non_whitespace_len = chunk
2138 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2139 .len();
2140 offset -= chunk.len();
2141 offset += non_whitespace_len;
2142 if non_whitespace_len != 0 {
2143 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2144 return;
2145 }
2146 break;
2147 }
2148 }
2149 self.edit([(offset..len, "\n")], None, cx);
2150 }
2151
2152 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2153 /// calculated, then adjust the diff to account for those changes, and discard any
2154 /// parts of the diff that conflict with those changes.
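    ///
    /// A usage sketch (not compiled; assumes a `diff` that was previously computed
    /// by [`Buffer::diff`] or [`Buffer::remove_trailing_whitespace`] and already
    /// awaited, plus an existing `buffer: Entity<Buffer>` and gpui `cx`):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     // Hunks that conflict with edits made since `diff.base_version`
    ///     // are dropped; the rest are applied in one transaction.
    ///     buffer.apply_diff(diff, cx);
    /// });
    /// ```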
2155 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2156 let snapshot = self.snapshot();
2157 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2158 let mut delta = 0;
2159 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2160 while let Some(edit_since) = edits_since.peek() {
2161 // If the edit occurs after a diff hunk, then it does not
2162 // affect that hunk.
2163 if edit_since.old.start > range.end {
2164 break;
2165 }
2166 // If the edit precedes the diff hunk, then adjust the hunk
2167 // to reflect the edit.
2168 else if edit_since.old.end < range.start {
2169 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2170 edits_since.next();
2171 }
2172 // If the edit intersects a diff hunk, then discard that hunk.
2173 else {
2174 return None;
2175 }
2176 }
2177
2178 let start = (range.start as i64 + delta) as usize;
2179 let end = (range.end as i64 + delta) as usize;
2180 Some((start..end, new_text))
2181 });
2182
2183 self.start_transaction();
2184 self.text.set_line_ending(diff.line_ending);
2185 self.edit(adjusted_edits, None, cx);
2186 self.end_transaction(cx)
2187 }
2188
2189 pub fn has_unsaved_edits(&self) -> bool {
2190 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2191
2192 if last_version == self.version {
2193 self.has_unsaved_edits
2194 .set((last_version, has_unsaved_edits));
2195 return has_unsaved_edits;
2196 }
2197
2198 let has_edits = self.has_edits_since(&self.saved_version);
2199 self.has_unsaved_edits
2200 .set((self.version.clone(), has_edits));
2201 has_edits
2202 }
2203
2204 /// Checks if the buffer has unsaved changes.
2205 pub fn is_dirty(&self) -> bool {
2206 if self.capability == Capability::ReadOnly {
2207 return false;
2208 }
2209 if self.has_conflict {
2210 return true;
2211 }
2212 match self.file.as_ref().map(|f| f.disk_state()) {
2213 Some(DiskState::New) | Some(DiskState::Deleted) => {
2214 !self.is_empty() && self.has_unsaved_edits()
2215 }
2216 _ => self.has_unsaved_edits(),
2217 }
2218 }
2219
2220 /// Marks the buffer as having a conflict regardless of current buffer state.
2221 pub fn set_conflict(&mut self) {
2222 self.has_conflict = true;
2223 }
2224
2225 /// Checks if the buffer and its file have both changed since the buffer
2226 /// was last saved or reloaded.
2227 pub fn has_conflict(&self) -> bool {
2228 if self.has_conflict {
2229 return true;
2230 }
2231 let Some(file) = self.file.as_ref() else {
2232 return false;
2233 };
2234 match file.disk_state() {
2235 DiskState::New => false,
2236 DiskState::Present { mtime } => match self.saved_mtime {
2237 Some(saved_mtime) => {
2238 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2239 }
2240 None => true,
2241 },
2242 DiskState::Deleted => false,
2243 }
2244 }
2245
2246 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2247 pub fn subscribe(&mut self) -> Subscription<usize> {
2248 self.text.subscribe()
2249 }
2250
2251 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2252 ///
2253 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
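    ///
    /// A usage sketch (not compiled; `buffer` and `cx` are assumed to exist):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.update(cx, |buffer, _| buffer.record_changes(Rc::downgrade(&changed)));
    /// // ...after some edits...
    /// if changed.get() {
    ///     // The buffer's text has changed since the bit was registered.
    /// }
    /// ```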
2255 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2256 if let Err(ix) = self
2257 .change_bits
2258 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2259 {
2260 self.change_bits.insert(ix, bit);
2261 }
2262 }
2263
2264 /// Set the change bit for all "listeners".
2265 fn was_changed(&mut self) {
2266 self.change_bits.retain(|change_bit| {
2267 change_bit
2268 .upgrade()
2269 .inspect(|bit| {
2270 _ = bit.replace(true);
2271 })
2272 .is_some()
2273 });
2274 }
2275
2276 /// Starts a transaction, if one is not already in-progress. When undoing or
2277 /// redoing edits, all of the edits performed within a transaction are undone
2278 /// or redone together.
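    ///
    /// A sketch of the typical pattern (not compiled; assumes this runs inside a
    /// `buffer.update` closure with `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// // Both edits are now undone and redone as a single unit.
    /// buffer.end_transaction(cx);
    /// ```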
2279 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2280 self.start_transaction_at(Instant::now())
2281 }
2282
2283 /// Starts a transaction, providing the current time. Subsequent transactions
2284 /// that occur within a short period of time will be grouped together. This
2285 /// is controlled by the buffer's undo grouping duration.
2286 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2287 self.transaction_depth += 1;
2288 if self.was_dirty_before_starting_transaction.is_none() {
2289 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2290 }
2291 self.text.start_transaction_at(now)
2292 }
2293
2294 /// Terminates the current transaction, if this is the outermost transaction.
2295 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2296 self.end_transaction_at(Instant::now(), cx)
2297 }
2298
2299 /// Terminates the current transaction, providing the current time. Subsequent transactions
2300 /// that occur within a short period of time will be grouped together. This
2301 /// is controlled by the buffer's undo grouping duration.
2302 pub fn end_transaction_at(
2303 &mut self,
2304 now: Instant,
2305 cx: &mut Context<Self>,
2306 ) -> Option<TransactionId> {
2307 assert!(self.transaction_depth > 0);
2308 self.transaction_depth -= 1;
2309 let was_dirty = if self.transaction_depth == 0 {
2310 self.was_dirty_before_starting_transaction.take().unwrap()
2311 } else {
2312 false
2313 };
2314 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2315 self.did_edit(&start_version, was_dirty, cx);
2316 Some(transaction_id)
2317 } else {
2318 None
2319 }
2320 }
2321
2322 /// Manually add a transaction to the buffer's undo history.
2323 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2324 self.text.push_transaction(transaction, now);
2325 }
2326
2327 /// Differs from `push_transaction` in that it does not clear the redo
2328 /// stack. Intended to be used to create a parent transaction to merge
2329 /// potential child transactions into.
2330 ///
2331 /// The caller is responsible for removing it from the undo history using
2332 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2333 /// are merged into this transaction, the caller is responsible for ensuring
2334 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2335 /// cleared is to create transactions with the usual `start_transaction` and
2336 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
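    ///
    /// A sketch of that pattern (not compiled; assumes a `&mut Buffer` named
    /// `buffer` and a `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "x")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     // No edits were made, so remove the empty parent transaction.
    ///     let _ = buffer.forget_transaction(parent);
    /// }
    /// ```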
2338 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2339 self.text.push_empty_transaction(now)
2340 }
2341
2342 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2344 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2345 self.text.finalize_last_transaction()
2346 }
2347
2348 /// Manually group all changes since a given transaction.
2349 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2350 self.text.group_until_transaction(transaction_id);
2351 }
2352
    /// Manually remove a transaction from the buffer's undo history.
2354 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2355 self.text.forget_transaction(transaction_id)
2356 }
2357
    /// Retrieve a transaction from the buffer's undo history.
2359 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2360 self.text.get_transaction(transaction_id)
2361 }
2362
2363 /// Manually merge two transactions in the buffer's undo history.
2364 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2365 self.text.merge_transactions(transaction, destination);
2366 }
2367
2368 /// Waits for the buffer to receive operations with the given timestamps.
2369 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2370 &mut self,
2371 edit_ids: It,
2372 ) -> impl Future<Output = Result<()>> + use<It> {
2373 self.text.wait_for_edits(edit_ids)
2374 }
2375
2376 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2377 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2378 &mut self,
2379 anchors: It,
2380 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2381 self.text.wait_for_anchors(anchors)
2382 }
2383
2384 /// Waits for the buffer to receive operations up to the given version.
2385 pub fn wait_for_version(
2386 &mut self,
2387 version: clock::Global,
2388 ) -> impl Future<Output = Result<()>> + use<> {
2389 self.text.wait_for_version(version)
2390 }
2391
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2394 pub fn give_up_waiting(&mut self) {
2395 self.text.give_up_waiting();
2396 }
2397
2398 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2399 let mut rx = None;
2400 if !self.autoindent_requests.is_empty() {
2401 let channel = oneshot::channel();
2402 self.wait_for_autoindent_txs.push(channel.0);
2403 rx = Some(channel.1);
2404 }
2405 rx
2406 }
2407
2408 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2409 pub fn set_active_selections(
2410 &mut self,
2411 selections: Arc<[Selection<Anchor>]>,
2412 line_mode: bool,
2413 cursor_shape: CursorShape,
2414 cx: &mut Context<Self>,
2415 ) {
2416 let lamport_timestamp = self.text.lamport_clock.tick();
2417 self.remote_selections.insert(
2418 self.text.replica_id(),
2419 SelectionSet {
2420 selections: selections.clone(),
2421 lamport_timestamp,
2422 line_mode,
2423 cursor_shape,
2424 },
2425 );
2426 self.send_operation(
2427 Operation::UpdateSelections {
2428 selections,
2429 line_mode,
2430 lamport_timestamp,
2431 cursor_shape,
2432 },
2433 true,
2434 cx,
2435 );
2436 self.non_text_state_update_count += 1;
2437 cx.notify();
2438 }
2439
2440 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2441 /// this replica.
2442 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2443 if self
2444 .remote_selections
2445 .get(&self.text.replica_id())
2446 .is_none_or(|set| !set.selections.is_empty())
2447 {
2448 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2449 }
2450 }
2451
2452 pub fn set_agent_selections(
2453 &mut self,
2454 selections: Arc<[Selection<Anchor>]>,
2455 line_mode: bool,
2456 cursor_shape: CursorShape,
2457 cx: &mut Context<Self>,
2458 ) {
2459 let lamport_timestamp = self.text.lamport_clock.tick();
2460 self.remote_selections.insert(
2461 ReplicaId::AGENT,
2462 SelectionSet {
2463 selections,
2464 lamport_timestamp,
2465 line_mode,
2466 cursor_shape,
2467 },
2468 );
2469 self.non_text_state_update_count += 1;
2470 cx.notify();
2471 }
2472
2473 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2474 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2475 }
2476
2477 /// Replaces the buffer's entire text.
2478 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2479 where
2480 T: Into<Arc<str>>,
2481 {
2482 self.autoindent_requests.clear();
2483 self.edit([(0..self.len(), text)], None, cx)
2484 }
2485
2486 /// Appends the given text to the end of the buffer.
2487 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2488 where
2489 T: Into<Arc<str>>,
2490 {
2491 self.edit([(self.len()..self.len(), text)], None, cx)
2492 }
2493
2494 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2495 /// delete, and a string of text to insert at that location.
2496 ///
2497 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2498 /// request for the edited ranges, which will be processed when the buffer finishes
2499 /// parsing.
2500 ///
    /// Parsing takes place at the end of a transaction, and may be performed
    /// synchronously or asynchronously, depending on the changes.
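    ///
    /// A usage sketch (not compiled; assumes this runs inside a `buffer.update`
    /// closure with `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// // Insert a snippet at the start of the buffer, re-indenting each
    /// // inserted line according to the language's indent rules.
    /// buffer.edit(
    ///     [(0..0, "fn main() {\n}\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```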
2503 pub fn edit<I, S, T>(
2504 &mut self,
2505 edits_iter: I,
2506 autoindent_mode: Option<AutoindentMode>,
2507 cx: &mut Context<Self>,
2508 ) -> Option<clock::Lamport>
2509 where
2510 I: IntoIterator<Item = (Range<S>, T)>,
2511 S: ToOffset,
2512 T: Into<Arc<str>>,
2513 {
2514 // Skip invalid edits and coalesce contiguous ones.
2515 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2516
2517 for (range, new_text) in edits_iter {
2518 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2519
2520 if range.start > range.end {
2521 mem::swap(&mut range.start, &mut range.end);
2522 }
2523 let new_text = new_text.into();
2524 if !new_text.is_empty() || !range.is_empty() {
2525 if let Some((prev_range, prev_text)) = edits.last_mut()
2526 && prev_range.end >= range.start
2527 {
2528 prev_range.end = cmp::max(prev_range.end, range.end);
2529 *prev_text = format!("{prev_text}{new_text}").into();
2530 } else {
2531 edits.push((range, new_text));
2532 }
2533 }
2534 }
2535 if edits.is_empty() {
2536 return None;
2537 }
2538
2539 self.start_transaction();
2540 self.pending_autoindent.take();
2541 let autoindent_request = autoindent_mode
2542 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2543
2544 let edit_operation = self.text.edit(edits.iter().cloned());
2545 let edit_id = edit_operation.timestamp();
2546
2547 if let Some((before_edit, mode)) = autoindent_request {
2548 let mut delta = 0isize;
2549 let mut previous_setting = None;
2550 let entries: Vec<_> = edits
2551 .into_iter()
2552 .enumerate()
2553 .zip(&edit_operation.as_edit().unwrap().new_text)
2554 .filter(|((_, (range, _)), _)| {
2555 let language = before_edit.language_at(range.start);
2556 let language_id = language.map(|l| l.id());
2557 if let Some((cached_language_id, auto_indent)) = previous_setting
2558 && cached_language_id == language_id
2559 {
2560 auto_indent
2561 } else {
2562 // The auto-indent setting is not present in editorconfigs, hence
2563 // we can avoid passing the file here.
2564 let auto_indent =
2565 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2566 previous_setting = Some((language_id, auto_indent));
2567 auto_indent
2568 }
2569 })
2570 .map(|((ix, (range, _)), new_text)| {
2571 let new_text_length = new_text.len();
2572 let old_start = range.start.to_point(&before_edit);
2573 let new_start = (delta + range.start as isize) as usize;
2574 let range_len = range.end - range.start;
2575 delta += new_text_length as isize - range_len as isize;
2576
2577 // Decide what range of the insertion to auto-indent, and whether
2578 // the first line of the insertion should be considered a newly-inserted line
2579 // or an edit to an existing line.
2580 let mut range_of_insertion_to_indent = 0..new_text_length;
2581 let mut first_line_is_new = true;
2582
2583 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2584 let old_line_end = before_edit.line_len(old_start.row);
2585
2586 if old_start.column > old_line_start {
2587 first_line_is_new = false;
2588 }
2589
2590 if !new_text.contains('\n')
2591 && (old_start.column + (range_len as u32) < old_line_end
2592 || old_line_end == old_line_start)
2593 {
2594 first_line_is_new = false;
2595 }
2596
2597 // When inserting text starting with a newline, avoid auto-indenting the
2598 // previous line.
2599 if new_text.starts_with('\n') {
2600 range_of_insertion_to_indent.start += 1;
2601 first_line_is_new = true;
2602 }
2603
2604 let mut original_indent_column = None;
2605 if let AutoindentMode::Block {
2606 original_indent_columns,
2607 } = &mode
2608 {
2609 original_indent_column = Some(if new_text.starts_with('\n') {
2610 indent_size_for_text(
2611 new_text[range_of_insertion_to_indent.clone()].chars(),
2612 )
2613 .len
2614 } else {
2615 original_indent_columns
2616 .get(ix)
2617 .copied()
2618 .flatten()
2619 .unwrap_or_else(|| {
2620 indent_size_for_text(
2621 new_text[range_of_insertion_to_indent.clone()].chars(),
2622 )
2623 .len
2624 })
2625 });
2626
2627 // Avoid auto-indenting the line after the edit.
2628 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2629 range_of_insertion_to_indent.end -= 1;
2630 }
2631 }
2632
2633 AutoindentRequestEntry {
2634 first_line_is_new,
2635 original_indent_column,
2636 indent_size: before_edit.language_indent_size_at(range.start, cx),
2637 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2638 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2639 }
2640 })
2641 .collect();
2642
2643 if !entries.is_empty() {
2644 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2645 before_edit,
2646 entries,
2647 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2648 ignore_empty_lines: false,
2649 }));
2650 }
2651 }
2652
2653 self.end_transaction(cx);
2654 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2655 Some(edit_id)
2656 }
2657
2658 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2659 self.was_changed();
2660
2661 if self.edits_since::<usize>(old_version).next().is_none() {
2662 return;
2663 }
2664
2665 self.reparse(cx, true);
2666 cx.emit(BufferEvent::Edited);
2667 if was_dirty != self.is_dirty() {
2668 cx.emit(BufferEvent::DirtyChanged);
2669 }
2670 cx.notify();
2671 }
2672
2673 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2674 where
2675 I: IntoIterator<Item = Range<T>>,
2676 T: ToOffset + Copy,
2677 {
2678 let before_edit = self.snapshot();
2679 let entries = ranges
2680 .into_iter()
2681 .map(|range| AutoindentRequestEntry {
2682 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2683 first_line_is_new: true,
2684 indent_size: before_edit.language_indent_size_at(range.start, cx),
2685 original_indent_column: None,
2686 })
2687 .collect();
2688 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2689 before_edit,
2690 entries,
2691 is_block_mode: false,
2692 ignore_empty_lines: true,
2693 }));
2694 self.request_autoindent(cx);
2695 }
2696
    /// Inserts newlines at the given position to create an empty line, returning
    /// the start of the new line. You can also request the insertion of empty
    /// lines above and below the line starting at the returned point.
2699 pub fn insert_empty_line(
2700 &mut self,
2701 position: impl ToPoint,
2702 space_above: bool,
2703 space_below: bool,
2704 cx: &mut Context<Self>,
2705 ) -> Point {
2706 let mut position = position.to_point(self);
2707
2708 self.start_transaction();
2709
2710 self.edit(
2711 [(position..position, "\n")],
2712 Some(AutoindentMode::EachLine),
2713 cx,
2714 );
2715
2716 if position.column > 0 {
2717 position += Point::new(1, 0);
2718 }
2719
2720 if !self.is_line_blank(position.row) {
2721 self.edit(
2722 [(position..position, "\n")],
2723 Some(AutoindentMode::EachLine),
2724 cx,
2725 );
2726 }
2727
2728 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2729 self.edit(
2730 [(position..position, "\n")],
2731 Some(AutoindentMode::EachLine),
2732 cx,
2733 );
2734 position.row += 1;
2735 }
2736
2737 if space_below
2738 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2739 {
2740 self.edit(
2741 [(position..position, "\n")],
2742 Some(AutoindentMode::EachLine),
2743 cx,
2744 );
2745 }
2746
2747 self.end_transaction(cx);
2748
2749 position
2750 }
2751
2752 /// Applies the given remote operations to the buffer.
2753 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2754 self.pending_autoindent.take();
2755 let was_dirty = self.is_dirty();
2756 let old_version = self.version.clone();
2757 let mut deferred_ops = Vec::new();
2758 let buffer_ops = ops
2759 .into_iter()
2760 .filter_map(|op| match op {
2761 Operation::Buffer(op) => Some(op),
2762 _ => {
2763 if self.can_apply_op(&op) {
2764 self.apply_op(op, cx);
2765 } else {
2766 deferred_ops.push(op);
2767 }
2768 None
2769 }
2770 })
2771 .collect::<Vec<_>>();
2772 for operation in buffer_ops.iter() {
2773 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2774 }
2775 self.text.apply_ops(buffer_ops);
2776 self.deferred_ops.insert(deferred_ops);
2777 self.flush_deferred_ops(cx);
2778 self.did_edit(&old_version, was_dirty, cx);
2779 // Notify independently of whether the buffer was edited as the operations could include a
2780 // selection update.
2781 cx.notify();
2782 }
2783
2784 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2785 let mut deferred_ops = Vec::new();
2786 for op in self.deferred_ops.drain().iter().cloned() {
2787 if self.can_apply_op(&op) {
2788 self.apply_op(op, cx);
2789 } else {
2790 deferred_ops.push(op);
2791 }
2792 }
2793 self.deferred_ops.insert(deferred_ops);
2794 }
2795
2796 pub fn has_deferred_ops(&self) -> bool {
2797 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2798 }
2799
2800 fn can_apply_op(&self, operation: &Operation) -> bool {
2801 match operation {
2802 Operation::Buffer(_) => {
2803 unreachable!("buffer operations should never be applied at this layer")
2804 }
2805 Operation::UpdateDiagnostics {
2806 diagnostics: diagnostic_set,
2807 ..
2808 } => diagnostic_set.iter().all(|diagnostic| {
2809 self.text.can_resolve(&diagnostic.range.start)
2810 && self.text.can_resolve(&diagnostic.range.end)
2811 }),
2812 Operation::UpdateSelections { selections, .. } => selections
2813 .iter()
2814 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2815 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2816 }
2817 }
2818
2819 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2820 match operation {
2821 Operation::Buffer(_) => {
2822 unreachable!("buffer operations should never be applied at this layer")
2823 }
2824 Operation::UpdateDiagnostics {
2825 server_id,
2826 diagnostics: diagnostic_set,
2827 lamport_timestamp,
2828 } => {
2829 let snapshot = self.snapshot();
2830 self.apply_diagnostic_update(
2831 server_id,
2832 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2833 lamport_timestamp,
2834 cx,
2835 );
2836 }
2837 Operation::UpdateSelections {
2838 selections,
2839 lamport_timestamp,
2840 line_mode,
2841 cursor_shape,
2842 } => {
2843 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2844 && set.lamport_timestamp > lamport_timestamp
2845 {
2846 return;
2847 }
2848
2849 self.remote_selections.insert(
2850 lamport_timestamp.replica_id,
2851 SelectionSet {
2852 selections,
2853 lamport_timestamp,
2854 line_mode,
2855 cursor_shape,
2856 },
2857 );
2858 self.text.lamport_clock.observe(lamport_timestamp);
2859 self.non_text_state_update_count += 1;
2860 }
2861 Operation::UpdateCompletionTriggers {
2862 triggers,
2863 lamport_timestamp,
2864 server_id,
2865 } => {
2866 if triggers.is_empty() {
2867 self.completion_triggers_per_language_server
2868 .remove(&server_id);
2869 self.completion_triggers = self
2870 .completion_triggers_per_language_server
2871 .values()
2872 .flat_map(|triggers| triggers.iter().cloned())
2873 .collect();
2874 } else {
2875 self.completion_triggers_per_language_server
2876 .insert(server_id, triggers.iter().cloned().collect());
2877 self.completion_triggers.extend(triggers);
2878 }
2879 self.text.lamport_clock.observe(lamport_timestamp);
2880 }
2881 Operation::UpdateLineEnding {
2882 line_ending,
2883 lamport_timestamp,
2884 } => {
2885 self.text.set_line_ending(line_ending);
2886 self.text.lamport_clock.observe(lamport_timestamp);
2887 }
2888 }
2889 }
2890
2891 fn apply_diagnostic_update(
2892 &mut self,
2893 server_id: LanguageServerId,
2894 diagnostics: DiagnosticSet,
2895 lamport_timestamp: clock::Lamport,
2896 cx: &mut Context<Self>,
2897 ) {
2898 if lamport_timestamp > self.diagnostics_timestamp {
2899 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2900 if diagnostics.is_empty() {
2901 if let Ok(ix) = ix {
2902 self.diagnostics.remove(ix);
2903 }
2904 } else {
2905 match ix {
2906 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2907 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2908 };
2909 }
2910 self.diagnostics_timestamp = lamport_timestamp;
2911 self.non_text_state_update_count += 1;
2912 self.text.lamport_clock.observe(lamport_timestamp);
2913 cx.notify();
2914 cx.emit(BufferEvent::DiagnosticsUpdated);
2915 }
2916 }
2917
2918 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2919 self.was_changed();
2920 cx.emit(BufferEvent::Operation {
2921 operation,
2922 is_local,
2923 });
2924 }
2925
2926 /// Removes the selections for a given peer.
2927 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2928 self.remote_selections.remove(&replica_id);
2929 cx.notify();
2930 }
2931
2932 /// Undoes the most recent transaction.
2933 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2934 let was_dirty = self.is_dirty();
2935 let old_version = self.version.clone();
2936
2937 if let Some((transaction_id, operation)) = self.text.undo() {
2938 self.send_operation(Operation::Buffer(operation), true, cx);
2939 self.did_edit(&old_version, was_dirty, cx);
2940 Some(transaction_id)
2941 } else {
2942 None
2943 }
2944 }
2945
2946 /// Manually undoes a specific transaction in the buffer's undo history.
2947 pub fn undo_transaction(
2948 &mut self,
2949 transaction_id: TransactionId,
2950 cx: &mut Context<Self>,
2951 ) -> bool {
2952 let was_dirty = self.is_dirty();
2953 let old_version = self.version.clone();
2954 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2955 self.send_operation(Operation::Buffer(operation), true, cx);
2956 self.did_edit(&old_version, was_dirty, cx);
2957 true
2958 } else {
2959 false
2960 }
2961 }
2962
2963 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2964 pub fn undo_to_transaction(
2965 &mut self,
2966 transaction_id: TransactionId,
2967 cx: &mut Context<Self>,
2968 ) -> bool {
2969 let was_dirty = self.is_dirty();
2970 let old_version = self.version.clone();
2971
2972 let operations = self.text.undo_to_transaction(transaction_id);
2973 let undone = !operations.is_empty();
2974 for operation in operations {
2975 self.send_operation(Operation::Buffer(operation), true, cx);
2976 }
2977 if undone {
2978 self.did_edit(&old_version, was_dirty, cx)
2979 }
2980 undone
2981 }
2982
2983 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2984 let was_dirty = self.is_dirty();
2985 let operation = self.text.undo_operations(counts);
2986 let old_version = self.version.clone();
2987 self.send_operation(Operation::Buffer(operation), true, cx);
2988 self.did_edit(&old_version, was_dirty, cx);
2989 }
2990
    /// Redoes the most recently undone transaction.
2992 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2993 let was_dirty = self.is_dirty();
2994 let old_version = self.version.clone();
2995
2996 if let Some((transaction_id, operation)) = self.text.redo() {
2997 self.send_operation(Operation::Buffer(operation), true, cx);
2998 self.did_edit(&old_version, was_dirty, cx);
2999 Some(transaction_id)
3000 } else {
3001 None
3002 }
3003 }
3004
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3006 pub fn redo_to_transaction(
3007 &mut self,
3008 transaction_id: TransactionId,
3009 cx: &mut Context<Self>,
3010 ) -> bool {
3011 let was_dirty = self.is_dirty();
3012 let old_version = self.version.clone();
3013
3014 let operations = self.text.redo_to_transaction(transaction_id);
3015 let redone = !operations.is_empty();
3016 for operation in operations {
3017 self.send_operation(Operation::Buffer(operation), true, cx);
3018 }
3019 if redone {
3020 self.did_edit(&old_version, was_dirty, cx)
3021 }
3022 redone
3023 }
3024
    /// Overrides the current completion triggers for the given language server
    /// with the provided set of triggers.
3026 pub fn set_completion_triggers(
3027 &mut self,
3028 server_id: LanguageServerId,
3029 triggers: BTreeSet<String>,
3030 cx: &mut Context<Self>,
3031 ) {
3032 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3033 if triggers.is_empty() {
3034 self.completion_triggers_per_language_server
3035 .remove(&server_id);
3036 self.completion_triggers = self
3037 .completion_triggers_per_language_server
3038 .values()
3039 .flat_map(|triggers| triggers.iter().cloned())
3040 .collect();
3041 } else {
3042 self.completion_triggers_per_language_server
3043 .insert(server_id, triggers.clone());
3044 self.completion_triggers.extend(triggers.iter().cloned());
3045 }
3046 self.send_operation(
3047 Operation::UpdateCompletionTriggers {
3048 triggers: triggers.into_iter().collect(),
3049 lamport_timestamp: self.completion_triggers_timestamp,
3050 server_id,
3051 },
3052 true,
3053 cx,
3054 );
3055 cx.notify();
3056 }
3057
    /// Returns the set of strings that trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
3060 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3061 &self.completion_triggers
3062 }
3063
3064 /// Call this directly after performing edits to prevent the preview tab
3065 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3066 /// to return false until there are additional edits.
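    ///
    /// A usage sketch (not compiled; assumes a `&mut Buffer` named `buffer` and a
    /// `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.edit([(0..0, "// generated\n")], None, cx);
    /// // Keep the preview tab from being dismissed by the edit above.
    /// buffer.refresh_preview();
    /// ```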
3067 pub fn refresh_preview(&mut self) {
3068 self.preview_version = self.version.clone();
3069 }
3070
3071 /// Whether we should preserve the preview status of a tab containing this buffer.
3072 pub fn preserve_preview(&self) -> bool {
3073 !self.has_edits_since(&self.preview_version)
3074 }
3075}
3076
3077#[doc(hidden)]
3078#[cfg(any(test, feature = "test-support"))]
3079impl Buffer {
3080 pub fn edit_via_marked_text(
3081 &mut self,
3082 marked_string: &str,
3083 autoindent_mode: Option<AutoindentMode>,
3084 cx: &mut Context<Self>,
3085 ) {
3086 let edits = self.edits_for_marked_text(marked_string);
3087 self.edit(edits, autoindent_mode, cx);
3088 }
3089
3090 pub fn set_group_interval(&mut self, group_interval: Duration) {
3091 self.text.set_group_interval(group_interval);
3092 }
3093
3094 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3095 where
3096 T: rand::Rng,
3097 {
3098 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3099 let mut last_end = None;
3100 for _ in 0..old_range_count {
3101 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3102 break;
3103 }
3104
3105 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3106 let mut range = self.random_byte_range(new_start, rng);
3107 if rng.random_bool(0.2) {
3108 mem::swap(&mut range.start, &mut range.end);
3109 }
3110 last_end = Some(range.end);
3111
3112 let new_text_len = rng.random_range(0..10);
3113 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3114 new_text = new_text.to_uppercase();
3115
3116 edits.push((range, new_text));
3117 }
3118 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3119 self.edit(edits, None, cx);
3120 }
3121
3122 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3123 let was_dirty = self.is_dirty();
3124 let old_version = self.version.clone();
3125
3126 let ops = self.text.randomly_undo_redo(rng);
3127 if !ops.is_empty() {
3128 for op in ops {
3129 self.send_operation(Operation::Buffer(op), true, cx);
3130 self.did_edit(&old_version, was_dirty, cx);
3131 }
3132 }
3133 }
3134}
3135
3136impl EventEmitter<BufferEvent> for Buffer {}
3137
3138impl Deref for Buffer {
3139 type Target = TextBuffer;
3140
3141 fn deref(&self) -> &Self::Target {
3142 &self.text
3143 }
3144}
3145
3146impl BufferSnapshot {
    /// Returns the [`IndentSize`] of the indentation that is already present on
    /// the given line.
3149 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3150 indent_size_for_line(self, row)
3151 }
3152
3153 /// Returns [`IndentSize`] for a given position that respects user settings
3154 /// and language preferences.
3155 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3156 let settings = language_settings(
3157 self.language_at(position).map(|l| l.name()),
3158 self.file(),
3159 cx,
3160 );
3161 if settings.hard_tabs {
3162 IndentSize::tab()
3163 } else {
3164 IndentSize::spaces(settings.tab_size.get())
3165 }
3166 }
3167
3168 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3169 /// is passed in as `single_indent_size`.
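    ///
    /// A usage sketch (not compiled; `snapshot` is an assumed `BufferSnapshot`):
    ///
    /// ```ignore
    /// // Suggest indents for rows 2..5, treating four spaces as one indent unit.
    /// let indents = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// for (row, size) in indents {
    ///     // `size.len` is the suggested indent width for `row`.
    /// }
    /// ```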
3170 pub fn suggested_indents(
3171 &self,
3172 rows: impl Iterator<Item = u32>,
3173 single_indent_size: IndentSize,
3174 ) -> BTreeMap<u32, IndentSize> {
3175 let mut result = BTreeMap::new();
3176
3177 for row_range in contiguous_ranges(rows, 10) {
3178 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3179 Some(suggestions) => suggestions,
3180 _ => break,
3181 };
3182
3183 for (row, suggestion) in row_range.zip(suggestions) {
3184 let indent_size = if let Some(suggestion) = suggestion {
3185 result
3186 .get(&suggestion.basis_row)
3187 .copied()
3188 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3189 .with_delta(suggestion.delta, single_indent_size)
3190 } else {
3191 self.indent_size_for_line(row)
3192 };
3193
3194 result.insert(row, indent_size);
3195 }
3196 }
3197
3198 result
3199 }
3200
3201 fn suggest_autoindents(
3202 &self,
3203 row_range: Range<u32>,
3204 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3205 let config = &self.language.as_ref()?.config;
3206 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3207
3208 #[derive(Debug, Clone)]
3209 struct StartPosition {
3210 start: Point,
3211 suffix: SharedString,
3212 }
3213
3214 // Find the suggested indentation ranges based on the syntax tree.
3215 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3216 let end = Point::new(row_range.end, 0);
3217 let range = (start..end).to_offset(&self.text);
3218 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3219 Some(&grammar.indents_config.as_ref()?.query)
3220 });
3221 let indent_configs = matches
3222 .grammars()
3223 .iter()
3224 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3225 .collect::<Vec<_>>();
3226
3227 let mut indent_ranges = Vec::<Range<Point>>::new();
3228 let mut start_positions = Vec::<StartPosition>::new();
3229 let mut outdent_positions = Vec::<Point>::new();
3230 while let Some(mat) = matches.peek() {
3231 let mut start: Option<Point> = None;
3232 let mut end: Option<Point> = None;
3233
3234 let config = indent_configs[mat.grammar_index];
3235 for capture in mat.captures {
3236 if capture.index == config.indent_capture_ix {
3237 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3238 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3239 } else if Some(capture.index) == config.start_capture_ix {
3240 start = Some(Point::from_ts_point(capture.node.end_position()));
3241 } else if Some(capture.index) == config.end_capture_ix {
3242 end = Some(Point::from_ts_point(capture.node.start_position()));
3243 } else if Some(capture.index) == config.outdent_capture_ix {
3244 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3245 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3246 start_positions.push(StartPosition {
3247 start: Point::from_ts_point(capture.node.start_position()),
3248 suffix: suffix.clone(),
3249 });
3250 }
3251 }
3252
3253 matches.advance();
3254 if let Some((start, end)) = start.zip(end) {
3255 if start.row == end.row {
3256 continue;
3257 }
3258 let range = start..end;
3259 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3260 Err(ix) => indent_ranges.insert(ix, range),
3261 Ok(ix) => {
3262 let prev_range = &mut indent_ranges[ix];
3263 prev_range.end = prev_range.end.max(range.end);
3264 }
3265 }
3266 }
3267 }
3268
3269 let mut error_ranges = Vec::<Range<Point>>::new();
3270 let mut matches = self
3271 .syntax
3272 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3273 while let Some(mat) = matches.peek() {
3274 let node = mat.captures[0].node;
3275 let start = Point::from_ts_point(node.start_position());
3276 let end = Point::from_ts_point(node.end_position());
3277 let range = start..end;
3278 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3279 Ok(ix) | Err(ix) => ix,
3280 };
3281 let mut end_ix = ix;
3282 while let Some(existing_range) = error_ranges.get(end_ix) {
3283 if existing_range.end < end {
3284 end_ix += 1;
3285 } else {
3286 break;
3287 }
3288 }
3289 error_ranges.splice(ix..end_ix, [range]);
3290 matches.advance();
3291 }
3292
3293 outdent_positions.sort();
3294 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and set its end to the outdent position.
3297 if let Some(range_to_truncate) = indent_ranges
3298 .iter_mut()
3299 .filter(|indent_range| indent_range.contains(&outdent_position))
3300 .next_back()
3301 {
3302 range_to_truncate.end = outdent_position;
3303 }
3304 }
3305
3306 start_positions.sort_by_key(|b| b.start);
3307
        // Find the suggested indentation increases and decreases based on regexes.
3309 let mut regex_outdent_map = HashMap::default();
3310 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3311 let mut start_positions_iter = start_positions.iter().peekable();
3312
3313 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3314 self.for_each_line(
3315 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3316 ..Point::new(row_range.end, 0),
3317 |row, line| {
3318 if config
3319 .decrease_indent_pattern
3320 .as_ref()
3321 .is_some_and(|regex| regex.is_match(line))
3322 {
3323 indent_change_rows.push((row, Ordering::Less));
3324 }
3325 if config
3326 .increase_indent_pattern
3327 .as_ref()
3328 .is_some_and(|regex| regex.is_match(line))
3329 {
3330 indent_change_rows.push((row + 1, Ordering::Greater));
3331 }
3332 while let Some(pos) = start_positions_iter.peek() {
3333 if pos.start.row < row {
3334 let pos = start_positions_iter.next().unwrap();
3335 last_seen_suffix
3336 .entry(pos.suffix.to_string())
3337 .or_default()
3338 .push(pos.start);
3339 } else {
3340 break;
3341 }
3342 }
3343 for rule in &config.decrease_indent_patterns {
3344 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3345 let row_start_column = self.indent_size_for_line(row).len;
3346 let basis_row = rule
3347 .valid_after
3348 .iter()
3349 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3350 .flatten()
3351 .filter(|start_point| start_point.column <= row_start_column)
3352 .max_by_key(|start_point| start_point.row);
3353 if let Some(outdent_to_row) = basis_row {
3354 regex_outdent_map.insert(row, outdent_to_row.row);
3355 }
3356 break;
3357 }
3358 }
3359 },
3360 );
3361
3362 let mut indent_changes = indent_change_rows.into_iter().peekable();
3363 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3364 prev_non_blank_row.unwrap_or(0)
3365 } else {
3366 row_range.start.saturating_sub(1)
3367 };
3368
3369 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3370 Some(row_range.map(move |row| {
3371 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3372
3373 let mut indent_from_prev_row = false;
3374 let mut outdent_from_prev_row = false;
3375 let mut outdent_to_row = u32::MAX;
3376 let mut from_regex = false;
3377
3378 while let Some((indent_row, delta)) = indent_changes.peek() {
3379 match indent_row.cmp(&row) {
3380 Ordering::Equal => match delta {
3381 Ordering::Less => {
3382 from_regex = true;
3383 outdent_from_prev_row = true
3384 }
3385 Ordering::Greater => {
3386 indent_from_prev_row = true;
3387 from_regex = true
3388 }
3389 _ => {}
3390 },
3391
3392 Ordering::Greater => break,
3393 Ordering::Less => {}
3394 }
3395
3396 indent_changes.next();
3397 }
3398
3399 for range in &indent_ranges {
3400 if range.start.row >= row {
3401 break;
3402 }
3403 if range.start.row == prev_row && range.end > row_start {
3404 indent_from_prev_row = true;
3405 }
3406 if range.end > prev_row_start && range.end <= row_start {
3407 outdent_to_row = outdent_to_row.min(range.start.row);
3408 }
3409 }
3410
3411 if let Some(basis_row) = regex_outdent_map.get(&row) {
3412 indent_from_prev_row = false;
3413 outdent_to_row = *basis_row;
3414 from_regex = true;
3415 }
3416
3417 let within_error = error_ranges
3418 .iter()
3419 .any(|e| e.start.row < row && e.end > row_start);
3420
3421 let suggestion = if outdent_to_row == prev_row
3422 || (outdent_from_prev_row && indent_from_prev_row)
3423 {
3424 Some(IndentSuggestion {
3425 basis_row: prev_row,
3426 delta: Ordering::Equal,
3427 within_error: within_error && !from_regex,
3428 })
3429 } else if indent_from_prev_row {
3430 Some(IndentSuggestion {
3431 basis_row: prev_row,
3432 delta: Ordering::Greater,
3433 within_error: within_error && !from_regex,
3434 })
3435 } else if outdent_to_row < prev_row {
3436 Some(IndentSuggestion {
3437 basis_row: outdent_to_row,
3438 delta: Ordering::Equal,
3439 within_error: within_error && !from_regex,
3440 })
3441 } else if outdent_from_prev_row {
3442 Some(IndentSuggestion {
3443 basis_row: prev_row,
3444 delta: Ordering::Less,
3445 within_error: within_error && !from_regex,
3446 })
3447 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3448 {
3449 Some(IndentSuggestion {
3450 basis_row: prev_row,
3451 delta: Ordering::Equal,
3452 within_error: within_error && !from_regex,
3453 })
3454 } else {
3455 None
3456 };
3457
3458 prev_row = row;
3459 prev_row_start = row_start;
3460 suggestion
3461 }))
3462 }
3463
3464 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3465 while row > 0 {
3466 row -= 1;
3467 if !self.is_line_blank(row) {
3468 return Some(row);
3469 }
3470 }
3471 None
3472 }
3473
3474 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3475 let captures = self.syntax.captures(range, &self.text, |grammar| {
3476 grammar
3477 .highlights_config
3478 .as_ref()
3479 .map(|config| &config.query)
3480 });
3481 let highlight_maps = captures
3482 .grammars()
3483 .iter()
3484 .map(|grammar| grammar.highlight_map())
3485 .collect();
3486 (captures, highlight_maps)
3487 }
3488
    /// Iterates over chunks of text in the given range of the buffer. Chunk boundaries
    /// are arbitrary, because the text is stored in a [`Rope`](text::Rope), but each
    /// returned chunk has a single syntax highlighting style and diagnostic status.
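    ///
    /// A usage sketch, assuming an existing `BufferSnapshot` named `snapshot` (not a
    /// doctest, since constructing a buffer requires an app context):
    ///
    /// ```ignore
    /// // Reassemble the buffer's text one uniformly-styled chunk at a time.
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     let _style = chunk.syntax_highlight_id; // at most one highlight per chunk
    ///     text.push_str(chunk.text);
    /// }
    /// ```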
3493 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3494 let range = range.start.to_offset(self)..range.end.to_offset(self);
3495
3496 let mut syntax = None;
3497 if language_aware {
3498 syntax = Some(self.get_highlights(range.clone()));
3499 }
3500 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3501 let diagnostics = language_aware;
3502 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3503 }
3504
3505 pub fn highlighted_text_for_range<T: ToOffset>(
3506 &self,
3507 range: Range<T>,
3508 override_style: Option<HighlightStyle>,
3509 syntax_theme: &SyntaxTheme,
3510 ) -> HighlightedText {
3511 HighlightedText::from_buffer_range(
3512 range,
3513 &self.text,
3514 &self.syntax,
3515 override_style,
3516 syntax_theme,
3517 )
3518 }
3519
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// A callback is used so that a new `String` does not have to be allocated for each line.
3522 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3523 let mut line = String::new();
3524 let mut row = range.start.row;
3525 for chunk in self
3526 .as_rope()
3527 .chunks_in_range(range.to_offset(self))
3528 .chain(["\n"])
3529 {
3530 for (newline_ix, text) in chunk.split('\n').enumerate() {
3531 if newline_ix > 0 {
3532 callback(row, &line);
3533 row += 1;
3534 line.clear();
3535 }
3536 line.push_str(text);
3537 }
3538 }
3539 }
3540
3541 /// Iterates over every [`SyntaxLayer`] in the buffer.
3542 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3543 self.syntax_layers_for_range(0..self.len(), true)
3544 }
3545
3546 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3547 let offset = position.to_offset(self);
3548 self.syntax_layers_for_range(offset..offset, false)
3549 .filter(|l| {
3550 if let Some(ranges) = l.included_sub_ranges {
3551 ranges.iter().any(|range| {
3552 let start = range.start.to_offset(self);
3553 start <= offset && {
3554 let end = range.end.to_offset(self);
3555 offset < end
3556 }
3557 })
3558 } else {
3559 l.node().start_byte() <= offset && l.node().end_byte() > offset
3560 }
3561 })
3562 .last()
3563 }
3564
3565 pub fn syntax_layers_for_range<D: ToOffset>(
3566 &self,
3567 range: Range<D>,
3568 include_hidden: bool,
3569 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3570 self.syntax
3571 .layers_for_range(range, &self.text, include_hidden)
3572 }
3573
3574 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3575 &self,
3576 range: Range<D>,
3577 ) -> Option<SyntaxLayer<'_>> {
3578 let range = range.to_offset(self);
3579 self.syntax
3580 .layers_for_range(range, &self.text, false)
3581 .max_by(|a, b| {
3582 if a.depth != b.depth {
3583 a.depth.cmp(&b.depth)
3584 } else if a.offset.0 != b.offset.0 {
3585 a.offset.0.cmp(&b.offset.0)
3586 } else {
3587 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3588 }
3589 })
3590 }
3591
3592 /// Returns the main [`Language`].
3593 pub fn language(&self) -> Option<&Arc<Language>> {
3594 self.language.as_ref()
3595 }
3596
3597 /// Returns the [`Language`] at the given location.
3598 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3599 self.syntax_layer_at(position)
3600 .map(|info| info.language)
3601 .or(self.language.as_ref())
3602 }
3603
3604 /// Returns the settings for the language at the given location.
3605 pub fn settings_at<'a, D: ToOffset>(
3606 &'a self,
3607 position: D,
3608 cx: &'a App,
3609 ) -> Cow<'a, LanguageSettings> {
3610 language_settings(
3611 self.language_at(position).map(|l| l.name()),
3612 self.file.as_ref(),
3613 cx,
3614 )
3615 }
3616
3617 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3618 CharClassifier::new(self.language_scope_at(point))
3619 }
3620
3621 /// Returns the [`LanguageScope`] at the given location.
3622 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3623 let offset = position.to_offset(self);
3624 let mut scope = None;
3625 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3626
3627 // Use the layer that has the smallest node intersecting the given point.
3628 for layer in self
3629 .syntax
3630 .layers_for_range(offset..offset, &self.text, false)
3631 {
3632 let mut cursor = layer.node().walk();
3633
3634 let mut range = None;
3635 loop {
3636 let child_range = cursor.node().byte_range();
3637 if !child_range.contains(&offset) {
3638 break;
3639 }
3640
3641 range = Some(child_range);
3642 if cursor.goto_first_child_for_byte(offset).is_none() {
3643 break;
3644 }
3645 }
3646
3647 if let Some(range) = range
3648 && smallest_range_and_depth.as_ref().is_none_or(
3649 |(smallest_range, smallest_range_depth)| {
3650 if layer.depth > *smallest_range_depth {
3651 true
3652 } else if layer.depth == *smallest_range_depth {
3653 range.len() < smallest_range.len()
3654 } else {
3655 false
3656 }
3657 },
3658 )
3659 {
3660 smallest_range_and_depth = Some((range, layer.depth));
3661 scope = Some(LanguageScope {
3662 language: layer.language.clone(),
3663 override_id: layer.override_id(offset, &self.text),
3664 });
3665 }
3666 }
3667
3668 scope.or_else(|| {
3669 self.language.clone().map(|language| LanguageScope {
3670 language,
3671 override_id: None,
3672 })
3673 })
3674 }
3675
3676 /// Returns a tuple of the range and character kind of the word
3677 /// surrounding the given position.
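    ///
    /// A usage sketch, assuming a `BufferSnapshot` named `snapshot` and a byte offset
    /// `cursor` (not a doctest):
    ///
    /// ```ignore
    /// let (word_range, kind) = snapshot.surrounding_word(cursor, None);
    /// if kind == Some(CharKind::Word) {
    ///     let word: String = snapshot.text_for_range(word_range).collect();
    /// }
    /// ```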
3678 pub fn surrounding_word<T: ToOffset>(
3679 &self,
3680 start: T,
3681 scope_context: Option<CharScopeContext>,
3682 ) -> (Range<usize>, Option<CharKind>) {
3683 let mut start = start.to_offset(self);
3684 let mut end = start;
3685 let mut next_chars = self.chars_at(start).take(128).peekable();
3686 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3687
3688 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3689 let word_kind = cmp::max(
3690 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3691 next_chars.peek().copied().map(|c| classifier.kind(c)),
3692 );
3693
3694 for ch in prev_chars {
3695 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3696 start -= ch.len_utf8();
3697 } else {
3698 break;
3699 }
3700 }
3701
3702 for ch in next_chars {
3703 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3704 end += ch.len_utf8();
3705 } else {
3706 break;
3707 }
3708 }
3709
3710 (start..end, word_kind)
3711 }
3712
    /// Moves the `TreeCursor` to the smallest descendant or ancestor syntax node enclosing the given
3714 /// range. When `require_larger` is true, the node found must be larger than the query range.
3715 ///
3716 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3717 /// be moved to the root of the tree.
3718 fn goto_node_enclosing_range(
3719 cursor: &mut tree_sitter::TreeCursor,
3720 query_range: &Range<usize>,
3721 require_larger: bool,
3722 ) -> bool {
3723 let mut ascending = false;
3724 loop {
3725 let mut range = cursor.node().byte_range();
3726 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3729 if range.start > query_range.start {
3730 cursor.goto_previous_sibling();
3731 range = cursor.node().byte_range();
3732 }
3733 } else {
3734 // When the query range is non-empty and the current node ends exactly at the start,
3735 // move to the next sibling to find a node that extends beyond the start.
3736 if range.end == query_range.start {
3737 cursor.goto_next_sibling();
3738 range = cursor.node().byte_range();
3739 }
3740 }
3741
3742 let encloses = range.contains_inclusive(query_range)
3743 && (!require_larger || range.len() > query_range.len());
3744 if !encloses {
3745 ascending = true;
3746 if !cursor.goto_parent() {
3747 return false;
3748 }
3749 continue;
3750 } else if ascending {
3751 return true;
3752 }
3753
3754 // Descend into the current node.
3755 if cursor
3756 .goto_first_child_for_byte(query_range.start)
3757 .is_none()
3758 {
3759 return true;
3760 }
3761 }
3762 }
3763
3764 pub fn syntax_ancestor<'a, T: ToOffset>(
3765 &'a self,
3766 range: Range<T>,
3767 ) -> Option<tree_sitter::Node<'a>> {
3768 let range = range.start.to_offset(self)..range.end.to_offset(self);
3769 let mut result: Option<tree_sitter::Node<'a>> = None;
3770 for layer in self
3771 .syntax
3772 .layers_for_range(range.clone(), &self.text, true)
3773 {
3774 let mut cursor = layer.node().walk();
3775
3776 // Find the node that both contains the range and is larger than it.
3777 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3778 continue;
3779 }
3780
3781 let left_node = cursor.node();
3782 let mut layer_result = left_node;
3783
3784 // For an empty range, try to find another node immediately to the right of the range.
3785 if left_node.end_byte() == range.start {
3786 let mut right_node = None;
3787 while !cursor.goto_next_sibling() {
3788 if !cursor.goto_parent() {
3789 break;
3790 }
3791 }
3792
3793 while cursor.node().start_byte() == range.start {
3794 right_node = Some(cursor.node());
3795 if !cursor.goto_first_child() {
3796 break;
3797 }
3798 }
3799
3800 // If there is a candidate node on both sides of the (empty) range, then
3801 // decide between the two by favoring a named node over an anonymous token.
3802 // If both nodes are the same in that regard, favor the right one.
3803 if let Some(right_node) = right_node
3804 && (right_node.is_named() || !left_node.is_named())
3805 {
3806 layer_result = right_node;
3807 }
3808 }
3809
3810 if let Some(previous_result) = &result
3811 && previous_result.byte_range().len() < layer_result.byte_range().len()
3812 {
3813 continue;
3814 }
3815 result = Some(layer_result);
3816 }
3817
3818 result
3819 }
3820
3821 /// Find the previous sibling syntax node at the given range.
3822 ///
3823 /// This function locates the syntax node that precedes the node containing
3824 /// the given range. It searches hierarchically by:
3825 /// 1. Finding the node that contains the given range
3826 /// 2. Looking for the previous sibling at the same tree level
3827 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3828 ///
3829 /// Returns `None` if there is no previous sibling at any ancestor level.
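    ///
    /// A usage sketch, assuming a `BufferSnapshot` named `snapshot` and a byte range
    /// `statement_range` (not a doctest):
    ///
    /// ```ignore
    /// // Inspect the syntax node immediately preceding the current statement.
    /// if let Some(prev) = snapshot.syntax_prev_sibling(statement_range.clone()) {
    ///     let prev_text: String = snapshot.text_for_range(prev.byte_range()).collect();
    /// }
    /// ```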
3830 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3831 &'a self,
3832 range: Range<T>,
3833 ) -> Option<tree_sitter::Node<'a>> {
3834 let range = range.start.to_offset(self)..range.end.to_offset(self);
3835 let mut result: Option<tree_sitter::Node<'a>> = None;
3836
3837 for layer in self
3838 .syntax
3839 .layers_for_range(range.clone(), &self.text, true)
3840 {
3841 let mut cursor = layer.node().walk();
3842
3843 // Find the node that contains the range
3844 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3845 continue;
3846 }
3847
3848 // Look for the previous sibling, moving up ancestor levels if needed
3849 loop {
3850 if cursor.goto_previous_sibling() {
3851 let layer_result = cursor.node();
3852
3853 if let Some(previous_result) = &result {
3854 if previous_result.byte_range().end < layer_result.byte_range().end {
3855 continue;
3856 }
3857 }
3858 result = Some(layer_result);
3859 break;
3860 }
3861
3862 // No sibling found at this level, try moving up to parent
3863 if !cursor.goto_parent() {
3864 break;
3865 }
3866 }
3867 }
3868
3869 result
3870 }
3871
3872 /// Find the next sibling syntax node at the given range.
3873 ///
3874 /// This function locates the syntax node that follows the node containing
3875 /// the given range. It searches hierarchically by:
3876 /// 1. Finding the node that contains the given range
3877 /// 2. Looking for the next sibling at the same tree level
3878 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3879 ///
3880 /// Returns `None` if there is no next sibling at any ancestor level.
3881 pub fn syntax_next_sibling<'a, T: ToOffset>(
3882 &'a self,
3883 range: Range<T>,
3884 ) -> Option<tree_sitter::Node<'a>> {
3885 let range = range.start.to_offset(self)..range.end.to_offset(self);
3886 let mut result: Option<tree_sitter::Node<'a>> = None;
3887
3888 for layer in self
3889 .syntax
3890 .layers_for_range(range.clone(), &self.text, true)
3891 {
3892 let mut cursor = layer.node().walk();
3893
3894 // Find the node that contains the range
3895 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3896 continue;
3897 }
3898
3899 // Look for the next sibling, moving up ancestor levels if needed
3900 loop {
3901 if cursor.goto_next_sibling() {
3902 let layer_result = cursor.node();
3903
3904 if let Some(previous_result) = &result {
3905 if previous_result.byte_range().start > layer_result.byte_range().start {
3906 continue;
3907 }
3908 }
3909 result = Some(layer_result);
3910 break;
3911 }
3912
3913 // No sibling found at this level, try moving up to parent
3914 if !cursor.goto_parent() {
3915 break;
3916 }
3917 }
3918 }
3919
3920 result
3921 }
3922
    /// Returns the root syntax node within the given row.
3924 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3925 let start_offset = position.to_offset(self);
3926
3927 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3928
3929 let layer = self
3930 .syntax
3931 .layers_for_range(start_offset..start_offset, &self.text, true)
3932 .next()?;
3933
3934 let mut cursor = layer.node().walk();
3935
        // Descend to the first leaf that touches the given position.
3937 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3938 if cursor.node().end_byte() == start_offset {
3939 cursor.goto_next_sibling();
3940 }
3941 }
3942
3943 // Ascend to the root node within the same row.
3944 while cursor.goto_parent() {
3945 if cursor.node().start_position().row != row {
3946 break;
3947 }
3948 }
3949
3950 Some(cursor.node())
3951 }
3952
3953 /// Returns the outline for the buffer.
3954 ///
3955 /// This method allows passing an optional [`SyntaxTheme`] to
3956 /// syntax-highlight the returned symbols.
3957 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3958 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3959 }
3960
3961 /// Returns all the symbols that contain the given position.
3962 ///
3963 /// This method allows passing an optional [`SyntaxTheme`] to
3964 /// syntax-highlight the returned symbols.
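    ///
    /// A usage sketch, assuming a `BufferSnapshot` named `snapshot` and a byte offset
    /// `cursor` (not a doctest):
    ///
    /// ```ignore
    /// // Walk from the outermost symbol down to the innermost one containing `cursor`.
    /// for symbol in snapshot.symbols_containing(cursor, None) {
    ///     println!("{}{}", "  ".repeat(symbol.depth), symbol.text);
    /// }
    /// ```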
3965 pub fn symbols_containing<T: ToOffset>(
3966 &self,
3967 position: T,
3968 theme: Option<&SyntaxTheme>,
3969 ) -> Vec<OutlineItem<Anchor>> {
3970 let position = position.to_offset(self);
3971 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3972 let end = self.clip_offset(position + 1, Bias::Right);
3973 let mut items = self.outline_items_containing(start..end, false, theme);
3974 let mut prev_depth = None;
3975 items.retain(|item| {
3976 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3977 prev_depth = Some(item.depth);
3978 result
3979 });
3980 items
3981 }
3982
3983 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3984 let range = range.to_offset(self);
3985 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3986 grammar.outline_config.as_ref().map(|c| &c.query)
3987 });
3988 let configs = matches
3989 .grammars()
3990 .iter()
3991 .map(|g| g.outline_config.as_ref().unwrap())
3992 .collect::<Vec<_>>();
3993
3994 while let Some(mat) = matches.peek() {
3995 let config = &configs[mat.grammar_index];
3996 let containing_item_node = maybe!({
3997 let item_node = mat.captures.iter().find_map(|cap| {
3998 if cap.index == config.item_capture_ix {
3999 Some(cap.node)
4000 } else {
4001 None
4002 }
4003 })?;
4004
4005 let item_byte_range = item_node.byte_range();
4006 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4007 None
4008 } else {
4009 Some(item_node)
4010 }
4011 });
4012
4013 if let Some(item_node) = containing_item_node {
4014 return Some(
4015 Point::from_ts_point(item_node.start_position())
4016 ..Point::from_ts_point(item_node.end_position()),
4017 );
4018 }
4019
4020 matches.advance();
4021 }
4022 None
4023 }
4024
4025 pub fn outline_items_containing<T: ToOffset>(
4026 &self,
4027 range: Range<T>,
4028 include_extra_context: bool,
4029 theme: Option<&SyntaxTheme>,
4030 ) -> Vec<OutlineItem<Anchor>> {
4031 self.outline_items_containing_internal(
4032 range,
4033 include_extra_context,
4034 theme,
4035 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4036 )
4037 }
4038
4039 pub fn outline_items_as_points_containing<T: ToOffset>(
4040 &self,
4041 range: Range<T>,
4042 include_extra_context: bool,
4043 theme: Option<&SyntaxTheme>,
4044 ) -> Vec<OutlineItem<Point>> {
4045 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4046 range
4047 })
4048 }
4049
4050 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4051 &self,
4052 range: Range<T>,
4053 include_extra_context: bool,
4054 theme: Option<&SyntaxTheme>,
4055 ) -> Vec<OutlineItem<usize>> {
4056 self.outline_items_containing_internal(
4057 range,
4058 include_extra_context,
4059 theme,
4060 |buffer, range| range.to_offset(buffer),
4061 )
4062 }
4063
4064 fn outline_items_containing_internal<T: ToOffset, U>(
4065 &self,
4066 range: Range<T>,
4067 include_extra_context: bool,
4068 theme: Option<&SyntaxTheme>,
4069 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4070 ) -> Vec<OutlineItem<U>> {
4071 let range = range.to_offset(self);
4072 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4073 grammar.outline_config.as_ref().map(|c| &c.query)
4074 });
4075
4076 let mut items = Vec::new();
4077 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4078 while let Some(mat) = matches.peek() {
4079 let config = matches.grammars()[mat.grammar_index]
4080 .outline_config
4081 .as_ref()
4082 .unwrap();
4083 if let Some(item) =
4084 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4085 {
4086 items.push(item);
4087 } else if let Some(capture) = mat
4088 .captures
4089 .iter()
4090 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4091 {
4092 let capture_range = capture.node.start_position()..capture.node.end_position();
4093 let mut capture_row_range =
4094 capture_range.start.row as u32..capture_range.end.row as u32;
4095 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4096 {
4097 capture_row_range.end -= 1;
4098 }
4099 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4100 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4101 last_row_range.end = capture_row_range.end;
4102 } else {
4103 annotation_row_ranges.push(capture_row_range);
4104 }
4105 } else {
4106 annotation_row_ranges.push(capture_row_range);
4107 }
4108 }
4109 matches.advance();
4110 }
4111
4112 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4113
        // Assign depths based on containment relationships and convert the ranges via `range_callback`.
4115 let mut item_ends_stack = Vec::<Point>::new();
4116 let mut anchor_items = Vec::new();
4117 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4118 for item in items {
4119 while let Some(last_end) = item_ends_stack.last().copied() {
4120 if last_end < item.range.end {
4121 item_ends_stack.pop();
4122 } else {
4123 break;
4124 }
4125 }
4126
4127 let mut annotation_row_range = None;
4128 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4129 let row_preceding_item = item.range.start.row.saturating_sub(1);
4130 if next_annotation_row_range.end < row_preceding_item {
4131 annotation_row_ranges.next();
4132 } else {
4133 if next_annotation_row_range.end == row_preceding_item {
4134 annotation_row_range = Some(next_annotation_row_range.clone());
4135 annotation_row_ranges.next();
4136 }
4137 break;
4138 }
4139 }
4140
4141 anchor_items.push(OutlineItem {
4142 depth: item_ends_stack.len(),
4143 range: range_callback(self, item.range.clone()),
4144 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4145 text: item.text,
4146 highlight_ranges: item.highlight_ranges,
4147 name_ranges: item.name_ranges,
4148 body_range: item.body_range.map(|r| range_callback(self, r)),
4149 annotation_range: annotation_row_range.map(|annotation_range| {
4150 let point_range = Point::new(annotation_range.start, 0)
4151 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4152 range_callback(self, point_range)
4153 }),
4154 });
4155 item_ends_stack.push(item.range.end);
4156 }
4157
4158 anchor_items
4159 }
4160
4161 fn next_outline_item(
4162 &self,
4163 config: &OutlineConfig,
4164 mat: &SyntaxMapMatch,
4165 range: &Range<usize>,
4166 include_extra_context: bool,
4167 theme: Option<&SyntaxTheme>,
4168 ) -> Option<OutlineItem<Point>> {
4169 let item_node = mat.captures.iter().find_map(|cap| {
4170 if cap.index == config.item_capture_ix {
4171 Some(cap.node)
4172 } else {
4173 None
4174 }
4175 })?;
4176
4177 let item_byte_range = item_node.byte_range();
4178 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4179 return None;
4180 }
4181 let item_point_range = Point::from_ts_point(item_node.start_position())
4182 ..Point::from_ts_point(item_node.end_position());
4183
4184 let mut open_point = None;
4185 let mut close_point = None;
4186
4187 let mut buffer_ranges = Vec::new();
4188 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4189 let mut range = node.start_byte()..node.end_byte();
4190 let start = node.start_position();
4191 if node.end_position().row > start.row {
4192 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4193 }
4194
4195 if !range.is_empty() {
4196 buffer_ranges.push((range, node_is_name));
4197 }
4198 };
4199
4200 for capture in mat.captures {
4201 if capture.index == config.name_capture_ix {
4202 add_to_buffer_ranges(capture.node, true);
4203 } else if Some(capture.index) == config.context_capture_ix
4204 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4205 {
4206 add_to_buffer_ranges(capture.node, false);
4207 } else {
4208 if Some(capture.index) == config.open_capture_ix {
4209 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4210 } else if Some(capture.index) == config.close_capture_ix {
4211 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4212 }
4213 }
4214 }
4215
4216 if buffer_ranges.is_empty() {
4217 return None;
4218 }
4219 let source_range_for_text =
4220 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4221
4222 let mut text = String::new();
4223 let mut highlight_ranges = Vec::new();
4224 let mut name_ranges = Vec::new();
4225 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4226 let mut last_buffer_range_end = 0;
4227 for (buffer_range, is_name) in buffer_ranges {
4228 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4229 if space_added {
4230 text.push(' ');
4231 }
4232 let before_append_len = text.len();
4233 let mut offset = buffer_range.start;
4234 chunks.seek(buffer_range.clone());
4235 for mut chunk in chunks.by_ref() {
4236 if chunk.text.len() > buffer_range.end - offset {
4237 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4238 offset = buffer_range.end;
4239 } else {
4240 offset += chunk.text.len();
4241 }
4242 let style = chunk
4243 .syntax_highlight_id
4244 .zip(theme)
4245 .and_then(|(highlight, theme)| highlight.style(theme));
4246 if let Some(style) = style {
4247 let start = text.len();
4248 let end = start + chunk.text.len();
4249 highlight_ranges.push((start..end, style));
4250 }
4251 text.push_str(chunk.text);
4252 if offset >= buffer_range.end {
4253 break;
4254 }
4255 }
4256 if is_name {
4257 let after_append_len = text.len();
4258 let start = if space_added && !name_ranges.is_empty() {
4259 before_append_len - 1
4260 } else {
4261 before_append_len
4262 };
4263 name_ranges.push(start..after_append_len);
4264 }
4265 last_buffer_range_end = buffer_range.end;
4266 }
4267
4268 Some(OutlineItem {
4269 depth: 0, // We'll calculate the depth later
4270 range: item_point_range,
4271 source_range_for_text: source_range_for_text.to_point(self),
4272 text,
4273 highlight_ranges,
4274 name_ranges,
4275 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4276 annotation_range: None,
4277 })
4278 }
4279
4280 pub fn function_body_fold_ranges<T: ToOffset>(
4281 &self,
4282 within: Range<T>,
4283 ) -> impl Iterator<Item = Range<usize>> + '_ {
4284 self.text_object_ranges(within, TreeSitterOptions::default())
4285 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4286 }
4287
4288 /// For each grammar in the language, runs the provided
4289 /// [`tree_sitter::Query`] against the given range.
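    ///
    /// A usage sketch, assuming a `BufferSnapshot` named `snapshot` (not a doctest):
    ///
    /// ```ignore
    /// // Run each grammar's outline query over the whole buffer.
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here.
    ///     matches.advance();
    /// }
    /// ```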
4290 pub fn matches(
4291 &self,
4292 range: Range<usize>,
4293 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4294 ) -> SyntaxMapMatches<'_> {
4295 self.syntax.matches(range, self, query)
4296 }
4297
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
    /// Hence, it may return more bracket pairs than the range itself contains.
    ///
    /// Chunks listed in `known_chunks` are omitted.
    /// The resulting bracket match collections are not ordered.
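    ///
    /// A usage sketch, assuming a `BufferSnapshot` named `snapshot` (not a doctest):
    ///
    /// ```ignore
    /// // The first query computes brackets for every chunk covering the range.
    /// let by_chunk = snapshot.fetch_bracket_ranges(0..snapshot.len(), None);
    /// // Later queries can pass the chunk rows already on hand so they are skipped.
    /// let known: HashSet<Range<BufferRow>> = by_chunk.keys().cloned().collect();
    /// let newer = snapshot.fetch_bracket_ranges(0..snapshot.len(), Some(&known));
    /// ```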
4303 pub fn fetch_bracket_ranges(
4304 &self,
4305 range: Range<usize>,
4306 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4307 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4308 let mut all_bracket_matches = HashMap::default();
4309
4310 for chunk in self
4311 .tree_sitter_data
4312 .chunks
4313 .applicable_chunks(&[self.anchor_before(range.start)..self.anchor_after(range.end)])
4314 {
4315 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4316 continue;
4317 }
4318 let Some(chunk_range) = self.tree_sitter_data.chunks.chunk_range(chunk) else {
4319 continue;
4320 };
4321 let chunk_range = chunk_range.to_offset(&self);
4322
4323 if let Some(cached_brackets) =
4324 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4325 {
4326 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4327 continue;
4328 }
4329
4330 let mut all_brackets = Vec::new();
4331 let mut opens = Vec::new();
4332 let mut color_pairs = Vec::new();
4333
4334 let mut matches = self
4335 .syntax
4336 .matches(chunk_range.clone(), &self.text, |grammar| {
4337 grammar.brackets_config.as_ref().map(|c| &c.query)
4338 });
4339 let configs = matches
4340 .grammars()
4341 .iter()
4342 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4343 .collect::<Vec<_>>();
4344
4345 while let Some(mat) = matches.peek() {
4346 let mut open = None;
4347 let mut close = None;
4348 let syntax_layer_depth = mat.depth;
4349 let config = configs[mat.grammar_index];
4350 let pattern = &config.patterns[mat.pattern_index];
4351 for capture in mat.captures {
4352 if capture.index == config.open_capture_ix {
4353 open = Some(capture.node.byte_range());
4354 } else if capture.index == config.close_capture_ix {
4355 close = Some(capture.node.byte_range());
4356 }
4357 }
4358
4359 matches.advance();
4360
4361 let Some((open_range, close_range)) = open.zip(close) else {
4362 continue;
4363 };
4364
4365 let bracket_range = open_range.start..=close_range.end;
4366 if !bracket_range.overlaps(&chunk_range) {
4367 continue;
4368 }
4369
4370 let index = all_brackets.len();
4371 all_brackets.push(BracketMatch {
4372 open_range: open_range.clone(),
4373 close_range: close_range.clone(),
4374 newline_only: pattern.newline_only,
4375 syntax_layer_depth,
4376 color_index: None,
4377 });
4378
                // Certain languages have "brackets" that are not really brackets, e.g. tags, and such
                // a bracket pair will match the entire tag with all of the text inside it.
                // For now, avoid colorizing any pair where both brackets are longer than a single char.
                // We still need to colorize `<Element/>` bracket pairs, so this check cannot be made stricter.
4383 let should_color =
4384 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4385 if should_color {
4386 opens.push(open_range.clone());
4387 color_pairs.push((open_range, close_range, index));
4388 }
4389 }
4390
4391 opens.sort_by_key(|r| (r.start, r.end));
4392 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4393 color_pairs.sort_by_key(|(_, close, _)| close.end);
4394
4395 let mut open_stack = Vec::new();
4396 let mut open_index = 0;
4397 for (open, close, index) in color_pairs {
4398 while open_index < opens.len() && opens[open_index].start < close.start {
4399 open_stack.push(opens[open_index].clone());
4400 open_index += 1;
4401 }
4402
4403 if open_stack.last() == Some(&open) {
4404 let depth_index = open_stack.len() - 1;
4405 all_brackets[index].color_index = Some(depth_index);
4406 open_stack.pop();
4407 }
4408 }
4409
4410 all_brackets.sort_by_key(|bracket_match| {
4411 (bracket_match.open_range.start, bracket_match.open_range.end)
4412 });
4413
4414 if let empty_slot @ None =
4415 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4416 {
4417 *empty_slot = Some(all_brackets.clone());
4418 }
4419 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4420 }
4421
4422 all_bracket_matches
4423 }
4424
4425 pub fn all_bracket_ranges(
4426 &self,
4427 range: Range<usize>,
4428 ) -> impl Iterator<Item = BracketMatch<usize>> {
4429 self.fetch_bracket_ranges(range.clone(), None)
4430 .into_values()
4431 .flatten()
4432 .filter(move |bracket_match| {
4433 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4434 bracket_range.overlaps(&range)
4435 })
4436 }
4437
    /// Returns bracket range pairs overlapping or adjacent to `range`.
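    ///
    /// A usage sketch, assuming a `BufferSnapshot` named `snapshot` and a byte offset
    /// `cursor` (not a doctest):
    ///
    /// ```ignore
    /// // Inspect bracket pairs around an empty selection at `cursor`.
    /// for pair in snapshot.bracket_ranges(cursor..cursor) {
    ///     let open: String = snapshot.text_for_range(pair.open_range.clone()).collect();
    ///     let close: String = snapshot.text_for_range(pair.close_range.clone()).collect();
    /// }
    /// ```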
4439 pub fn bracket_ranges<T: ToOffset>(
4440 &self,
4441 range: Range<T>,
4442 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4443 // Find bracket pairs that *inclusively* contain the given range.
4444 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4445 self.all_bracket_ranges(range)
4446 .filter(|pair| !pair.newline_only)
4447 }
4448
4449 pub fn debug_variables_query<T: ToOffset>(
4450 &self,
4451 range: Range<T>,
4452 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4453 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4454
4455 let mut matches = self.syntax.matches_with_options(
4456 range.clone(),
4457 &self.text,
4458 TreeSitterOptions::default(),
4459 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4460 );
4461
4462 let configs = matches
4463 .grammars()
4464 .iter()
4465 .map(|grammar| grammar.debug_variables_config.as_ref())
4466 .collect::<Vec<_>>();
4467
4468 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4469
4470 iter::from_fn(move || {
4471 loop {
4472 while let Some(capture) = captures.pop() {
4473 if capture.0.overlaps(&range) {
4474 return Some(capture);
4475 }
4476 }
4477
4478 let mat = matches.peek()?;
4479
4480 let Some(config) = configs[mat.grammar_index].as_ref() else {
4481 matches.advance();
4482 continue;
4483 };
4484
4485 for capture in mat.captures {
4486 let Some(ix) = config
4487 .objects_by_capture_ix
4488 .binary_search_by_key(&capture.index, |e| e.0)
4489 .ok()
4490 else {
4491 continue;
4492 };
4493 let text_object = config.objects_by_capture_ix[ix].1;
4494 let byte_range = capture.node.byte_range();
4495
4496 let mut found = false;
4497 for (range, existing) in captures.iter_mut() {
4498 if existing == &text_object {
4499 range.start = range.start.min(byte_range.start);
4500 range.end = range.end.max(byte_range.end);
4501 found = true;
4502 break;
4503 }
4504 }
4505
4506 if !found {
4507 captures.push((byte_range, text_object));
4508 }
4509 }
4510
4511 matches.advance();
4512 }
4513 })
4514 }
4515
4516 pub fn text_object_ranges<T: ToOffset>(
4517 &self,
4518 range: Range<T>,
4519 options: TreeSitterOptions,
4520 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4521 let range =
4522 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4523
4524 let mut matches =
4525 self.syntax
4526 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4527 grammar.text_object_config.as_ref().map(|c| &c.query)
4528 });
4529
4530 let configs = matches
4531 .grammars()
4532 .iter()
4533 .map(|grammar| grammar.text_object_config.as_ref())
4534 .collect::<Vec<_>>();
4535
4536 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4537
4538 iter::from_fn(move || {
4539 loop {
4540 while let Some(capture) = captures.pop() {
4541 if capture.0.overlaps(&range) {
4542 return Some(capture);
4543 }
4544 }
4545
4546 let mat = matches.peek()?;
4547
4548 let Some(config) = configs[mat.grammar_index].as_ref() else {
4549 matches.advance();
4550 continue;
4551 };
4552
4553 for capture in mat.captures {
4554 let Some(ix) = config
4555 .text_objects_by_capture_ix
4556 .binary_search_by_key(&capture.index, |e| e.0)
4557 .ok()
4558 else {
4559 continue;
4560 };
4561 let text_object = config.text_objects_by_capture_ix[ix].1;
4562 let byte_range = capture.node.byte_range();
4563
4564 let mut found = false;
4565 for (range, existing) in captures.iter_mut() {
4566 if existing == &text_object {
4567 range.start = range.start.min(byte_range.start);
4568 range.end = range.end.max(byte_range.end);
4569 found = true;
4570 break;
4571 }
4572 }
4573
4574 if !found {
4575 captures.push((byte_range, text_object));
4576 }
4577 }
4578
4579 matches.advance();
4580 }
4581 })
4582 }
4583
    /// Returns the enclosing bracket ranges that contain the given range.
4585 pub fn enclosing_bracket_ranges<T: ToOffset>(
4586 &self,
4587 range: Range<T>,
4588 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4589 let range = range.start.to_offset(self)..range.end.to_offset(self);
4590
4591 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4592 let max_depth = result
4593 .iter()
4594 .map(|mat| mat.syntax_layer_depth)
4595 .max()
4596 .unwrap_or(0);
4597 result.into_iter().filter(move |pair| {
4598 pair.open_range.start <= range.start
4599 && pair.close_range.end >= range.end
4600 && pair.syntax_layer_depth == max_depth
4601 })
4602 }
4603
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to filter which bracket ranges are considered.
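    ///
    /// A usage sketch, assuming a `BufferSnapshot` named `snapshot` and a byte offset
    /// `cursor` (not a doctest):
    ///
    /// ```ignore
    /// // Find the innermost pair around the cursor, skipping pairs with nothing between the brackets.
    /// let filter = |open: Range<usize>, close: Range<usize>| close.start > open.end;
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(cursor..cursor, Some(&filter))
    /// {
    ///     // `open` and `close` are the byte ranges of the opening and closing brackets.
    /// }
    /// ```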
4607 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4608 &self,
4609 range: Range<T>,
4610 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4611 ) -> Option<(Range<usize>, Range<usize>)> {
4612 let range = range.start.to_offset(self)..range.end.to_offset(self);
4613
4614 // Get the ranges of the innermost pair of brackets.
4615 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4616
4617 for pair in self.enclosing_bracket_ranges(range) {
4618 if let Some(range_filter) = range_filter
4619 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4620 {
4621 continue;
4622 }
4623
4624 let len = pair.close_range.end - pair.open_range.start;
4625
4626 if let Some((existing_open, existing_close)) = &result {
4627 let existing_len = existing_close.end - existing_open.start;
4628 if len > existing_len {
4629 continue;
4630 }
4631 }
4632
4633 result = Some((pair.open_range, pair.close_range));
4634 }
4635
4636 result
4637 }
4638
    /// Returns offset ranges for any matches of the redaction query.
4640 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4641 /// will be run on the relevant section of the buffer.
4642 pub fn redacted_ranges<T: ToOffset>(
4643 &self,
4644 range: Range<T>,
4645 ) -> impl Iterator<Item = Range<usize>> + '_ {
4646 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4647 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4648 grammar
4649 .redactions_config
4650 .as_ref()
4651 .map(|config| &config.query)
4652 });
4653
4654 let configs = syntax_matches
4655 .grammars()
4656 .iter()
4657 .map(|grammar| grammar.redactions_config.as_ref())
4658 .collect::<Vec<_>>();
4659
4660 iter::from_fn(move || {
4661 let redacted_range = syntax_matches
4662 .peek()
4663 .and_then(|mat| {
4664 configs[mat.grammar_index].and_then(|config| {
4665 mat.captures
4666 .iter()
4667 .find(|capture| capture.index == config.redaction_capture_ix)
4668 })
4669 })
4670 .map(|mat| mat.node.byte_range());
4671 syntax_matches.advance();
4672 redacted_range
4673 })
4674 }
4675
4676 pub fn injections_intersecting_range<T: ToOffset>(
4677 &self,
4678 range: Range<T>,
4679 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4680 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4681
4682 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4683 grammar
4684 .injection_config
4685 .as_ref()
4686 .map(|config| &config.query)
4687 });
4688
4689 let configs = syntax_matches
4690 .grammars()
4691 .iter()
4692 .map(|grammar| grammar.injection_config.as_ref())
4693 .collect::<Vec<_>>();
4694
4695 iter::from_fn(move || {
4696 let ranges = syntax_matches.peek().and_then(|mat| {
4697 let config = &configs[mat.grammar_index]?;
4698 let content_capture_range = mat.captures.iter().find_map(|capture| {
4699 if capture.index == config.content_capture_ix {
4700 Some(capture.node.byte_range())
4701 } else {
4702 None
4703 }
4704 })?;
4705 let language = self.language_at(content_capture_range.start)?;
4706 Some((content_capture_range, language))
4707 });
4708 syntax_matches.advance();
4709 ranges
4710 })
4711 }
4712
4713 pub fn runnable_ranges(
4714 &self,
4715 offset_range: Range<usize>,
4716 ) -> impl Iterator<Item = RunnableRange> + '_ {
4717 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4718 grammar.runnable_config.as_ref().map(|config| &config.query)
4719 });
4720
4721 let test_configs = syntax_matches
4722 .grammars()
4723 .iter()
4724 .map(|grammar| grammar.runnable_config.as_ref())
4725 .collect::<Vec<_>>();
4726
4727 iter::from_fn(move || {
4728 loop {
4729 let mat = syntax_matches.peek()?;
4730
4731 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4732 let mut run_range = None;
4733 let full_range = mat.captures.iter().fold(
4734 Range {
4735 start: usize::MAX,
4736 end: 0,
4737 },
4738 |mut acc, next| {
4739 let byte_range = next.node.byte_range();
4740 if acc.start > byte_range.start {
4741 acc.start = byte_range.start;
4742 }
4743 if acc.end < byte_range.end {
4744 acc.end = byte_range.end;
4745 }
4746 acc
4747 },
4748 );
4749 if full_range.start > full_range.end {
4750 // We did not find a full spanning range of this match.
4751 return None;
4752 }
4753 let extra_captures: SmallVec<[_; 1]> =
4754 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4755 test_configs
4756 .extra_captures
4757 .get(capture.index as usize)
4758 .cloned()
4759 .and_then(|tag_name| match tag_name {
4760 RunnableCapture::Named(name) => {
4761 Some((capture.node.byte_range(), name))
4762 }
4763 RunnableCapture::Run => {
4764 let _ = run_range.insert(capture.node.byte_range());
4765 None
4766 }
4767 })
4768 }));
4769 let run_range = run_range?;
4770 let tags = test_configs
4771 .query
4772 .property_settings(mat.pattern_index)
4773 .iter()
4774 .filter_map(|property| {
4775 if *property.key == *"tag" {
4776 property
4777 .value
4778 .as_ref()
4779 .map(|value| RunnableTag(value.to_string().into()))
4780 } else {
4781 None
4782 }
4783 })
4784 .collect();
4785 let extra_captures = extra_captures
4786 .into_iter()
4787 .map(|(range, name)| {
4788 (
4789 name.to_string(),
4790 self.text_for_range(range).collect::<String>(),
4791 )
4792 })
4793 .collect();
4794 // All tags should have the same range.
4795 Some(RunnableRange {
4796 run_range,
4797 full_range,
4798 runnable: Runnable {
4799 tags,
4800 language: mat.language,
4801 buffer: self.remote_id(),
4802 },
4803 extra_captures,
4804 buffer_id: self.remote_id(),
4805 })
4806 });
4807
4808 syntax_matches.advance();
4809 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. However, we don't want this
                    // iterator to end just because a match did not contain a run marker, so in that case we
                    // loop around and try the next match.
4812 return test_range;
4813 }
4814 }
4815 })
4816 }
4817
    /// Returns the selections of remote peers (and, optionally, the local replica) intersecting the given range.
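    ///
    /// A usage sketch, assuming a `BufferSnapshot` named `snapshot` and an anchor range
    /// `visible_range` (not a doctest):
    ///
    /// ```ignore
    /// for (replica_id, _line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(visible_range, false)
    /// {
    ///     for selection in selections {
    ///         // Draw `selection` for `replica_id` using `cursor_shape`.
    ///     }
    /// }
    /// ```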
4819 #[allow(clippy::type_complexity)]
4820 pub fn selections_in_range(
4821 &self,
4822 range: Range<Anchor>,
4823 include_local: bool,
4824 ) -> impl Iterator<
4825 Item = (
4826 ReplicaId,
4827 bool,
4828 CursorShape,
4829 impl Iterator<Item = &Selection<Anchor>> + '_,
4830 ),
4831 > + '_ {
4832 self.remote_selections
4833 .iter()
4834 .filter(move |(replica_id, set)| {
4835 (include_local || **replica_id != self.text.replica_id())
4836 && !set.selections.is_empty()
4837 })
4838 .map(move |(replica_id, set)| {
4839 let start_ix = match set.selections.binary_search_by(|probe| {
4840 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4841 }) {
4842 Ok(ix) | Err(ix) => ix,
4843 };
4844 let end_ix = match set.selections.binary_search_by(|probe| {
4845 probe.start.cmp(&range.end, self).then(Ordering::Less)
4846 }) {
4847 Ok(ix) | Err(ix) => ix,
4848 };
4849
4850 (
4851 *replica_id,
4852 set.line_mode,
4853 set.cursor_shape,
4854 set.selections[start_ix..end_ix].iter(),
4855 )
4856 })
4857 }
4858
    /// Returns whether the buffer contains any diagnostics.
4860 pub fn has_diagnostics(&self) -> bool {
4861 !self.diagnostics.is_empty()
4862 }
4863
4864 /// Returns all the diagnostics intersecting the given range.
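    ///
    /// A usage sketch, assuming a `BufferSnapshot` named `snapshot` (not a doctest):
    ///
    /// ```ignore
    /// // Collect error-severity diagnostics, resolved to byte offset ranges.
    /// let errors: Vec<_> = snapshot
    ///     .diagnostics_in_range::<_, usize>(0..snapshot.len(), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```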
4865 pub fn diagnostics_in_range<'a, T, O>(
4866 &'a self,
4867 search_range: Range<T>,
4868 reversed: bool,
4869 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4870 where
4871 T: 'a + Clone + ToOffset,
4872 O: 'a + FromAnchor,
4873 {
4874 let mut iterators: Vec<_> = self
4875 .diagnostics
4876 .iter()
4877 .map(|(_, collection)| {
4878 collection
4879 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4880 .peekable()
4881 })
4882 .collect();
4883
4884 std::iter::from_fn(move || {
4885 let (next_ix, _) = iterators
4886 .iter_mut()
4887 .enumerate()
4888 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4889 .min_by(|(_, a), (_, b)| {
4890 let cmp = a
4891 .range
4892 .start
4893 .cmp(&b.range.start, self)
4894 // when range is equal, sort by diagnostic severity
4895 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4896 // and stabilize order with group_id
4897 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4898 if reversed { cmp.reverse() } else { cmp }
4899 })?;
4900 iterators[next_ix]
4901 .next()
4902 .map(
4903 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4904 diagnostic,
4905 range: FromAnchor::from_anchor(&range.start, self)
4906 ..FromAnchor::from_anchor(&range.end, self),
4907 },
4908 )
4909 })
4910 }
4911
4912 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4913 /// should be used instead.
4914 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4915 &self.diagnostics
4916 }
4917
4918 /// Returns all the diagnostic groups associated with the given
4919 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
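    ///
    /// A usage sketch, assuming a `BufferSnapshot` named `snapshot` (not a doctest):
    ///
    /// ```ignore
    /// // Iterate every diagnostic group from every language server, in positional order.
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    /// }
    /// ```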
4921 pub fn diagnostic_groups(
4922 &self,
4923 language_server_id: Option<LanguageServerId>,
4924 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4925 let mut groups = Vec::new();
4926
4927 if let Some(language_server_id) = language_server_id {
4928 if let Ok(ix) = self
4929 .diagnostics
4930 .binary_search_by_key(&language_server_id, |e| e.0)
4931 {
4932 self.diagnostics[ix]
4933 .1
4934 .groups(language_server_id, &mut groups, self);
4935 }
4936 } else {
4937 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4938 diagnostics.groups(*language_server_id, &mut groups, self);
4939 }
4940 }
4941
4942 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4943 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4944 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4945 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4946 });
4947
4948 groups
4949 }
4950
4951 /// Returns an iterator over the diagnostics for the given group.
4952 pub fn diagnostic_group<O>(
4953 &self,
4954 group_id: usize,
4955 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4956 where
4957 O: FromAnchor + 'static,
4958 {
4959 self.diagnostics
4960 .iter()
4961 .flat_map(move |(_, set)| set.group(group_id, self))
4962 }
4963
4964 /// An integer version number that accounts for all updates besides
4965 /// the buffer's text itself (which is versioned via a version vector).
4966 pub fn non_text_state_update_count(&self) -> usize {
4967 self.non_text_state_update_count
4968 }
4969
4970 /// An integer version that changes when the buffer's syntax changes.
4971 pub fn syntax_update_count(&self) -> usize {
4972 self.syntax.update_count()
4973 }
4974
4975 /// Returns a snapshot of underlying file.
4976 pub fn file(&self) -> Option<&Arc<dyn File>> {
4977 self.file.as_ref()
4978 }
4979
4980 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4981 if let Some(file) = self.file() {
4982 if file.path().file_name().is_none() || include_root {
4983 Some(file.full_path(cx).to_string_lossy().into_owned())
4984 } else {
4985 Some(file.path().display(file.path_style(cx)).to_string())
4986 }
4987 } else {
4988 None
4989 }
4990 }
4991
4992 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4993 let query_str = query.fuzzy_contents;
4994 if query_str.is_some_and(|query| query.is_empty()) {
4995 return BTreeMap::default();
4996 }
4997
4998 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4999 language,
5000 override_id: None,
5001 }));
5002
5003 let mut query_ix = 0;
5004 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5005 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5006
5007 let mut words = BTreeMap::default();
5008 let mut current_word_start_ix = None;
5009 let mut chunk_ix = query.range.start;
5010 for chunk in self.chunks(query.range, false) {
5011 for (i, c) in chunk.text.char_indices() {
5012 let ix = chunk_ix + i;
5013 if classifier.is_word(c) {
5014 if current_word_start_ix.is_none() {
5015 current_word_start_ix = Some(ix);
5016 }
5017
5018 if let Some(query_chars) = &query_chars
5019 && query_ix < query_len
5020 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5021 {
5022 query_ix += 1;
5023 }
5024 continue;
5025 } else if let Some(word_start) = current_word_start_ix.take()
5026 && query_ix == query_len
5027 {
5028 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5029 let mut word_text = self.text_for_range(word_start..ix).peekable();
5030 let first_char = word_text
5031 .peek()
5032 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words and
                    // "words" that start with a digit.
5034 if !query.skip_digits
5035 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5036 {
5037 words.insert(word_text.collect(), word_range);
5038 }
5039 }
5040 query_ix = 0;
5041 }
5042 chunk_ix += chunk.text.len();
5043 }
5044
5045 words
5046 }
5047}
5048
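/// Parameters for [`BufferSnapshot::words_in_range`].
///
/// A usage sketch, assuming a `BufferSnapshot` named `snapshot` (not a doctest):
///
/// ```ignore
/// // Words in the whole buffer that fuzzily contain "cfg", skipping digit-led tokens.
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```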
5049pub struct WordsQuery<'a> {
    /// Only returns words that contain all of the characters of the fuzzy string, in order.
5051 pub fuzzy_contents: Option<&'a str>,
5052 /// Skips words that start with a digit.
5053 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
5055 pub range: Range<usize>,
5056}
5057
5058fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5059 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5060}
5061
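/// Computes the [`IndentSize`] described by the leading whitespace of the given characters.
///
/// A small illustration (not a doctest, since this helper is private):
///
/// ```ignore
/// // Two leading tabs: length 2, kind `IndentKind::Tab`.
/// let size = indent_size_for_text("\t\tif foo {".chars());
/// assert_eq!(size.len, 2);
/// assert_eq!(size.kind, IndentKind::Tab);
/// ```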
5062fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5063 let mut result = IndentSize::spaces(0);
5064 for c in text {
5065 let kind = match c {
5066 ' ' => IndentKind::Space,
5067 '\t' => IndentKind::Tab,
5068 _ => break,
5069 };
5070 if result.len == 0 {
5071 result.kind = kind;
5072 }
5073 result.len += 1;
5074 }
5075 result
5076}
5077
5078impl Clone for BufferSnapshot {
5079 fn clone(&self) -> Self {
5080 Self {
5081 text: self.text.clone(),
5082 syntax: self.syntax.clone(),
5083 file: self.file.clone(),
5084 remote_selections: self.remote_selections.clone(),
5085 diagnostics: self.diagnostics.clone(),
5086 language: self.language.clone(),
5087 tree_sitter_data: self.tree_sitter_data.clone(),
5088 non_text_state_update_count: self.non_text_state_update_count,
5089 }
5090 }
5091}
5092
5093impl Deref for BufferSnapshot {
5094 type Target = text::BufferSnapshot;
5095
5096 fn deref(&self) -> &Self::Target {
5097 &self.text
5098 }
5099}
5100
5101unsafe impl Send for BufferChunks<'_> {}
5102
5103impl<'a> BufferChunks<'a> {
5104 pub(crate) fn new(
5105 text: &'a Rope,
5106 range: Range<usize>,
5107 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5108 diagnostics: bool,
5109 buffer_snapshot: Option<&'a BufferSnapshot>,
5110 ) -> Self {
5111 let mut highlights = None;
5112 if let Some((captures, highlight_maps)) = syntax {
5113 highlights = Some(BufferChunkHighlights {
5114 captures,
5115 next_capture: None,
5116 stack: Default::default(),
5117 highlight_maps,
5118 })
5119 }
5120
5121 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5122 let chunks = text.chunks_in_range(range.clone());
5123
5124 let mut this = BufferChunks {
5125 range,
5126 buffer_snapshot,
5127 chunks,
5128 diagnostic_endpoints,
5129 error_depth: 0,
5130 warning_depth: 0,
5131 information_depth: 0,
5132 hint_depth: 0,
5133 unnecessary_depth: 0,
5134 underline: true,
5135 highlights,
5136 };
5137 this.initialize_diagnostic_endpoints();
5138 this
5139 }
5140
    /// Seeks to the given byte range in the buffer.
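    ///
    /// A usage sketch, assuming a `BufferSnapshot` named `snapshot` (not a doctest):
    ///
    /// ```ignore
    /// let mut chunks = snapshot.chunks(0..snapshot.len(), true);
    /// // Narrow the iterator to a subrange; existing highlight state is reused when possible.
    /// chunks.seek(128..256);
    /// let text: String = chunks.map(|chunk| chunk.text).collect();
    /// ```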
5142 pub fn seek(&mut self, range: Range<usize>) {
5143 let old_range = std::mem::replace(&mut self.range, range.clone());
5144 self.chunks.set_range(self.range.clone());
5145 if let Some(highlights) = self.highlights.as_mut() {
5146 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5147 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5148 highlights
5149 .stack
5150 .retain(|(end_offset, _)| *end_offset > range.start);
5151 if let Some(capture) = &highlights.next_capture
5152 && range.start >= capture.node.start_byte()
5153 {
5154 let next_capture_end = capture.node.end_byte();
5155 if range.start < next_capture_end {
5156 highlights.stack.push((
5157 next_capture_end,
5158 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5159 ));
5160 }
5161 highlights.next_capture.take();
5162 }
5163 } else if let Some(snapshot) = self.buffer_snapshot {
5164 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5165 *highlights = BufferChunkHighlights {
5166 captures,
5167 next_capture: None,
5168 stack: Default::default(),
5169 highlight_maps,
5170 };
5171 } else {
5172 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5173 // Seeking such BufferChunks is not supported.
5174 debug_assert!(
5175 false,
5176 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5177 );
5178 }
5179
5180 highlights.captures.set_byte_range(self.range.clone());
5181 self.initialize_diagnostic_endpoints();
5182 }
5183 }
5184
5185 fn initialize_diagnostic_endpoints(&mut self) {
5186 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5187 && let Some(buffer) = self.buffer_snapshot
5188 {
5189 let mut diagnostic_endpoints = Vec::new();
5190 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5191 diagnostic_endpoints.push(DiagnosticEndpoint {
5192 offset: entry.range.start,
5193 is_start: true,
5194 severity: entry.diagnostic.severity,
5195 is_unnecessary: entry.diagnostic.is_unnecessary,
5196 underline: entry.diagnostic.underline,
5197 });
5198 diagnostic_endpoints.push(DiagnosticEndpoint {
5199 offset: entry.range.end,
5200 is_start: false,
5201 severity: entry.diagnostic.severity,
5202 is_unnecessary: entry.diagnostic.is_unnecessary,
5203 underline: entry.diagnostic.underline,
5204 });
5205 }
5206 diagnostic_endpoints
5207 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5208 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5209 self.hint_depth = 0;
5210 self.error_depth = 0;
5211 self.warning_depth = 0;
5212 self.information_depth = 0;
5213 }
5214 }
5215
5216 /// The current byte offset in the buffer.
5217 pub fn offset(&self) -> usize {
5218 self.range.start
5219 }
5220
5221 pub fn range(&self) -> Range<usize> {
5222 self.range.clone()
5223 }
5224
5225 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5226 let depth = match endpoint.severity {
5227 DiagnosticSeverity::ERROR => &mut self.error_depth,
5228 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5229 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5230 DiagnosticSeverity::HINT => &mut self.hint_depth,
5231 _ => return,
5232 };
5233 if endpoint.is_start {
5234 *depth += 1;
5235 } else {
5236 *depth -= 1;
5237 }
5238
5239 if endpoint.is_unnecessary {
5240 if endpoint.is_start {
5241 self.unnecessary_depth += 1;
5242 } else {
5243 self.unnecessary_depth -= 1;
5244 }
5245 }
5246 }
5247
5248 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5249 if self.error_depth > 0 {
5250 Some(DiagnosticSeverity::ERROR)
5251 } else if self.warning_depth > 0 {
5252 Some(DiagnosticSeverity::WARNING)
5253 } else if self.information_depth > 0 {
5254 Some(DiagnosticSeverity::INFORMATION)
5255 } else if self.hint_depth > 0 {
5256 Some(DiagnosticSeverity::HINT)
5257 } else {
5258 None
5259 }
5260 }
5261
5262 fn current_code_is_unnecessary(&self) -> bool {
5263 self.unnecessary_depth > 0
5264 }
5265}
5266
5267impl<'a> Iterator for BufferChunks<'a> {
5268 type Item = Chunk<'a>;
5269
5270 fn next(&mut self) -> Option<Self::Item> {
5271 let mut next_capture_start = usize::MAX;
5272 let mut next_diagnostic_endpoint = usize::MAX;
5273
5274 if let Some(highlights) = self.highlights.as_mut() {
5275 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5276 if *parent_capture_end <= self.range.start {
5277 highlights.stack.pop();
5278 } else {
5279 break;
5280 }
5281 }
5282
5283 if highlights.next_capture.is_none() {
5284 highlights.next_capture = highlights.captures.next();
5285 }
5286
5287 while let Some(capture) = highlights.next_capture.as_ref() {
5288 if self.range.start < capture.node.start_byte() {
5289 next_capture_start = capture.node.start_byte();
5290 break;
5291 } else {
5292 let highlight_id =
5293 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5294 highlights
5295 .stack
5296 .push((capture.node.end_byte(), highlight_id));
5297 highlights.next_capture = highlights.captures.next();
5298 }
5299 }
5300 }
5301
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

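        // Emit the next chunk, clipped at the nearest upcoming capture start,
        // diagnostic endpoint, or enclosing capture end, and shift the
        // per-byte tab/char bitmaps so that bit 0 lines up with the start of
        // the emitted slice.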
        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes the current [`IndentSize`] and returns a new one that has been
    /// grown or shrunk by `size` in the given direction: [`Ordering::Greater`]
    /// grows the indent, [`Ordering::Less`] shrinks it (only when the whole
    /// delta fits), and [`Ordering::Equal`] leaves it unchanged. A delta whose
    /// [`IndentKind`] differs from this indent's is ignored, except when
    /// growing an empty indent, which adopts the delta wholesale.
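    ///
    /// # Examples
    ///
    /// A minimal sketch of the intended behavior, marked `ignore` because it
    /// assumes `IndentSize` and its `len` field are publicly reachable from
    /// outside this module:
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    ///
    /// // Growing a space indent by another space indent adds the lengths.
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4)).len, 8);
    /// // Shrinking removes the delta when it fits entirely.
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2)).len, 2);
    /// // A delta of a different kind leaves the indent unchanged.
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::tab()).len, 4);
    /// ```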
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

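    /// The width of this indent in columns, counting each tab as `tab_size`
    /// columns.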
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

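/// Coalesces an ascending iterator of `u32` values into ranges of consecutive
/// values, splitting any range that would exceed `max_len` values.
///
/// For example, `[1, 2, 3, 5]` with a `max_len` of 2 yields `1..3`, `3..4`,
/// and `5..6`.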
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

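/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally consulting a language's [`LanguageScope`] for additional word
/// characters.
///
/// A minimal usage sketch, marked `ignore` because it assumes `CharClassifier`
/// and `CharKind` are publicly reachable from outside this module:
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(true);
/// assert_eq!(classifier.kind('x'), CharKind::Word);
/// assert_eq!(classifier.kind('-'), CharKind::Word); // punctuation is ignored
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// ```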
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

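    /// Classifies `c`, overriding this classifier's `ignore_punctuation`
    /// setting with the given value. Alphanumerics and `_` are always words;
    /// next, any extra word characters declared by the language scope (chosen
    /// according to the [`CharScopeContext`]) count as words; then whitespace
    /// is classified as such; everything else is punctuation unless
    /// `ignore_punctuation` is set, in which case it is treated as a word.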
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
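///
/// # Examples
///
/// A minimal sketch of the expected output, marked `ignore` because it assumes
/// `Rope` implements `From<&str>` and that both items are reachable from
/// outside this module:
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n}\n");
/// // Only the two spaces before the first newline are trailing whitespace.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..13]);
/// ```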
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}