use crate::{
    DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
    TextObject, TreeSitterOptions,
    diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
    language_settings::{LanguageSettings, language_settings},
    outline::OutlineItem,
    syntax_map::{
        SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
        SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
    },
    task_context::RunnableRange,
    text_diff::text_diff,
};
pub use crate::{
    Grammar, Language, LanguageRegistry,
    diagnostic_set::DiagnosticSet,
    highlight_map::{HighlightId, HighlightMap},
    proto,
};
use anyhow::{Context as _, Result};
use clock::Lamport;
pub use clock::ReplicaId;
use collections::HashMap;
use fs::MTime;
use futures::channel::oneshot;
use gpui::{
    App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
    Task, TaskLabel, TextStyle,
};

use lsp::{LanguageServerId, NumberOrString};
use parking_lot::Mutex;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use settings::WorktreeId;
use smallvec::SmallVec;
use smol::future::yield_now;
use std::{
    any::Any,
    borrow::Cow,
    cell::Cell,
    cmp::{self, Ordering, Reverse},
    collections::{BTreeMap, BTreeSet},
    future::Future,
    iter::{self, Iterator, Peekable},
    mem,
    num::NonZeroU32,
    ops::{Deref, Range},
    path::PathBuf,
    rc,
    sync::{Arc, LazyLock},
    time::{Duration, Instant},
    vec,
};
use sum_tree::TreeMap;
use text::operation_queue::OperationQueue;
use text::*;
pub use text::{
    Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
    LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
    SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
    ToPointUtf16, Transaction, TransactionId, Unclipped,
};
use theme::{ActiveTheme as _, SyntaxTheme};
#[cfg(any(test, feature = "test-support"))]
use util::RandomCharIter;
use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};

#[cfg(any(test, feature = "test-support"))]
pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};

pub use lsp::DiagnosticSeverity;

/// A label for the background task spawned by the buffer to compute
/// a diff against the contents of its file.
pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);

/// Indicate whether a [`Buffer`] has permissions to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {
    /// The buffer is a mutable replica.
    ReadWrite,
    /// The buffer is a read-only replica.
    ReadOnly,
}

pub type BufferRow = u32;

/// An in-memory representation of a source code file, including its text,
/// syntax trees, git status, and diagnostics.
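///
/// # Example
///
/// A minimal usage sketch, assuming a `gpui` app context is available (for
/// example in a test); illustrative rather than compiled here:
///
/// ```ignore
/// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
/// let snapshot = buffer.read(cx).snapshot();
/// ```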
pub struct Buffer {
    text: TextBuffer,
    branch_state: Option<BufferBranchState>,
    /// Filesystem state, `None` when there is no path.
    file: Option<Arc<dyn File>>,
    /// The mtime of the file when this buffer was last loaded from
    /// or saved to disk.
    saved_mtime: Option<MTime>,
    /// The version vector when this buffer was last loaded from
    /// or saved to disk.
    saved_version: clock::Global,
    preview_version: clock::Global,
    transaction_depth: usize,
    was_dirty_before_starting_transaction: Option<bool>,
    reload_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
    pending_autoindent: Option<Task<()>>,
    sync_parse_timeout: Duration,
    syntax_map: Mutex<SyntaxMap>,
    reparse: Option<Task<()>>,
    parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
    non_text_state_update_count: usize,
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    diagnostics_timestamp: clock::Lamport,
    completion_triggers: BTreeSet<String>,
    completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
    completion_triggers_timestamp: clock::Lamport,
    deferred_ops: OperationQueue<Operation>,
    capability: Capability,
    has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` as of the last call.
    has_unsaved_edits: Cell<(clock::Global, bool)>,
    change_bits: Vec<rc::Weak<Cell<bool>>>,
    _subscriptions: Vec<gpui::Subscription>,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ParseStatus {
    Idle,
    Parsing,
}

struct BufferBranchState {
    base_buffer: Entity<Buffer>,
    merged_operations: Vec<Lamport>,
}

/// An immutable, cheaply cloneable representation of a fixed
/// state of a buffer.
pub struct BufferSnapshot {
    pub text: text::BufferSnapshot,
    pub syntax: SyntaxSnapshot,
    file: Option<Arc<dyn File>>,
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    language: Option<Arc<Language>>,
    non_text_state_update_count: usize,
}

/// The kind and amount of indentation in a particular line. For now,
/// assumes that indentation is all the same character.
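///
/// For example, an indent of four spaces is represented as
/// `IndentSize { len: 4, kind: IndentKind::Space }`, and an indent of one tab
/// as `IndentSize { len: 1, kind: IndentKind::Tab }`.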
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub struct IndentSize {
    /// The number of bytes that comprise the indentation.
    pub len: u32,
    /// The kind of whitespace used for indentation.
    pub kind: IndentKind,
}

/// A whitespace character that's used for indentation.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub enum IndentKind {
    /// An ASCII space character.
    #[default]
    Space,
    /// An ASCII tab character.
    Tab,
}

/// The shape of a selection cursor.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum CursorShape {
    /// A vertical bar
    #[default]
    Bar,
    /// A block that surrounds the following character
    Block,
    /// An underline that runs along the following character
    Underline,
    /// A box drawn around the following character
    Hollow,
}

impl From<settings::CursorShape> for CursorShape {
    fn from(shape: settings::CursorShape) -> Self {
        match shape {
            settings::CursorShape::Bar => CursorShape::Bar,
            settings::CursorShape::Block => CursorShape::Block,
            settings::CursorShape::Underline => CursorShape::Underline,
            settings::CursorShape::Hollow => CursorShape::Hollow,
        }
    }
}

#[derive(Clone, Debug)]
struct SelectionSet {
    line_mode: bool,
    cursor_shape: CursorShape,
    selections: Arc<[Selection<Anchor>]>,
    lamport_timestamp: clock::Lamport,
}

/// A diagnostic associated with a certain range of a buffer.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Diagnostic {
    /// The name of the service that produced this diagnostic.
    pub source: Option<String>,
    /// A machine-readable code that identifies this diagnostic.
    pub code: Option<NumberOrString>,
    pub code_description: Option<lsp::Uri>,
    /// Whether this diagnostic is a hint, warning, or error.
    pub severity: DiagnosticSeverity,
    /// The human-readable message associated with this diagnostic.
    pub message: String,
    /// The human-readable message (in markdown format).
    pub markdown: Option<String>,
    /// An id that identifies the group to which this diagnostic belongs.
    ///
    /// When a language server produces a diagnostic with
    /// one or more associated diagnostics, those diagnostics are all
    /// assigned a single group ID.
    pub group_id: usize,
    /// Whether this diagnostic is the primary diagnostic for its group.
    ///
    /// In a given group, the primary diagnostic is the top-level diagnostic
    /// returned by the language server. The non-primary diagnostics are the
    /// associated diagnostics.
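    ///
    /// For example, a type error and its associated "expected due to this"
    /// note typically arrive as a single group: both share one `group_id`,
    /// the error is the primary diagnostic, and the note is not.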
    pub is_primary: bool,
    /// Whether this diagnostic is considered to originate from an analysis of
    /// files on disk, as opposed to any unsaved buffer contents. This is a
    /// property of a given diagnostic source, and is configured for a given
    /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
    /// for the language server.
    pub is_disk_based: bool,
    /// Whether this diagnostic marks unnecessary code.
    pub is_unnecessary: bool,
    /// Allows quickly separating groups of diagnostics by their source.
    pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the server when code actions are requested for this diagnostic.
    pub data: Option<Value>,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum DiagnosticSourceKind {
    Pulled,
    Pushed,
    Other,
}

/// An operation used to synchronize this buffer with its other replicas.
#[derive(Clone, Debug, PartialEq)]
pub enum Operation {
    /// A text operation.
    Buffer(text::Operation),

    /// An update to the buffer's diagnostics.
    UpdateDiagnostics {
        /// The id of the language server that produced the new diagnostics.
        server_id: LanguageServerId,
        /// The diagnostics.
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },

    /// An update to the most recent selections in this buffer.
    UpdateSelections {
        /// The selections.
        selections: Arc<[Selection<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// Whether the selections are in 'line mode'.
        line_mode: bool,
        /// The [`CursorShape`] associated with these selections.
        cursor_shape: CursorShape,
    },

    /// An update to the characters that should trigger autocompletion
    /// for this buffer.
    UpdateCompletionTriggers {
        /// The characters that trigger autocompletion.
        triggers: Vec<String>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// The language server ID.
        server_id: LanguageServerId,
    },

    /// An update to the line ending type of this buffer.
    UpdateLineEnding {
        /// The line ending type.
        line_ending: LineEnding,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },
}

/// An event that occurs in a buffer.
#[derive(Clone, Debug, PartialEq)]
pub enum BufferEvent {
    /// The buffer was changed in a way that must be
    /// propagated to its other replicas.
    Operation {
        operation: Operation,
        is_local: bool,
    },
    /// The buffer was edited.
    Edited,
    /// The buffer's `dirty` bit changed.
    DirtyChanged,
    /// The buffer was saved.
    Saved,
    /// The buffer's file was changed on disk.
    FileHandleChanged,
    /// The buffer was reloaded.
    Reloaded,
    /// The buffer is in need of a reload.
    ReloadNeeded,
    /// The buffer's language was changed.
    LanguageChanged,
    /// The buffer's syntax trees were updated.
    Reparsed,
    /// The buffer's diagnostics were updated.
    DiagnosticsUpdated,
    /// The buffer gained or lost editing capabilities.
    CapabilityChanged,
}

/// The file associated with a buffer.
pub trait File: Send + Sync + Any {
    /// Returns the [`LocalFile`] associated with this file, if the
    /// file is local.
    fn as_local(&self) -> Option<&dyn LocalFile>;

    /// Returns whether this file is local.
    fn is_local(&self) -> bool {
        self.as_local().is_some()
    }

    /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
    /// only available in some states, such as modification time.
    fn disk_state(&self) -> DiskState;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<RelPath>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self, cx: &App) -> PathBuf;

    /// Returns the path style of this file.
    fn path_style(&self, cx: &App) -> PathStyle;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;

    /// Returns the id of the worktree to which this file belongs.
    ///
    /// This is needed for looking up project-specific settings.
    fn worktree_id(&self, cx: &App) -> WorktreeId;

    /// Converts this file into a protobuf message.
    fn to_proto(&self, cx: &App) -> rpc::proto::File;

    /// Return whether Zed considers this to be a private file.
    fn is_private(&self) -> bool;
}

/// The file's storage status - whether it's stored (`Present`), and if so when it was last
/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
/// indicator for new files.
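///
/// # Example
///
/// A minimal sketch of how the states answer common queries (illustrative,
/// not compiled here):
///
/// ```ignore
/// assert!(!DiskState::New.exists());
/// assert_eq!(DiskState::Deleted.mtime(), None);
/// // Given some `mtime: MTime` for a file on disk:
/// assert!(DiskState::Present { mtime }.exists());
/// ```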
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum DiskState {
    /// File created in Zed that has not been saved.
    New,
    /// File present on the filesystem.
    Present { mtime: MTime },
    /// Deleted file that was previously present.
    Deleted,
}

impl DiskState {
    /// Returns the file's last known modification time on disk.
    pub fn mtime(self) -> Option<MTime> {
        match self {
            DiskState::New => None,
            DiskState::Present { mtime } => Some(mtime),
            DiskState::Deleted => None,
        }
    }

    pub fn exists(&self) -> bool {
        match self {
            DiskState::New => false,
            DiskState::Present { .. } => true,
            DiskState::Deleted => false,
        }
    }
}

/// The file associated with a buffer, in the case where the file is on the local disk.
pub trait LocalFile: File {
    /// Returns the absolute path of this file.
    fn abs_path(&self, cx: &App) -> PathBuf;

    /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
    fn load(&self, cx: &App) -> Task<Result<String>>;

    /// Loads the file's contents from disk.
    fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
}

/// The auto-indent behavior associated with an editing operation.
/// For some editing operations, each affected line of text has its
/// indentation recomputed. For other operations, the entire block
/// of edited text is adjusted uniformly.
#[derive(Clone, Debug)]
pub enum AutoindentMode {
    /// Indent each line of inserted text.
    EachLine,
    /// Apply the same indentation adjustment to all of the lines
    /// in a given insertion.
    Block {
        /// The original indentation column of the first line of each
        /// insertion, if it has been copied.
        ///
        /// Knowing this makes it possible to preserve the relative indentation
        /// of every line in the insertion from when it was copied.
        ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion has its indentation adjusted by `b - a`.
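        ///
        /// For example, if a block was copied from original indent column 4
        /// and its first line is auto-indented to column 8, a line that was at
        /// column 6 in the copied text ends up at column 6 + (8 - 4) = 10.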
        original_indent_columns: Vec<Option<u32>>,
    },
}

#[derive(Clone)]
struct AutoindentRequest {
    before_edit: BufferSnapshot,
    entries: Vec<AutoindentRequestEntry>,
    is_block_mode: bool,
    ignore_empty_lines: bool,
}

#[derive(Debug, Clone)]
struct AutoindentRequestEntry {
    /// A range of the buffer whose indentation should be adjusted.
    range: Range<Anchor>,
    /// Whether or not these lines should be considered brand new, for the
    /// purpose of auto-indent. When text is not new, its indentation will
    /// only be adjusted if the suggested indentation level has *changed*
    /// since the edit was made.
    first_line_is_new: bool,
    indent_size: IndentSize,
    original_indent_column: Option<u32>,
}

#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    delta: Ordering,
    within_error: bool,
}

struct BufferChunkHighlights<'a> {
    captures: SyntaxMapCaptures<'a>,
    next_capture: Option<SyntaxMapCapture<'a>>,
    stack: Vec<(usize, HighlightId)>,
    highlight_maps: Vec<HighlightMap>,
}

/// An iterator that yields chunks of a buffer's text, along with their
/// syntax highlights and diagnostic status.
pub struct BufferChunks<'a> {
    buffer_snapshot: Option<&'a BufferSnapshot>,
    range: Range<usize>,
    chunks: text::Chunks<'a>,
    diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    unnecessary_depth: usize,
    underline: bool,
    highlights: Option<BufferChunkHighlights<'a>>,
}

/// A chunk of a buffer's text, along with its syntax highlight and
/// diagnostic status.
#[derive(Clone, Debug, Default)]
pub struct Chunk<'a> {
    /// The text of the chunk.
    pub text: &'a str,
    /// The syntax highlighting style of the chunk.
    pub syntax_highlight_id: Option<HighlightId>,
    /// The highlight style that has been applied to this chunk in
    /// the editor.
    pub highlight_style: Option<HighlightStyle>,
    /// The severity of diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters are tabs in this string.
    pub tabs: u128,
    /// Bitmap of character indices in this chunk.
    pub chars: u128,
    /// Whether this chunk of text is marked as unnecessary.
    pub is_unnecessary: bool,
    /// Whether this chunk of text was originally a tab character.
    pub is_tab: bool,
    /// Whether this chunk of text was originally an inlay.
    pub is_inlay: bool,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}

/// A set of edits to a given version of a buffer, computed asynchronously.
#[derive(Debug)]
pub struct Diff {
    pub base_version: clock::Global,
    pub line_ending: LineEnding,
    pub edits: Vec<(Range<usize>, Arc<str>)>,
}

#[derive(Debug, Clone, Copy)]
pub(crate) struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    underline: bool,
    severity: DiagnosticSeverity,
    is_unnecessary: bool,
}

/// A class of characters, used for characterizing a run of text.
#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
pub enum CharKind {
    /// Whitespace.
    Whitespace,
    /// Punctuation.
    Punctuation,
    /// Word.
    Word,
}

/// Context for character classification within a specific scope.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum CharScopeContext {
    /// Character classification for completion queries.
    ///
    /// This context treats certain characters as word constituents that would
    /// normally be considered punctuation, such as '-' in Tailwind classes
    /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
    Completion,
    /// Character classification for linked edits.
    ///
    /// This context handles characters that should be treated as part of
    /// identifiers during linked editing operations, such as '.' in JSX
    /// component names like `<Animated.View>`.
    LinkedEdit,
}

/// A runnable is a set of data about a region that could be resolved into a task.
pub struct Runnable {
    pub tags: SmallVec<[RunnableTag; 1]>,
    pub language: Arc<Language>,
    pub buffer: BufferId,
}

#[derive(Default, Clone, Debug)]
pub struct HighlightedText {
    pub text: SharedString,
    pub highlights: Vec<(Range<usize>, HighlightStyle)>,
}

#[derive(Default, Debug)]
struct HighlightedTextBuilder {
    pub text: String,
    highlights: Vec<(Range<usize>, HighlightStyle)>,
}

impl HighlightedText {
    pub fn from_buffer_range<T: ToOffset>(
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) -> Self {
        let mut highlighted_text = HighlightedTextBuilder::default();
        highlighted_text.add_text_from_buffer_range(
            range,
            snapshot,
            syntax_snapshot,
            override_style,
            syntax_theme,
        );
        highlighted_text.build()
    }

    pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
        gpui::StyledText::new(self.text.clone())
            .with_default_highlights(default_style, self.highlights.iter().cloned())
    }

    /// Returns the first line, with leading whitespace trimmed unless a
    /// highlight starts within it, along with a boolean indicating whether
    /// more lines follow.
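    ///
    /// # Example
    ///
    /// A minimal sketch with no highlights (illustrative, not compiled here):
    ///
    /// ```ignore
    /// let text = HighlightedText {
    ///     text: "    let x = 1;\nlet y = 2;".into(),
    ///     highlights: Vec::new(),
    /// };
    /// let (preview, has_more) = text.first_line_preview();
    /// assert_eq!(preview.text.as_ref(), "let x = 1;");
    /// assert!(has_more);
    /// ```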
    pub fn first_line_preview(self) -> (Self, bool) {
        let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
        let first_line = &self.text[..newline_ix];

        // Trim leading whitespace, unless an edit starts prior to it.
        let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
        if let Some((first_highlight_range, _)) = self.highlights.first() {
            preview_start_ix = preview_start_ix.min(first_highlight_range.start);
        }

        let preview_text = &first_line[preview_start_ix..];
        let preview_highlights = self
            .highlights
            .into_iter()
            .skip_while(|(range, _)| range.end <= preview_start_ix)
            .take_while(|(range, _)| range.start < newline_ix)
            .filter_map(|(mut range, highlight)| {
                range.start = range.start.saturating_sub(preview_start_ix);
                range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
                if range.is_empty() {
                    None
                } else {
                    Some((range, highlight))
                }
            });

        let preview = Self {
            text: SharedString::new(preview_text),
            highlights: preview_highlights.collect(),
        };

        (preview, self.text.len() > newline_ix)
    }
}

impl HighlightedTextBuilder {
    pub fn build(self) -> HighlightedText {
        HighlightedText {
            text: self.text.into(),
            highlights: self.highlights,
        }
    }

    pub fn add_text_from_buffer_range<T: ToOffset>(
        &mut self,
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) {
        let range = range.to_offset(snapshot);
        for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
            let start = self.text.len();
            self.text.push_str(chunk.text);
            let end = self.text.len();

            if let Some(highlight_style) = chunk
                .syntax_highlight_id
                .and_then(|id| id.style(syntax_theme))
            {
                let highlight_style = override_style.map_or(highlight_style, |override_style| {
                    highlight_style.highlight(override_style)
                });
                self.highlights.push((start..end, highlight_style));
            } else if let Some(override_style) = override_style {
                self.highlights.push((start..end, override_style));
            }
        }
    }

    fn highlighted_chunks<'a>(
        range: Range<usize>,
        snapshot: &'a text::BufferSnapshot,
        syntax_snapshot: &'a SyntaxSnapshot,
    ) -> BufferChunks<'a> {
        let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
            grammar
                .highlights_config
                .as_ref()
                .map(|config| &config.query)
        });

        let highlight_maps = captures
            .grammars()
            .iter()
            .map(|grammar| grammar.highlight_map())
            .collect();

        BufferChunks::new(
            snapshot.as_rope(),
            range,
            Some((captures, highlight_maps)),
            false,
            None,
        )
    }
}

#[derive(Clone)]
pub struct EditPreview {
    old_snapshot: text::BufferSnapshot,
    applied_edits_snapshot: text::BufferSnapshot,
    syntax_snapshot: SyntaxSnapshot,
}

impl EditPreview {
    pub fn highlight_edits(
        &self,
        current_snapshot: &BufferSnapshot,
        edits: &[(Range<Anchor>, impl AsRef<str>)],
        include_deletions: bool,
        cx: &App,
    ) -> HighlightedText {
        let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
            return HighlightedText::default();
        };

        let mut highlighted_text = HighlightedTextBuilder::default();

        let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;

        let insertion_highlight_style = HighlightStyle {
            background_color: Some(cx.theme().status().created_background),
            ..Default::default()
        };
        let deletion_highlight_style = HighlightStyle {
            background_color: Some(cx.theme().status().deleted_background),
            ..Default::default()
        };
        let syntax_theme = cx.theme().syntax();

        for (range, edit_text) in edits {
            let edit_new_end_in_preview_snapshot = range
                .end
                .bias_right(&self.old_snapshot)
                .to_offset(&self.applied_edits_snapshot);
            let edit_start_in_preview_snapshot =
                edit_new_end_in_preview_snapshot - edit_text.as_ref().len();

            let unchanged_range_in_preview_snapshot =
                offset_in_preview_snapshot..edit_start_in_preview_snapshot;
            if !unchanged_range_in_preview_snapshot.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    unchanged_range_in_preview_snapshot,
                    &self.applied_edits_snapshot,
                    &self.syntax_snapshot,
                    None,
                    syntax_theme,
                );
            }

            let range_in_current_snapshot = range.to_offset(current_snapshot);
            if include_deletions && !range_in_current_snapshot.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
                    Some(deletion_highlight_style),
                    syntax_theme,
                );
            }

            if !edit_text.as_ref().is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
                    &self.applied_edits_snapshot,
                    &self.syntax_snapshot,
                    Some(insertion_highlight_style),
                    syntax_theme,
                );
            }

            offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
        }

        highlighted_text.add_text_from_buffer_range(
            offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
            &self.applied_edits_snapshot,
            &self.syntax_snapshot,
            None,
            syntax_theme,
        );

        highlighted_text.build()
    }

    fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<usize>> {
        let (first, _) = edits.first()?;
        let (last, _) = edits.last()?;

        let start = first
            .start
            .bias_left(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);
        let end = last
            .end
            .bias_right(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);

        // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
        let range = Point::new(start.row, 0)
            ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));

        Some(range.to_offset(&self.applied_edits_snapshot))
    }
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct BracketMatch {
    pub open_range: Range<usize>,
    pub close_range: Range<usize>,
    pub newline_only: bool,
}
827
828impl Buffer {
829 /// Create a new buffer with the given base text.
830 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
831 Self::build(
832 TextBuffer::new(
833 ReplicaId::LOCAL,
834 cx.entity_id().as_non_zero_u64().into(),
835 base_text.into(),
836 ),
837 None,
838 Capability::ReadWrite,
839 )
840 }
841
842 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
843 pub fn local_normalized(
844 base_text_normalized: Rope,
845 line_ending: LineEnding,
846 cx: &Context<Self>,
847 ) -> Self {
848 Self::build(
849 TextBuffer::new_normalized(
850 ReplicaId::LOCAL,
851 cx.entity_id().as_non_zero_u64().into(),
852 line_ending,
853 base_text_normalized,
854 ),
855 None,
856 Capability::ReadWrite,
857 )
858 }
859
860 /// Create a new buffer that is a replica of a remote buffer.
861 pub fn remote(
862 remote_id: BufferId,
863 replica_id: ReplicaId,
864 capability: Capability,
865 base_text: impl Into<String>,
866 ) -> Self {
867 Self::build(
868 TextBuffer::new(replica_id, remote_id, base_text.into()),
869 None,
870 capability,
871 )
872 }
873
874 /// Create a new buffer that is a replica of a remote buffer, populating its
875 /// state from the given protobuf message.
876 pub fn from_proto(
877 replica_id: ReplicaId,
878 capability: Capability,
879 message: proto::BufferState,
880 file: Option<Arc<dyn File>>,
881 ) -> Result<Self> {
882 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
883 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
884 let mut this = Self::build(buffer, file, capability);
885 this.text.set_line_ending(proto::deserialize_line_ending(
886 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
887 ));
888 this.saved_version = proto::deserialize_version(&message.saved_version);
889 this.saved_mtime = message.saved_mtime.map(|time| time.into());
890 Ok(this)
891 }
892
893 /// Serialize the buffer's state to a protobuf message.
894 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
895 proto::BufferState {
896 id: self.remote_id().into(),
897 file: self.file.as_ref().map(|f| f.to_proto(cx)),
898 base_text: self.base_text().to_string(),
899 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
900 saved_version: proto::serialize_version(&self.saved_version),
901 saved_mtime: self.saved_mtime.map(|time| time.into()),
902 }
903 }
904
905 /// Serialize as protobufs all of the changes to the buffer since the given version.
906 pub fn serialize_ops(
907 &self,
908 since: Option<clock::Global>,
909 cx: &App,
910 ) -> Task<Vec<proto::Operation>> {
911 let mut operations = Vec::new();
912 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
913
914 operations.extend(self.remote_selections.iter().map(|(_, set)| {
915 proto::serialize_operation(&Operation::UpdateSelections {
916 selections: set.selections.clone(),
917 lamport_timestamp: set.lamport_timestamp,
918 line_mode: set.line_mode,
919 cursor_shape: set.cursor_shape,
920 })
921 }));
922
923 for (server_id, diagnostics) in &self.diagnostics {
924 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
925 lamport_timestamp: self.diagnostics_timestamp,
926 server_id: *server_id,
927 diagnostics: diagnostics.iter().cloned().collect(),
928 }));
929 }
930
931 for (server_id, completions) in &self.completion_triggers_per_language_server {
932 operations.push(proto::serialize_operation(
933 &Operation::UpdateCompletionTriggers {
934 triggers: completions.iter().cloned().collect(),
935 lamport_timestamp: self.completion_triggers_timestamp,
936 server_id: *server_id,
937 },
938 ));
939 }
940
941 let text_operations = self.text.operations().clone();
942 cx.background_spawn(async move {
943 let since = since.unwrap_or_default();
944 operations.extend(
945 text_operations
946 .iter()
947 .filter(|(_, op)| !since.observed(op.timestamp()))
948 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
949 );
950 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
951 operations
952 })
953 }
954
955 /// Assign a language to the buffer, returning the buffer.
956 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
957 self.set_language(Some(language), cx);
958 self
959 }
960
961 /// Returns the [`Capability`] of this buffer.
962 pub fn capability(&self) -> Capability {
963 self.capability
964 }
965
966 /// Whether this buffer can only be read.
967 pub fn read_only(&self) -> bool {
968 self.capability == Capability::ReadOnly
969 }
970
971 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
972 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
973 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
974 let snapshot = buffer.snapshot();
975 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
976 Self {
977 saved_mtime,
978 saved_version: buffer.version(),
979 preview_version: buffer.version(),
980 reload_task: None,
981 transaction_depth: 0,
982 was_dirty_before_starting_transaction: None,
983 has_unsaved_edits: Cell::new((buffer.version(), false)),
984 text: buffer,
985 branch_state: None,
986 file,
987 capability,
988 syntax_map,
989 reparse: None,
990 non_text_state_update_count: 0,
991 sync_parse_timeout: Duration::from_millis(1),
992 parse_status: watch::channel(ParseStatus::Idle),
993 autoindent_requests: Default::default(),
994 wait_for_autoindent_txs: Default::default(),
995 pending_autoindent: Default::default(),
996 language: None,
997 remote_selections: Default::default(),
998 diagnostics: Default::default(),
999 diagnostics_timestamp: Lamport::MIN,
1000 completion_triggers: Default::default(),
1001 completion_triggers_per_language_server: Default::default(),
1002 completion_triggers_timestamp: Lamport::MIN,
1003 deferred_ops: OperationQueue::new(),
1004 has_conflict: false,
1005 change_bits: Default::default(),
1006 _subscriptions: Vec::new(),
1007 }
1008 }
1009
1010 pub fn build_snapshot(
1011 text: Rope,
1012 language: Option<Arc<Language>>,
1013 language_registry: Option<Arc<LanguageRegistry>>,
1014 cx: &mut App,
1015 ) -> impl Future<Output = BufferSnapshot> + use<> {
1016 let entity_id = cx.reserve_entity::<Self>().entity_id();
1017 let buffer_id = entity_id.as_non_zero_u64().into();
1018 async move {
1019 let text =
1020 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1021 .snapshot();
1022 let mut syntax = SyntaxMap::new(&text).snapshot();
1023 if let Some(language) = language.clone() {
1024 let language_registry = language_registry.clone();
1025 syntax.reparse(&text, language_registry, language);
1026 }
1027 BufferSnapshot {
1028 text,
1029 syntax,
1030 file: None,
1031 diagnostics: Default::default(),
1032 remote_selections: Default::default(),
1033 language,
1034 non_text_state_update_count: 0,
1035 }
1036 }
1037 }
1038
1039 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1040 let entity_id = cx.reserve_entity::<Self>().entity_id();
1041 let buffer_id = entity_id.as_non_zero_u64().into();
1042 let text = TextBuffer::new_normalized(
1043 ReplicaId::LOCAL,
1044 buffer_id,
1045 Default::default(),
1046 Rope::new(),
1047 )
1048 .snapshot();
1049 let syntax = SyntaxMap::new(&text).snapshot();
1050 BufferSnapshot {
1051 text,
1052 syntax,
1053 file: None,
1054 diagnostics: Default::default(),
1055 remote_selections: Default::default(),
1056 language: None,
1057 non_text_state_update_count: 0,
1058 }
1059 }
1060
1061 #[cfg(any(test, feature = "test-support"))]
1062 pub fn build_snapshot_sync(
1063 text: Rope,
1064 language: Option<Arc<Language>>,
1065 language_registry: Option<Arc<LanguageRegistry>>,
1066 cx: &mut App,
1067 ) -> BufferSnapshot {
1068 let entity_id = cx.reserve_entity::<Self>().entity_id();
1069 let buffer_id = entity_id.as_non_zero_u64().into();
1070 let text =
1071 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1072 .snapshot();
1073 let mut syntax = SyntaxMap::new(&text).snapshot();
1074 if let Some(language) = language.clone() {
1075 syntax.reparse(&text, language_registry, language);
1076 }
1077 BufferSnapshot {
1078 text,
1079 syntax,
1080 file: None,
1081 diagnostics: Default::default(),
1082 remote_selections: Default::default(),
1083 language,
1084 non_text_state_update_count: 0,
1085 }
1086 }
1087
    /// Retrieve a snapshot of the buffer's current state. This is computationally
    /// cheap, and allows reading from the buffer on a background thread.
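    ///
    /// A minimal sketch, assuming an `Entity<Buffer>` named `buffer` and a
    /// `gpui` app context (illustrative, not compiled here):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     let last_row = snapshot.max_point().row;
    /// })
    /// .detach();
    /// ```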
1090 pub fn snapshot(&self) -> BufferSnapshot {
1091 let text = self.text.snapshot();
1092 let mut syntax_map = self.syntax_map.lock();
1093 syntax_map.interpolate(&text);
1094 let syntax = syntax_map.snapshot();
1095
1096 BufferSnapshot {
1097 text,
1098 syntax,
1099 file: self.file.clone(),
1100 remote_selections: self.remote_selections.clone(),
1101 diagnostics: self.diagnostics.clone(),
1102 language: self.language.clone(),
1103 non_text_state_update_count: self.non_text_state_update_count,
1104 }
1105 }
1106
1107 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1108 let this = cx.entity();
1109 cx.new(|cx| {
1110 let mut branch = Self {
1111 branch_state: Some(BufferBranchState {
1112 base_buffer: this.clone(),
1113 merged_operations: Default::default(),
1114 }),
1115 language: self.language.clone(),
1116 has_conflict: self.has_conflict,
1117 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1118 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1119 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1120 };
1121 if let Some(language_registry) = self.language_registry() {
1122 branch.set_language_registry(language_registry);
1123 }
1124
1125 // Reparse the branch buffer so that we get syntax highlighting immediately.
1126 branch.reparse(cx);
1127
1128 branch
1129 })
1130 }
1131
1132 pub fn preview_edits(
1133 &self,
1134 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1135 cx: &App,
1136 ) -> Task<EditPreview> {
1137 let registry = self.language_registry();
1138 let language = self.language().cloned();
1139 let old_snapshot = self.text.snapshot();
1140 let mut branch_buffer = self.text.branch();
1141 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1142 cx.background_spawn(async move {
1143 if !edits.is_empty() {
1144 if let Some(language) = language.clone() {
1145 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1146 }
1147
1148 branch_buffer.edit(edits.iter().cloned());
1149 let snapshot = branch_buffer.snapshot();
1150 syntax_snapshot.interpolate(&snapshot);
1151
1152 if let Some(language) = language {
1153 syntax_snapshot.reparse(&snapshot, registry, language);
1154 }
1155 }
1156 EditPreview {
1157 old_snapshot,
1158 applied_edits_snapshot: branch_buffer.snapshot(),
1159 syntax_snapshot,
1160 }
1161 })
1162 }
1163
    /// Applies all of the changes in this buffer that intersect any of the
    /// given `ranges` to its base buffer.
    ///
    /// If `ranges` is empty, then all changes will be applied. This buffer must
    /// be a branch buffer to call this method.
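    ///
    /// A minimal usage sketch, assuming `branch` was previously created via
    /// [`Buffer::branch`] (illustrative, not compiled here):
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| branch.merge_into_base(Vec::new(), cx));
    /// ```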
1169 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1170 let Some(base_buffer) = self.base_buffer() else {
1171 debug_panic!("not a branch buffer");
1172 return;
1173 };
1174
1175 let mut ranges = if ranges.is_empty() {
1176 &[0..usize::MAX]
1177 } else {
1178 ranges.as_slice()
1179 }
1180 .iter()
1181 .peekable();
1182
1183 let mut edits = Vec::new();
1184 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1185 let mut is_included = false;
1186 while let Some(range) = ranges.peek() {
1187 if range.end < edit.new.start {
1188 ranges.next().unwrap();
1189 } else {
1190 if range.start <= edit.new.end {
1191 is_included = true;
1192 }
1193 break;
1194 }
1195 }
1196
1197 if is_included {
1198 edits.push((
1199 edit.old.clone(),
1200 self.text_for_range(edit.new.clone()).collect::<String>(),
1201 ));
1202 }
1203 }
1204
1205 let operation = base_buffer.update(cx, |base_buffer, cx| {
1206 // cx.emit(BufferEvent::DiffBaseChanged);
1207 base_buffer.edit(edits, None, cx)
1208 });
1209
1210 if let Some(operation) = operation
1211 && let Some(BufferBranchState {
1212 merged_operations, ..
1213 }) = &mut self.branch_state
1214 {
1215 merged_operations.push(operation);
1216 }
1217 }
1218
1219 fn on_base_buffer_event(
1220 &mut self,
1221 _: Entity<Buffer>,
1222 event: &BufferEvent,
1223 cx: &mut Context<Self>,
1224 ) {
1225 let BufferEvent::Operation { operation, .. } = event else {
1226 return;
1227 };
1228 let Some(BufferBranchState {
1229 merged_operations, ..
1230 }) = &mut self.branch_state
1231 else {
1232 return;
1233 };
1234
1235 let mut operation_to_undo = None;
1236 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1237 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1238 {
1239 merged_operations.remove(ix);
1240 operation_to_undo = Some(operation.timestamp);
1241 }
1242
1243 self.apply_ops([operation.clone()], cx);
1244
1245 if let Some(timestamp) = operation_to_undo {
1246 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1247 self.undo_operations(counts, cx);
1248 }
1249 }
1250
1251 #[cfg(test)]
1252 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1253 &self.text
1254 }
1255
1256 /// Retrieve a snapshot of the buffer's raw text, without any
1257 /// language-related state like the syntax tree or diagnostics.
1258 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1259 self.text.snapshot()
1260 }
1261
1262 /// The file associated with the buffer, if any.
1263 pub fn file(&self) -> Option<&Arc<dyn File>> {
1264 self.file.as_ref()
1265 }
1266
1267 /// The version of the buffer that was last saved or reloaded from disk.
1268 pub fn saved_version(&self) -> &clock::Global {
1269 &self.saved_version
1270 }
1271
1272 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1273 pub fn saved_mtime(&self) -> Option<MTime> {
1274 self.saved_mtime
1275 }
1276
1277 /// Assign a language to the buffer.
1278 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1279 self.non_text_state_update_count += 1;
1280 self.syntax_map.lock().clear(&self.text);
1281 self.language = language;
1282 self.was_changed();
1283 self.reparse(cx);
1284 cx.emit(BufferEvent::LanguageChanged);
1285 }
1286
1287 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1288 /// other languages if parts of the buffer are written in different languages.
1289 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1290 self.syntax_map
1291 .lock()
1292 .set_language_registry(language_registry);
1293 }
1294
1295 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1296 self.syntax_map.lock().language_registry()
1297 }
1298
1299 /// Assign the line ending type to the buffer.
1300 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1301 self.text.set_line_ending(line_ending);
1302
1303 let lamport_timestamp = self.text.lamport_clock.tick();
1304 self.send_operation(
1305 Operation::UpdateLineEnding {
1306 line_ending,
1307 lamport_timestamp,
1308 },
1309 true,
1310 cx,
1311 );
1312 }
1313
1314 /// Assign the buffer a new [`Capability`].
1315 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1316 if self.capability != capability {
1317 self.capability = capability;
1318 cx.emit(BufferEvent::CapabilityChanged)
1319 }
1320 }
1321
1322 /// This method is called to signal that the buffer has been saved.
1323 pub fn did_save(
1324 &mut self,
1325 version: clock::Global,
1326 mtime: Option<MTime>,
1327 cx: &mut Context<Self>,
1328 ) {
1329 self.saved_version = version.clone();
1330 self.has_unsaved_edits.set((version, false));
1331 self.has_conflict = false;
1332 self.saved_mtime = mtime;
1333 self.was_changed();
1334 cx.emit(BufferEvent::Saved);
1335 cx.notify();
1336 }
1337
1338 /// Reloads the contents of the buffer from disk.
1339 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1340 let (tx, rx) = futures::channel::oneshot::channel();
1341 let prev_version = self.text.version();
1342 self.reload_task = Some(cx.spawn(async move |this, cx| {
1343 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1344 let file = this.file.as_ref()?.as_local()?;
1345
1346 Some((file.disk_state().mtime(), file.load(cx)))
1347 })?
1348 else {
1349 return Ok(());
1350 };
1351
1352 let new_text = new_text.await?;
1353 let diff = this
1354 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1355 .await;
1356 this.update(cx, |this, cx| {
1357 if this.version() == diff.base_version {
1358 this.finalize_last_transaction();
1359 this.apply_diff(diff, cx);
1360 tx.send(this.finalize_last_transaction().cloned()).ok();
1361 this.has_conflict = false;
1362 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1363 } else {
1364 if !diff.edits.is_empty()
1365 || this
1366 .edits_since::<usize>(&diff.base_version)
1367 .next()
1368 .is_some()
1369 {
1370 this.has_conflict = true;
1371 }
1372
1373 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1374 }
1375
1376 this.reload_task.take();
1377 })
1378 }));
1379 rx
1380 }
1381
1382 /// This method is called to signal that the buffer has been reloaded.
1383 pub fn did_reload(
1384 &mut self,
1385 version: clock::Global,
1386 line_ending: LineEnding,
1387 mtime: Option<MTime>,
1388 cx: &mut Context<Self>,
1389 ) {
1390 self.saved_version = version;
1391 self.has_unsaved_edits
1392 .set((self.saved_version.clone(), false));
1393 self.text.set_line_ending(line_ending);
1394 self.saved_mtime = mtime;
1395 cx.emit(BufferEvent::Reloaded);
1396 cx.notify();
1397 }
1398
1399 /// Updates the [`File`] backing this buffer. This should be called when
1400 /// the file has changed or has been deleted.
1401 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1402 let was_dirty = self.is_dirty();
1403 let mut file_changed = false;
1404
1405 if let Some(old_file) = self.file.as_ref() {
1406 if new_file.path() != old_file.path() {
1407 file_changed = true;
1408 }
1409
1410 let old_state = old_file.disk_state();
1411 let new_state = new_file.disk_state();
1412 if old_state != new_state {
1413 file_changed = true;
1414 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1415 cx.emit(BufferEvent::ReloadNeeded)
1416 }
1417 }
1418 } else {
1419 file_changed = true;
1420 };
1421
1422 self.file = Some(new_file);
1423 if file_changed {
1424 self.was_changed();
1425 self.non_text_state_update_count += 1;
1426 if was_dirty != self.is_dirty() {
1427 cx.emit(BufferEvent::DirtyChanged);
1428 }
1429 cx.emit(BufferEvent::FileHandleChanged);
1430 cx.notify();
1431 }
1432 }
1433
1434 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1435 Some(self.branch_state.as_ref()?.base_buffer.clone())
1436 }
1437
1438 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1439 pub fn language(&self) -> Option<&Arc<Language>> {
1440 self.language.as_ref()
1441 }
1442
1443 /// Returns the [`Language`] at the given location.
1444 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1445 let offset = position.to_offset(self);
1446 let mut is_first = true;
1447 let start_anchor = self.anchor_before(offset);
1448 let end_anchor = self.anchor_after(offset);
1449 self.syntax_map
1450 .lock()
1451 .layers_for_range(offset..offset, &self.text, false)
1452 .filter(|layer| {
1453 if is_first {
1454 is_first = false;
1455 return true;
1456 }
1457
1458 layer
1459 .included_sub_ranges
1460 .map(|sub_ranges| {
1461 sub_ranges.iter().any(|sub_range| {
1462 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1463 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1464 !is_before_start && !is_after_end
1465 })
1466 })
1467 .unwrap_or(true)
1468 })
1469 .last()
1470 .map(|info| info.language.clone())
1471 .or_else(|| self.language.clone())
1472 }
1473
1474 /// Returns each [`Language`] for the active syntax layers at the given location.
1475 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1476 let offset = position.to_offset(self);
1477 let mut languages: Vec<Arc<Language>> = self
1478 .syntax_map
1479 .lock()
1480 .layers_for_range(offset..offset, &self.text, false)
1481 .map(|info| info.language.clone())
1482 .collect();
1483
1484 if languages.is_empty()
1485 && let Some(buffer_language) = self.language()
1486 {
1487 languages.push(buffer_language.clone());
1488 }
1489
1490 languages
1491 }
1492
1493 /// An integer version number that accounts for all updates besides
1494 /// the buffer's text itself (which is versioned via a version vector).
1495 pub fn non_text_state_update_count(&self) -> usize {
1496 self.non_text_state_update_count
1497 }
1498
1499 /// Whether the buffer is being parsed in the background.
1500 #[cfg(any(test, feature = "test-support"))]
1501 pub fn is_parsing(&self) -> bool {
1502 self.reparse.is_some()
1503 }
1504
1505 /// Indicates whether the buffer contains any regions that may be
1506 /// written in a language that hasn't been loaded yet.
1507 pub fn contains_unknown_injections(&self) -> bool {
1508 self.syntax_map.lock().contains_unknown_injections()
1509 }
1510
1511 #[cfg(any(test, feature = "test-support"))]
1512 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1513 self.sync_parse_timeout = timeout;
1514 }
1515
    /// Called after an edit to synchronize the buffer's main parse tree with
    /// the buffer's new underlying state.
    ///
    /// Locks the syntax map and interpolates the edits since the last reparse
    /// into the foreground syntax tree.
    ///
    /// Then takes a stable snapshot of the syntax map before unlocking it.
    /// The snapshot with the interpolated edits is sent to a background thread,
    /// where we ask Tree-sitter to perform an incremental parse.
    ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms,
    /// waiting for the parse to complete. If it finishes within that window,
    /// we proceed synchronously.
    ///
    /// If we time out waiting on the parse, we return with the interpolated
    /// tree still in the foreground and spawn a second task that waits for the
    /// parse to finish. When the background parse completes, that task calls
    /// back into the main thread and assigns the new parse state.
    ///
    /// If the buffer or grammar changed since the start of the background
    /// parse, we initiate an additional reparse recursively. To avoid
    /// concurrent parses for the same buffer, a new parse is only initiated
    /// when we are not already parsing in the background.
1539 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1540 if self.reparse.is_some() {
1541 return;
1542 }
1543 let language = if let Some(language) = self.language.clone() {
1544 language
1545 } else {
1546 return;
1547 };
1548
1549 let text = self.text_snapshot();
1550 let parsed_version = self.version();
1551
1552 let mut syntax_map = self.syntax_map.lock();
1553 syntax_map.interpolate(&text);
1554 let language_registry = syntax_map.language_registry();
1555 let mut syntax_snapshot = syntax_map.snapshot();
1556 drop(syntax_map);
1557
1558 let parse_task = cx.background_spawn({
1559 let language = language.clone();
1560 let language_registry = language_registry.clone();
1561 async move {
1562 syntax_snapshot.reparse(&text, language_registry, language);
1563 syntax_snapshot
1564 }
1565 });
1566
1567 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1568 match cx
1569 .background_executor()
1570 .block_with_timeout(self.sync_parse_timeout, parse_task)
1571 {
1572 Ok(new_syntax_snapshot) => {
1573 self.did_finish_parsing(new_syntax_snapshot, cx);
1574 self.reparse = None;
1575 }
1576 Err(parse_task) => {
1577 // todo(lw): hot foreground spawn
1578 self.reparse = Some(cx.spawn(async move |this, cx| {
1579 let new_syntax_map = cx.background_spawn(parse_task).await;
1580 this.update(cx, move |this, cx| {
1581 let grammar_changed = || {
1582 this.language.as_ref().is_none_or(|current_language| {
1583 !Arc::ptr_eq(&language, current_language)
1584 })
1585 };
1586 let language_registry_changed = || {
1587 new_syntax_map.contains_unknown_injections()
1588 && language_registry.is_some_and(|registry| {
1589 registry.version() != new_syntax_map.language_registry_version()
1590 })
1591 };
1592 let parse_again = this.version.changed_since(&parsed_version)
1593 || language_registry_changed()
1594 || grammar_changed();
1595 this.did_finish_parsing(new_syntax_map, cx);
1596 this.reparse = None;
1597 if parse_again {
1598 this.reparse(cx);
1599 }
1600 })
1601 .ok();
1602 }));
1603 }
1604 }
1605 }
1606
1607 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1608 self.was_changed();
1609 self.non_text_state_update_count += 1;
1610 self.syntax_map.lock().did_parse(syntax_snapshot);
1611 self.request_autoindent(cx);
1612 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1613 cx.emit(BufferEvent::Reparsed);
1614 cx.notify();
1615 }
1616
1617 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1618 self.parse_status.1.clone()
1619 }
1620
1621 /// Wait until the buffer is no longer parsing
1622 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1623 let mut parse_status = self.parse_status();
1624 async move {
1625 while *parse_status.borrow() != ParseStatus::Idle {
1626 if parse_status.changed().await.is_err() {
1627 break;
1628 }
1629 }
1630 }
1631 }
1632
1633 /// Assign to the buffer a set of diagnostics created by a given language server.
1634 pub fn update_diagnostics(
1635 &mut self,
1636 server_id: LanguageServerId,
1637 diagnostics: DiagnosticSet,
1638 cx: &mut Context<Self>,
1639 ) {
1640 let lamport_timestamp = self.text.lamport_clock.tick();
1641 let op = Operation::UpdateDiagnostics {
1642 server_id,
1643 diagnostics: diagnostics.iter().cloned().collect(),
1644 lamport_timestamp,
1645 };
1646
1647 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1648 self.send_operation(op, true, cx);
1649 }
1650
1651 pub fn buffer_diagnostics(
1652 &self,
1653 for_server: Option<LanguageServerId>,
1654 ) -> Vec<&DiagnosticEntry<Anchor>> {
1655 match for_server {
1656 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1657 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1658 Err(_) => Vec::new(),
1659 },
1660 None => self
1661 .diagnostics
1662 .iter()
1663 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1664 .collect(),
1665 }
1666 }
1667
1668 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1669 if let Some(indent_sizes) = self.compute_autoindents() {
1670 let indent_sizes = cx.background_spawn(indent_sizes);
1671 match cx
1672 .background_executor()
1673 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1674 {
1675 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1676 Err(indent_sizes) => {
1677 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1678 let indent_sizes = indent_sizes.await;
1679 this.update(cx, |this, cx| {
1680 this.apply_autoindents(indent_sizes, cx);
1681 })
1682 .ok();
1683 }));
1684 }
1685 }
1686 } else {
1687 self.autoindent_requests.clear();
1688 for tx in self.wait_for_autoindent_txs.drain(..) {
1689 tx.send(()).ok();
1690 }
1691 }
1692 }
1693
1694 fn compute_autoindents(
1695 &self,
1696 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1697 let max_rows_between_yields = 100;
1698 let snapshot = self.snapshot();
1699 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1700 return None;
1701 }
1702
1703 let autoindent_requests = self.autoindent_requests.clone();
1704 Some(async move {
1705 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1706 for request in autoindent_requests {
1707 // Resolve each edited range to its row in the current buffer and in the
1708 // buffer before this batch of edits.
1709 let mut row_ranges = Vec::new();
1710 let mut old_to_new_rows = BTreeMap::new();
1711 let mut language_indent_sizes_by_new_row = Vec::new();
1712 for entry in &request.entries {
1713 let position = entry.range.start;
1714 let new_row = position.to_point(&snapshot).row;
1715 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1716 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1717
1718 if !entry.first_line_is_new {
1719 let old_row = position.to_point(&request.before_edit).row;
1720 old_to_new_rows.insert(old_row, new_row);
1721 }
1722 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1723 }
1724
1725 // Build a map containing the suggested indentation for each of the edited lines
1726 // with respect to the state of the buffer before these edits. This map is keyed
1727 // by the rows for these lines in the current state of the buffer.
1728 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1729 let old_edited_ranges =
1730 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1731 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1732 let mut language_indent_size = IndentSize::default();
1733 for old_edited_range in old_edited_ranges {
1734 let suggestions = request
1735 .before_edit
1736 .suggest_autoindents(old_edited_range.clone())
1737 .into_iter()
1738 .flatten();
1739 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1740 if let Some(suggestion) = suggestion {
1741 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1742
1743 // Find the indent size based on the language for this row.
1744 while let Some((row, size)) = language_indent_sizes.peek() {
1745 if *row > new_row {
1746 break;
1747 }
1748 language_indent_size = *size;
1749 language_indent_sizes.next();
1750 }
1751
1752 let suggested_indent = old_to_new_rows
1753 .get(&suggestion.basis_row)
1754 .and_then(|from_row| {
1755 Some(old_suggestions.get(from_row).copied()?.0)
1756 })
1757 .unwrap_or_else(|| {
1758 request
1759 .before_edit
1760 .indent_size_for_line(suggestion.basis_row)
1761 })
1762 .with_delta(suggestion.delta, language_indent_size);
1763 old_suggestions
1764 .insert(new_row, (suggested_indent, suggestion.within_error));
1765 }
1766 }
1767 yield_now().await;
1768 }
1769
1770 // Compute new suggestions for each line, but only include them in the result
1771 // if they differ from the old suggestion for that line.
1772 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1773 let mut language_indent_size = IndentSize::default();
1774 for (row_range, original_indent_column) in row_ranges {
1775 let new_edited_row_range = if request.is_block_mode {
1776 row_range.start..row_range.start + 1
1777 } else {
1778 row_range.clone()
1779 };
1780
1781 let suggestions = snapshot
1782 .suggest_autoindents(new_edited_row_range.clone())
1783 .into_iter()
1784 .flatten();
1785 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1786 if let Some(suggestion) = suggestion {
1787 // Find the indent size based on the language for this row.
1788 while let Some((row, size)) = language_indent_sizes.peek() {
1789 if *row > new_row {
1790 break;
1791 }
1792 language_indent_size = *size;
1793 language_indent_sizes.next();
1794 }
1795
1796 let suggested_indent = indent_sizes
1797 .get(&suggestion.basis_row)
1798 .copied()
1799 .map(|e| e.0)
1800 .unwrap_or_else(|| {
1801 snapshot.indent_size_for_line(suggestion.basis_row)
1802 })
1803 .with_delta(suggestion.delta, language_indent_size);
1804
1805 if old_suggestions.get(&new_row).is_none_or(
1806 |(old_indentation, was_within_error)| {
1807 suggested_indent != *old_indentation
1808 && (!suggestion.within_error || *was_within_error)
1809 },
1810 ) {
1811 indent_sizes.insert(
1812 new_row,
1813 (suggested_indent, request.ignore_empty_lines),
1814 );
1815 }
1816 }
1817 }
1818
1819 if let (true, Some(original_indent_column)) =
1820 (request.is_block_mode, original_indent_column)
1821 {
1822 let new_indent =
1823 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1824 *indent
1825 } else {
1826 snapshot.indent_size_for_line(row_range.start)
1827 };
1828 let delta = new_indent.len as i64 - original_indent_column as i64;
1829 if delta != 0 {
1830 for row in row_range.skip(1) {
1831 indent_sizes.entry(row).or_insert_with(|| {
1832 let mut size = snapshot.indent_size_for_line(row);
1833 if size.kind == new_indent.kind {
1834 match delta.cmp(&0) {
1835 Ordering::Greater => size.len += delta as u32,
1836 Ordering::Less => {
1837 size.len = size.len.saturating_sub(-delta as u32)
1838 }
1839 Ordering::Equal => {}
1840 }
1841 }
1842 (size, request.ignore_empty_lines)
1843 });
1844 }
1845 }
1846 }
1847
1848 yield_now().await;
1849 }
1850 }
1851
1852 indent_sizes
1853 .into_iter()
1854 .filter_map(|(row, (indent, ignore_empty_lines))| {
1855 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1856 None
1857 } else {
1858 Some((row, indent))
1859 }
1860 })
1861 .collect()
1862 })
1863 }
1864
1865 fn apply_autoindents(
1866 &mut self,
1867 indent_sizes: BTreeMap<u32, IndentSize>,
1868 cx: &mut Context<Self>,
1869 ) {
1870 self.autoindent_requests.clear();
1871 for tx in self.wait_for_autoindent_txs.drain(..) {
1872 tx.send(()).ok();
1873 }
1874
1875 let edits: Vec<_> = indent_sizes
1876 .into_iter()
1877 .filter_map(|(row, indent_size)| {
1878 let current_size = indent_size_for_line(self, row);
1879 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1880 })
1881 .collect();
1882
1883 let preserve_preview = self.preserve_preview();
1884 self.edit(edits, None, cx);
1885 if preserve_preview {
1886 self.refresh_preview();
1887 }
1888 }
1889
1890 /// Create a minimal edit that will cause the given row to be indented
1891 /// with the given size. After applying this edit, the length of the line
1892 /// will always be at least `new_size.len`.
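    ///
    /// # Example
    ///
    /// A minimal sketch (not a compiled doc-test): growing a two-space indent
    /// on row 2 to four spaces yields an insertion of two spaces at the start
    /// of that row.
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     2,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(2, 0)..Point::new(2, 0), "  ".to_string())),
    /// );
    /// ```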
1893 pub fn edit_for_indent_size_adjustment(
1894 row: u32,
1895 current_size: IndentSize,
1896 new_size: IndentSize,
1897 ) -> Option<(Range<Point>, String)> {
1898 if new_size.kind == current_size.kind {
1899            match new_size.len.cmp(&current_size.len) {
1900 Ordering::Greater => {
1901 let point = Point::new(row, 0);
1902 Some((
1903 point..point,
1904 iter::repeat(new_size.char())
1905 .take((new_size.len - current_size.len) as usize)
1906 .collect::<String>(),
1907 ))
1908 }
1909
1910 Ordering::Less => Some((
1911 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1912 String::new(),
1913 )),
1914
1915 Ordering::Equal => None,
1916 }
1917 } else {
1918 Some((
1919 Point::new(row, 0)..Point::new(row, current_size.len),
1920 iter::repeat(new_size.char())
1921 .take(new_size.len as usize)
1922 .collect::<String>(),
1923 ))
1924 }
1925 }
1926
1927 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1928 /// and the given new text.
1929 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1930 let old_text = self.as_rope().clone();
1931 let base_version = self.version();
1932 cx.background_executor()
1933 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1934 let old_text = old_text.to_string();
1935 let line_ending = LineEnding::detect(&new_text);
1936 LineEnding::normalize(&mut new_text);
1937 let edits = text_diff(&old_text, &new_text);
1938 Diff {
1939 base_version,
1940 line_ending,
1941 edits,
1942 }
1943 })
1944 }
1945
1946 /// Spawns a background task that searches the buffer for any whitespace
1947    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1948 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1949 let old_text = self.as_rope().clone();
1950 let line_ending = self.line_ending();
1951 let base_version = self.version();
1952 cx.background_spawn(async move {
1953 let ranges = trailing_whitespace_ranges(&old_text);
1954 let empty = Arc::<str>::from("");
1955 Diff {
1956 base_version,
1957 line_ending,
1958 edits: ranges
1959 .into_iter()
1960 .map(|range| (range, empty.clone()))
1961 .collect(),
1962 }
1963 })
1964 }
1965
1966 /// Ensures that the buffer ends with a single newline character, and
1967 /// no other whitespace. Skips if the buffer is empty.
1968 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1969 let len = self.len();
1970 if len == 0 {
1971 return;
1972 }
1973 let mut offset = len;
1974 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1975 let non_whitespace_len = chunk
1976 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1977 .len();
1978 offset -= chunk.len();
1979 offset += non_whitespace_len;
1980 if non_whitespace_len != 0 {
1981 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1982 return;
1983 }
1984 break;
1985 }
1986 }
1987 self.edit([(offset..len, "\n")], None, cx);
1988 }
1989
1990 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1991 /// calculated, then adjust the diff to account for those changes, and discard any
1992 /// parts of the diff that conflict with those changes.
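    ///
    /// # Example
    ///
    /// A minimal sketch (not a compiled doc-test) of the intended flow, assuming
    /// `buffer` is an `Entity<Buffer>`, `new_text` is a `String`, and `cx` is an
    /// async gpui context that supports `read_with`/`update`:
    ///
    /// ```ignore
    /// let diff_task = buffer.read_with(cx, |buffer, cx| buffer.diff(new_text, cx))?;
    /// let diff = diff_task.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     // Applies the diff, skipping hunks that conflict with newer edits.
    ///     buffer.apply_diff(diff, cx);
    /// })?;
    /// ```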
1993 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1994 let snapshot = self.snapshot();
1995 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1996 let mut delta = 0;
1997 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1998 while let Some(edit_since) = edits_since.peek() {
1999 // If the edit occurs after a diff hunk, then it does not
2000 // affect that hunk.
2001 if edit_since.old.start > range.end {
2002 break;
2003 }
2004 // If the edit precedes the diff hunk, then adjust the hunk
2005 // to reflect the edit.
2006 else if edit_since.old.end < range.start {
2007 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2008 edits_since.next();
2009 }
2010 // If the edit intersects a diff hunk, then discard that hunk.
2011 else {
2012 return None;
2013 }
2014 }
2015
2016 let start = (range.start as i64 + delta) as usize;
2017 let end = (range.end as i64 + delta) as usize;
2018 Some((start..end, new_text))
2019 });
2020
2021 self.start_transaction();
2022 self.text.set_line_ending(diff.line_ending);
2023 self.edit(adjusted_edits, None, cx);
2024 self.end_transaction(cx)
2025 }
2026
2027 pub fn has_unsaved_edits(&self) -> bool {
2028 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2029
2030 if last_version == self.version {
2031 self.has_unsaved_edits
2032 .set((last_version, has_unsaved_edits));
2033 return has_unsaved_edits;
2034 }
2035
2036 let has_edits = self.has_edits_since(&self.saved_version);
2037 self.has_unsaved_edits
2038 .set((self.version.clone(), has_edits));
2039 has_edits
2040 }
2041
2042 /// Checks if the buffer has unsaved changes.
2043 pub fn is_dirty(&self) -> bool {
2044 if self.capability == Capability::ReadOnly {
2045 return false;
2046 }
2047 if self.has_conflict {
2048 return true;
2049 }
2050 match self.file.as_ref().map(|f| f.disk_state()) {
2051 Some(DiskState::New) | Some(DiskState::Deleted) => {
2052 !self.is_empty() && self.has_unsaved_edits()
2053 }
2054 _ => self.has_unsaved_edits(),
2055 }
2056 }
2057
2058 /// Checks if the buffer and its file have both changed since the buffer
2059 /// was last saved or reloaded.
2060 pub fn has_conflict(&self) -> bool {
2061 if self.has_conflict {
2062 return true;
2063 }
2064 let Some(file) = self.file.as_ref() else {
2065 return false;
2066 };
2067 match file.disk_state() {
2068 DiskState::New => false,
2069 DiskState::Present { mtime } => match self.saved_mtime {
2070 Some(saved_mtime) => {
2071 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2072 }
2073 None => true,
2074 },
2075 DiskState::Deleted => false,
2076 }
2077 }
2078
2079 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2080 pub fn subscribe(&mut self) -> Subscription {
2081 self.text.subscribe()
2082 }
2083
2084 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2085 ///
2086 /// This allows downstream code to check if the buffer's text has changed without
2087    /// waiting for an effect cycle, which would be required if using events.
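    ///
    /// # Example
    ///
    /// A minimal sketch (not a compiled doc-test), with `buffer` assumed to be
    /// in scope:
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ... after some edits have been applied ...
    /// if changed.get() {
    ///     changed.set(false);
    ///     // React to the change.
    /// }
    /// ```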
2088 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2089 if let Err(ix) = self
2090 .change_bits
2091 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2092 {
2093 self.change_bits.insert(ix, bit);
2094 }
2095 }
2096
2097 /// Set the change bit for all "listeners".
2098 fn was_changed(&mut self) {
2099 self.change_bits.retain(|change_bit| {
2100 change_bit
2101 .upgrade()
2102 .inspect(|bit| {
2103 _ = bit.replace(true);
2104 })
2105 .is_some()
2106 });
2107 }
2108
2109 /// Starts a transaction, if one is not already in-progress. When undoing or
2110 /// redoing edits, all of the edits performed within a transaction are undone
2111 /// or redone together.
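    ///
    /// # Example
    ///
    /// A minimal sketch (not a compiled doc-test), with `buffer` and `cx`
    /// assumed to be in scope: two edits grouped so a single undo reverts both.
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "// header\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// buffer.undo(cx); // reverts both edits together
    /// ```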
2112 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2113 self.start_transaction_at(Instant::now())
2114 }
2115
2116 /// Starts a transaction, providing the current time. Subsequent transactions
2117 /// that occur within a short period of time will be grouped together. This
2118 /// is controlled by the buffer's undo grouping duration.
2119 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2120 self.transaction_depth += 1;
2121 if self.was_dirty_before_starting_transaction.is_none() {
2122 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2123 }
2124 self.text.start_transaction_at(now)
2125 }
2126
2127 /// Terminates the current transaction, if this is the outermost transaction.
2128 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2129 self.end_transaction_at(Instant::now(), cx)
2130 }
2131
2132 /// Terminates the current transaction, providing the current time. Subsequent transactions
2133 /// that occur within a short period of time will be grouped together. This
2134 /// is controlled by the buffer's undo grouping duration.
2135 pub fn end_transaction_at(
2136 &mut self,
2137 now: Instant,
2138 cx: &mut Context<Self>,
2139 ) -> Option<TransactionId> {
2140 assert!(self.transaction_depth > 0);
2141 self.transaction_depth -= 1;
2142 let was_dirty = if self.transaction_depth == 0 {
2143 self.was_dirty_before_starting_transaction.take().unwrap()
2144 } else {
2145 false
2146 };
2147 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2148 self.did_edit(&start_version, was_dirty, cx);
2149 Some(transaction_id)
2150 } else {
2151 None
2152 }
2153 }
2154
2155 /// Manually add a transaction to the buffer's undo history.
2156 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2157 self.text.push_transaction(transaction, now);
2158 }
2159
2160 /// Differs from `push_transaction` in that it does not clear the redo
2161 /// stack. Intended to be used to create a parent transaction to merge
2162 /// potential child transactions into.
2163 ///
2164 /// The caller is responsible for removing it from the undo history using
2165 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2166 /// are merged into this transaction, the caller is responsible for ensuring
2167 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2168 /// cleared is to create transactions with the usual `start_transaction` and
2169 /// `end_transaction` methods and merging the resulting transactions into
2170    /// the transaction created by this method.
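    ///
    /// # Example
    ///
    /// A minimal sketch (not a compiled doc-test) of that workflow, with
    /// `buffer` and `cx` assumed to be in scope:
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// // ... later, edits happen in their own transaction ...
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "x")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     // Nothing was merged into the parent, so remove it from the history.
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```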
2171 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2172 self.text.push_empty_transaction(now)
2173 }
2174
2175 /// Prevent the last transaction from being grouped with any subsequent transactions,
2176    /// even if they occur within the buffer's undo grouping duration.
2177 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2178 self.text.finalize_last_transaction()
2179 }
2180
2181 /// Manually group all changes since a given transaction.
2182 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2183 self.text.group_until_transaction(transaction_id);
2184 }
2185
2186 /// Manually remove a transaction from the buffer's undo history
2187 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2188 self.text.forget_transaction(transaction_id)
2189 }
2190
2191 /// Retrieve a transaction from the buffer's undo history
2192 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2193 self.text.get_transaction(transaction_id)
2194 }
2195
2196 /// Manually merge two transactions in the buffer's undo history.
2197 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2198 self.text.merge_transactions(transaction, destination);
2199 }
2200
2201 /// Waits for the buffer to receive operations with the given timestamps.
2202 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2203 &mut self,
2204 edit_ids: It,
2205 ) -> impl Future<Output = Result<()>> + use<It> {
2206 self.text.wait_for_edits(edit_ids)
2207 }
2208
2209 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2210 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2211 &mut self,
2212 anchors: It,
2213 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2214 self.text.wait_for_anchors(anchors)
2215 }
2216
2217 /// Waits for the buffer to receive operations up to the given version.
2218 pub fn wait_for_version(
2219 &mut self,
2220 version: clock::Global,
2221 ) -> impl Future<Output = Result<()>> + use<> {
2222 self.text.wait_for_version(version)
2223 }
2224
2225    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2226    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2227 pub fn give_up_waiting(&mut self) {
2228 self.text.give_up_waiting();
2229 }
2230
2231 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2232 let mut rx = None;
2233 if !self.autoindent_requests.is_empty() {
2234 let channel = oneshot::channel();
2235 self.wait_for_autoindent_txs.push(channel.0);
2236 rx = Some(channel.1);
2237 }
2238 rx
2239 }
2240
2241 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2242 pub fn set_active_selections(
2243 &mut self,
2244 selections: Arc<[Selection<Anchor>]>,
2245 line_mode: bool,
2246 cursor_shape: CursorShape,
2247 cx: &mut Context<Self>,
2248 ) {
2249 let lamport_timestamp = self.text.lamport_clock.tick();
2250 self.remote_selections.insert(
2251 self.text.replica_id(),
2252 SelectionSet {
2253 selections: selections.clone(),
2254 lamport_timestamp,
2255 line_mode,
2256 cursor_shape,
2257 },
2258 );
2259 self.send_operation(
2260 Operation::UpdateSelections {
2261 selections,
2262 line_mode,
2263 lamport_timestamp,
2264 cursor_shape,
2265 },
2266 true,
2267 cx,
2268 );
2269 self.non_text_state_update_count += 1;
2270 cx.notify();
2271 }
2272
2273 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2274 /// this replica.
2275 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2276 if self
2277 .remote_selections
2278 .get(&self.text.replica_id())
2279 .is_none_or(|set| !set.selections.is_empty())
2280 {
2281 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2282 }
2283 }
2284
2285 pub fn set_agent_selections(
2286 &mut self,
2287 selections: Arc<[Selection<Anchor>]>,
2288 line_mode: bool,
2289 cursor_shape: CursorShape,
2290 cx: &mut Context<Self>,
2291 ) {
2292 let lamport_timestamp = self.text.lamport_clock.tick();
2293 self.remote_selections.insert(
2294 ReplicaId::AGENT,
2295 SelectionSet {
2296 selections,
2297 lamport_timestamp,
2298 line_mode,
2299 cursor_shape,
2300 },
2301 );
2302 self.non_text_state_update_count += 1;
2303 cx.notify();
2304 }
2305
2306 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2307 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2308 }
2309
2310 /// Replaces the buffer's entire text.
2311 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2312 where
2313 T: Into<Arc<str>>,
2314 {
2315 self.autoindent_requests.clear();
2316 self.edit([(0..self.len(), text)], None, cx)
2317 }
2318
2319 /// Appends the given text to the end of the buffer.
2320 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2321 where
2322 T: Into<Arc<str>>,
2323 {
2324 self.edit([(self.len()..self.len(), text)], None, cx)
2325 }
2326
2327 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2328 /// delete, and a string of text to insert at that location.
2329 ///
2330 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2331 /// request for the edited ranges, which will be processed when the buffer finishes
2332 /// parsing.
2333 ///
2334 /// Parsing takes place at the end of a transaction, and may compute synchronously
2335 /// or asynchronously, depending on the changes.
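    ///
    /// # Example
    ///
    /// A minimal sketch (not a compiled doc-test), with `buffer` and `cx`
    /// assumed to be in scope: replace the first three bytes, append a line,
    /// and auto-indent each edited line.
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [(0..3, "let"), (buffer.len()..buffer.len(), "\nbar()")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```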
2336 pub fn edit<I, S, T>(
2337 &mut self,
2338 edits_iter: I,
2339 autoindent_mode: Option<AutoindentMode>,
2340 cx: &mut Context<Self>,
2341 ) -> Option<clock::Lamport>
2342 where
2343 I: IntoIterator<Item = (Range<S>, T)>,
2344 S: ToOffset,
2345 T: Into<Arc<str>>,
2346 {
2347 // Skip invalid edits and coalesce contiguous ones.
2348 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2349
2350 for (range, new_text) in edits_iter {
2351 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2352
2353 if range.start > range.end {
2354 mem::swap(&mut range.start, &mut range.end);
2355 }
2356 let new_text = new_text.into();
2357 if !new_text.is_empty() || !range.is_empty() {
2358 if let Some((prev_range, prev_text)) = edits.last_mut()
2359 && prev_range.end >= range.start
2360 {
2361 prev_range.end = cmp::max(prev_range.end, range.end);
2362 *prev_text = format!("{prev_text}{new_text}").into();
2363 } else {
2364 edits.push((range, new_text));
2365 }
2366 }
2367 }
2368 if edits.is_empty() {
2369 return None;
2370 }
2371
2372 self.start_transaction();
2373 self.pending_autoindent.take();
2374 let autoindent_request = autoindent_mode
2375 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2376
2377 let edit_operation = self.text.edit(edits.iter().cloned());
2378 let edit_id = edit_operation.timestamp();
2379
2380 if let Some((before_edit, mode)) = autoindent_request {
2381 let mut delta = 0isize;
2382 let mut previous_setting = None;
2383 let entries: Vec<_> = edits
2384 .into_iter()
2385 .enumerate()
2386 .zip(&edit_operation.as_edit().unwrap().new_text)
2387 .filter(|((_, (range, _)), _)| {
2388 let language = before_edit.language_at(range.start);
2389 let language_id = language.map(|l| l.id());
2390 if let Some((cached_language_id, auto_indent)) = previous_setting
2391 && cached_language_id == language_id
2392 {
2393 auto_indent
2394 } else {
2395 // The auto-indent setting is not present in editorconfigs, hence
2396 // we can avoid passing the file here.
2397 let auto_indent =
2398 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2399 previous_setting = Some((language_id, auto_indent));
2400 auto_indent
2401 }
2402 })
2403 .map(|((ix, (range, _)), new_text)| {
2404 let new_text_length = new_text.len();
2405 let old_start = range.start.to_point(&before_edit);
2406 let new_start = (delta + range.start as isize) as usize;
2407 let range_len = range.end - range.start;
2408 delta += new_text_length as isize - range_len as isize;
2409
2410 // Decide what range of the insertion to auto-indent, and whether
2411 // the first line of the insertion should be considered a newly-inserted line
2412 // or an edit to an existing line.
2413 let mut range_of_insertion_to_indent = 0..new_text_length;
2414 let mut first_line_is_new = true;
2415
2416 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2417 let old_line_end = before_edit.line_len(old_start.row);
2418
2419 if old_start.column > old_line_start {
2420 first_line_is_new = false;
2421 }
2422
2423 if !new_text.contains('\n')
2424 && (old_start.column + (range_len as u32) < old_line_end
2425 || old_line_end == old_line_start)
2426 {
2427 first_line_is_new = false;
2428 }
2429
2430 // When inserting text starting with a newline, avoid auto-indenting the
2431 // previous line.
2432 if new_text.starts_with('\n') {
2433 range_of_insertion_to_indent.start += 1;
2434 first_line_is_new = true;
2435 }
2436
2437 let mut original_indent_column = None;
2438 if let AutoindentMode::Block {
2439 original_indent_columns,
2440 } = &mode
2441 {
2442 original_indent_column = Some(if new_text.starts_with('\n') {
2443 indent_size_for_text(
2444 new_text[range_of_insertion_to_indent.clone()].chars(),
2445 )
2446 .len
2447 } else {
2448 original_indent_columns
2449 .get(ix)
2450 .copied()
2451 .flatten()
2452 .unwrap_or_else(|| {
2453 indent_size_for_text(
2454 new_text[range_of_insertion_to_indent.clone()].chars(),
2455 )
2456 .len
2457 })
2458 });
2459
2460 // Avoid auto-indenting the line after the edit.
2461 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2462 range_of_insertion_to_indent.end -= 1;
2463 }
2464 }
2465
2466 AutoindentRequestEntry {
2467 first_line_is_new,
2468 original_indent_column,
2469 indent_size: before_edit.language_indent_size_at(range.start, cx),
2470 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2471 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2472 }
2473 })
2474 .collect();
2475
2476 if !entries.is_empty() {
2477 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2478 before_edit,
2479 entries,
2480 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2481 ignore_empty_lines: false,
2482 }));
2483 }
2484 }
2485
2486 self.end_transaction(cx);
2487 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2488 Some(edit_id)
2489 }
2490
2491 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2492 self.was_changed();
2493
2494 if self.edits_since::<usize>(old_version).next().is_none() {
2495 return;
2496 }
2497
2498 self.reparse(cx);
2499 cx.emit(BufferEvent::Edited);
2500 if was_dirty != self.is_dirty() {
2501 cx.emit(BufferEvent::DirtyChanged);
2502 }
2503 cx.notify();
2504 }
2505
2506 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2507 where
2508 I: IntoIterator<Item = Range<T>>,
2509 T: ToOffset + Copy,
2510 {
2511 let before_edit = self.snapshot();
2512 let entries = ranges
2513 .into_iter()
2514 .map(|range| AutoindentRequestEntry {
2515 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2516 first_line_is_new: true,
2517 indent_size: before_edit.language_indent_size_at(range.start, cx),
2518 original_indent_column: None,
2519 })
2520 .collect();
2521 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2522 before_edit,
2523 entries,
2524 is_block_mode: false,
2525 ignore_empty_lines: true,
2526 }));
2527 self.request_autoindent(cx);
2528 }
2529
2530 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2531 // You can also request the insertion of empty lines above and below the line starting at the returned point.
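    //
    // A minimal sketch (hypothetical position), with `buffer` and `cx` in scope:
    //
    //     let start = buffer.insert_empty_line(Point::new(4, 2), true, true, cx);
    //     // `start` points at the beginning of the newly inserted blank line.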
2532 pub fn insert_empty_line(
2533 &mut self,
2534 position: impl ToPoint,
2535 space_above: bool,
2536 space_below: bool,
2537 cx: &mut Context<Self>,
2538 ) -> Point {
2539 let mut position = position.to_point(self);
2540
2541 self.start_transaction();
2542
2543 self.edit(
2544 [(position..position, "\n")],
2545 Some(AutoindentMode::EachLine),
2546 cx,
2547 );
2548
2549 if position.column > 0 {
2550 position += Point::new(1, 0);
2551 }
2552
2553 if !self.is_line_blank(position.row) {
2554 self.edit(
2555 [(position..position, "\n")],
2556 Some(AutoindentMode::EachLine),
2557 cx,
2558 );
2559 }
2560
2561 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2562 self.edit(
2563 [(position..position, "\n")],
2564 Some(AutoindentMode::EachLine),
2565 cx,
2566 );
2567 position.row += 1;
2568 }
2569
2570 if space_below
2571 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2572 {
2573 self.edit(
2574 [(position..position, "\n")],
2575 Some(AutoindentMode::EachLine),
2576 cx,
2577 );
2578 }
2579
2580 self.end_transaction(cx);
2581
2582 position
2583 }
2584
2585 /// Applies the given remote operations to the buffer.
2586 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2587 self.pending_autoindent.take();
2588 let was_dirty = self.is_dirty();
2589 let old_version = self.version.clone();
2590 let mut deferred_ops = Vec::new();
2591 let buffer_ops = ops
2592 .into_iter()
2593 .filter_map(|op| match op {
2594 Operation::Buffer(op) => Some(op),
2595 _ => {
2596 if self.can_apply_op(&op) {
2597 self.apply_op(op, cx);
2598 } else {
2599 deferred_ops.push(op);
2600 }
2601 None
2602 }
2603 })
2604 .collect::<Vec<_>>();
2605 for operation in buffer_ops.iter() {
2606 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2607 }
2608 self.text.apply_ops(buffer_ops);
2609 self.deferred_ops.insert(deferred_ops);
2610 self.flush_deferred_ops(cx);
2611 self.did_edit(&old_version, was_dirty, cx);
2612 // Notify independently of whether the buffer was edited as the operations could include a
2613 // selection update.
2614 cx.notify();
2615 }
2616
2617 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2618 let mut deferred_ops = Vec::new();
2619 for op in self.deferred_ops.drain().iter().cloned() {
2620 if self.can_apply_op(&op) {
2621 self.apply_op(op, cx);
2622 } else {
2623 deferred_ops.push(op);
2624 }
2625 }
2626 self.deferred_ops.insert(deferred_ops);
2627 }
2628
2629 pub fn has_deferred_ops(&self) -> bool {
2630 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2631 }
2632
2633 fn can_apply_op(&self, operation: &Operation) -> bool {
2634 match operation {
2635 Operation::Buffer(_) => {
2636 unreachable!("buffer operations should never be applied at this layer")
2637 }
2638 Operation::UpdateDiagnostics {
2639 diagnostics: diagnostic_set,
2640 ..
2641 } => diagnostic_set.iter().all(|diagnostic| {
2642 self.text.can_resolve(&diagnostic.range.start)
2643 && self.text.can_resolve(&diagnostic.range.end)
2644 }),
2645 Operation::UpdateSelections { selections, .. } => selections
2646 .iter()
2647 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2648 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2649 }
2650 }
2651
2652 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2653 match operation {
2654 Operation::Buffer(_) => {
2655 unreachable!("buffer operations should never be applied at this layer")
2656 }
2657 Operation::UpdateDiagnostics {
2658 server_id,
2659 diagnostics: diagnostic_set,
2660 lamport_timestamp,
2661 } => {
2662 let snapshot = self.snapshot();
2663 self.apply_diagnostic_update(
2664 server_id,
2665 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2666 lamport_timestamp,
2667 cx,
2668 );
2669 }
2670 Operation::UpdateSelections {
2671 selections,
2672 lamport_timestamp,
2673 line_mode,
2674 cursor_shape,
2675 } => {
2676 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2677 && set.lamport_timestamp > lamport_timestamp
2678 {
2679 return;
2680 }
2681
2682 self.remote_selections.insert(
2683 lamport_timestamp.replica_id,
2684 SelectionSet {
2685 selections,
2686 lamport_timestamp,
2687 line_mode,
2688 cursor_shape,
2689 },
2690 );
2691 self.text.lamport_clock.observe(lamport_timestamp);
2692 self.non_text_state_update_count += 1;
2693 }
2694 Operation::UpdateCompletionTriggers {
2695 triggers,
2696 lamport_timestamp,
2697 server_id,
2698 } => {
2699 if triggers.is_empty() {
2700 self.completion_triggers_per_language_server
2701 .remove(&server_id);
2702 self.completion_triggers = self
2703 .completion_triggers_per_language_server
2704 .values()
2705 .flat_map(|triggers| triggers.iter().cloned())
2706 .collect();
2707 } else {
2708 self.completion_triggers_per_language_server
2709 .insert(server_id, triggers.iter().cloned().collect());
2710 self.completion_triggers.extend(triggers);
2711 }
2712 self.text.lamport_clock.observe(lamport_timestamp);
2713 }
2714 Operation::UpdateLineEnding {
2715 line_ending,
2716 lamport_timestamp,
2717 } => {
2718 self.text.set_line_ending(line_ending);
2719 self.text.lamport_clock.observe(lamport_timestamp);
2720 }
2721 }
2722 }
2723
2724 fn apply_diagnostic_update(
2725 &mut self,
2726 server_id: LanguageServerId,
2727 diagnostics: DiagnosticSet,
2728 lamport_timestamp: clock::Lamport,
2729 cx: &mut Context<Self>,
2730 ) {
2731 if lamport_timestamp > self.diagnostics_timestamp {
2732 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2733 if diagnostics.is_empty() {
2734 if let Ok(ix) = ix {
2735 self.diagnostics.remove(ix);
2736 }
2737 } else {
2738 match ix {
2739 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2740 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2741 };
2742 }
2743 self.diagnostics_timestamp = lamport_timestamp;
2744 self.non_text_state_update_count += 1;
2745 self.text.lamport_clock.observe(lamport_timestamp);
2746 cx.notify();
2747 cx.emit(BufferEvent::DiagnosticsUpdated);
2748 }
2749 }
2750
2751 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2752 self.was_changed();
2753 cx.emit(BufferEvent::Operation {
2754 operation,
2755 is_local,
2756 });
2757 }
2758
2759 /// Removes the selections for a given peer.
2760 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2761 self.remote_selections.remove(&replica_id);
2762 cx.notify();
2763 }
2764
2765 /// Undoes the most recent transaction.
2766 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2767 let was_dirty = self.is_dirty();
2768 let old_version = self.version.clone();
2769
2770 if let Some((transaction_id, operation)) = self.text.undo() {
2771 self.send_operation(Operation::Buffer(operation), true, cx);
2772 self.did_edit(&old_version, was_dirty, cx);
2773 Some(transaction_id)
2774 } else {
2775 None
2776 }
2777 }
2778
2779 /// Manually undoes a specific transaction in the buffer's undo history.
2780 pub fn undo_transaction(
2781 &mut self,
2782 transaction_id: TransactionId,
2783 cx: &mut Context<Self>,
2784 ) -> bool {
2785 let was_dirty = self.is_dirty();
2786 let old_version = self.version.clone();
2787 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2788 self.send_operation(Operation::Buffer(operation), true, cx);
2789 self.did_edit(&old_version, was_dirty, cx);
2790 true
2791 } else {
2792 false
2793 }
2794 }
2795
2796 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2797 pub fn undo_to_transaction(
2798 &mut self,
2799 transaction_id: TransactionId,
2800 cx: &mut Context<Self>,
2801 ) -> bool {
2802 let was_dirty = self.is_dirty();
2803 let old_version = self.version.clone();
2804
2805 let operations = self.text.undo_to_transaction(transaction_id);
2806 let undone = !operations.is_empty();
2807 for operation in operations {
2808 self.send_operation(Operation::Buffer(operation), true, cx);
2809 }
2810 if undone {
2811 self.did_edit(&old_version, was_dirty, cx)
2812 }
2813 undone
2814 }
2815
2816 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2817 let was_dirty = self.is_dirty();
2818 let operation = self.text.undo_operations(counts);
2819 let old_version = self.version.clone();
2820 self.send_operation(Operation::Buffer(operation), true, cx);
2821 self.did_edit(&old_version, was_dirty, cx);
2822 }
2823
2824    /// Redoes the most recently undone transaction.
2825 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2826 let was_dirty = self.is_dirty();
2827 let old_version = self.version.clone();
2828
2829 if let Some((transaction_id, operation)) = self.text.redo() {
2830 self.send_operation(Operation::Buffer(operation), true, cx);
2831 self.did_edit(&old_version, was_dirty, cx);
2832 Some(transaction_id)
2833 } else {
2834 None
2835 }
2836 }
2837
2838    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2839 pub fn redo_to_transaction(
2840 &mut self,
2841 transaction_id: TransactionId,
2842 cx: &mut Context<Self>,
2843 ) -> bool {
2844 let was_dirty = self.is_dirty();
2845 let old_version = self.version.clone();
2846
2847 let operations = self.text.redo_to_transaction(transaction_id);
2848 let redone = !operations.is_empty();
2849 for operation in operations {
2850 self.send_operation(Operation::Buffer(operation), true, cx);
2851 }
2852 if redone {
2853 self.did_edit(&old_version, was_dirty, cx)
2854 }
2855 redone
2856 }
2857
2858 /// Override current completion triggers with the user-provided completion triggers.
2859 pub fn set_completion_triggers(
2860 &mut self,
2861 server_id: LanguageServerId,
2862 triggers: BTreeSet<String>,
2863 cx: &mut Context<Self>,
2864 ) {
2865 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2866 if triggers.is_empty() {
2867 self.completion_triggers_per_language_server
2868 .remove(&server_id);
2869 self.completion_triggers = self
2870 .completion_triggers_per_language_server
2871 .values()
2872 .flat_map(|triggers| triggers.iter().cloned())
2873 .collect();
2874 } else {
2875 self.completion_triggers_per_language_server
2876 .insert(server_id, triggers.clone());
2877 self.completion_triggers.extend(triggers.iter().cloned());
2878 }
2879 self.send_operation(
2880 Operation::UpdateCompletionTriggers {
2881 triggers: triggers.into_iter().collect(),
2882 lamport_timestamp: self.completion_triggers_timestamp,
2883 server_id,
2884 },
2885 true,
2886 cx,
2887 );
2888 cx.notify();
2889 }
2890
2891 /// Returns a list of strings which trigger a completion menu for this language.
2892    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2893 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2894 &self.completion_triggers
2895 }
2896
2897 /// Call this directly after performing edits to prevent the preview tab
2898 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2899 /// to return false until there are additional edits.
2900 pub fn refresh_preview(&mut self) {
2901 self.preview_version = self.version.clone();
2902 }
2903
2904 /// Whether we should preserve the preview status of a tab containing this buffer.
2905 pub fn preserve_preview(&self) -> bool {
2906 !self.has_edits_since(&self.preview_version)
2907 }
2908}
2909
2910#[doc(hidden)]
2911#[cfg(any(test, feature = "test-support"))]
2912impl Buffer {
2913 pub fn edit_via_marked_text(
2914 &mut self,
2915 marked_string: &str,
2916 autoindent_mode: Option<AutoindentMode>,
2917 cx: &mut Context<Self>,
2918 ) {
2919 let edits = self.edits_for_marked_text(marked_string);
2920 self.edit(edits, autoindent_mode, cx);
2921 }
2922
2923 pub fn set_group_interval(&mut self, group_interval: Duration) {
2924 self.text.set_group_interval(group_interval);
2925 }
2926
2927 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2928 where
2929 T: rand::Rng,
2930 {
2931 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2932 let mut last_end = None;
2933 for _ in 0..old_range_count {
2934 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2935 break;
2936 }
2937
2938 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2939 let mut range = self.random_byte_range(new_start, rng);
2940 if rng.random_bool(0.2) {
2941 mem::swap(&mut range.start, &mut range.end);
2942 }
2943 last_end = Some(range.end);
2944
2945 let new_text_len = rng.random_range(0..10);
2946 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2947 new_text = new_text.to_uppercase();
2948
2949 edits.push((range, new_text));
2950 }
2951 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2952 self.edit(edits, None, cx);
2953 }
2954
2955 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2956 let was_dirty = self.is_dirty();
2957 let old_version = self.version.clone();
2958
2959 let ops = self.text.randomly_undo_redo(rng);
2960 if !ops.is_empty() {
2961 for op in ops {
2962 self.send_operation(Operation::Buffer(op), true, cx);
2963 self.did_edit(&old_version, was_dirty, cx);
2964 }
2965 }
2966 }
2967}
2968
2969impl EventEmitter<BufferEvent> for Buffer {}
2970
2971impl Deref for Buffer {
2972 type Target = TextBuffer;
2973
2974 fn deref(&self) -> &Self::Target {
2975 &self.text
2976 }
2977}
2978
2979impl BufferSnapshot {
2980 /// Returns [`IndentSize`] for a given line that respects user settings and
2981 /// language preferences.
2982 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2983 indent_size_for_line(self, row)
2984 }
2985
2986 /// Returns [`IndentSize`] for a given position that respects user settings
2987 /// and language preferences.
2988 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2989 let settings = language_settings(
2990 self.language_at(position).map(|l| l.name()),
2991 self.file(),
2992 cx,
2993 );
2994 if settings.hard_tabs {
2995 IndentSize::tab()
2996 } else {
2997 IndentSize::spaces(settings.tab_size.get())
2998 }
2999 }
3000
3001 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3002 /// is passed in as `single_indent_size`.
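    ///
    /// # Example
    ///
    /// A minimal sketch (not a compiled doc-test), with `snapshot` assumed to
    /// be a `BufferSnapshot`: suggestions for rows 1 through 3, using four
    /// spaces as the unit of indentation.
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent to {} column(s)", indent.len);
    /// }
    /// ```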
3003 pub fn suggested_indents(
3004 &self,
3005 rows: impl Iterator<Item = u32>,
3006 single_indent_size: IndentSize,
3007 ) -> BTreeMap<u32, IndentSize> {
3008 let mut result = BTreeMap::new();
3009
3010 for row_range in contiguous_ranges(rows, 10) {
3011 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3012 Some(suggestions) => suggestions,
3013 _ => break,
3014 };
3015
3016 for (row, suggestion) in row_range.zip(suggestions) {
3017 let indent_size = if let Some(suggestion) = suggestion {
3018 result
3019 .get(&suggestion.basis_row)
3020 .copied()
3021 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3022 .with_delta(suggestion.delta, single_indent_size)
3023 } else {
3024 self.indent_size_for_line(row)
3025 };
3026
3027 result.insert(row, indent_size);
3028 }
3029 }
3030
3031 result
3032 }
3033
3034 fn suggest_autoindents(
3035 &self,
3036 row_range: Range<u32>,
3037 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3038 let config = &self.language.as_ref()?.config;
3039 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3040
3041 #[derive(Debug, Clone)]
3042 struct StartPosition {
3043 start: Point,
3044 suffix: SharedString,
3045 }
3046
3047 // Find the suggested indentation ranges based on the syntax tree.
3048 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3049 let end = Point::new(row_range.end, 0);
3050 let range = (start..end).to_offset(&self.text);
3051 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3052 Some(&grammar.indents_config.as_ref()?.query)
3053 });
3054 let indent_configs = matches
3055 .grammars()
3056 .iter()
3057 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3058 .collect::<Vec<_>>();
3059
3060 let mut indent_ranges = Vec::<Range<Point>>::new();
3061 let mut start_positions = Vec::<StartPosition>::new();
3062 let mut outdent_positions = Vec::<Point>::new();
3063 while let Some(mat) = matches.peek() {
3064 let mut start: Option<Point> = None;
3065 let mut end: Option<Point> = None;
3066
3067 let config = indent_configs[mat.grammar_index];
3068 for capture in mat.captures {
3069 if capture.index == config.indent_capture_ix {
3070 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3071 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3072 } else if Some(capture.index) == config.start_capture_ix {
3073 start = Some(Point::from_ts_point(capture.node.end_position()));
3074 } else if Some(capture.index) == config.end_capture_ix {
3075 end = Some(Point::from_ts_point(capture.node.start_position()));
3076 } else if Some(capture.index) == config.outdent_capture_ix {
3077 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3078 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3079 start_positions.push(StartPosition {
3080 start: Point::from_ts_point(capture.node.start_position()),
3081 suffix: suffix.clone(),
3082 });
3083 }
3084 }
3085
3086 matches.advance();
3087 if let Some((start, end)) = start.zip(end) {
3088 if start.row == end.row {
3089 continue;
3090 }
3091 let range = start..end;
3092 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3093 Err(ix) => indent_ranges.insert(ix, range),
3094 Ok(ix) => {
3095 let prev_range = &mut indent_ranges[ix];
3096 prev_range.end = prev_range.end.max(range.end);
3097 }
3098 }
3099 }
3100 }
3101
3102 let mut error_ranges = Vec::<Range<Point>>::new();
3103 let mut matches = self
3104 .syntax
3105 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3106 while let Some(mat) = matches.peek() {
3107 let node = mat.captures[0].node;
3108 let start = Point::from_ts_point(node.start_position());
3109 let end = Point::from_ts_point(node.end_position());
3110 let range = start..end;
3111 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3112 Ok(ix) | Err(ix) => ix,
3113 };
3114 let mut end_ix = ix;
3115 while let Some(existing_range) = error_ranges.get(end_ix) {
3116 if existing_range.end < end {
3117 end_ix += 1;
3118 } else {
3119 break;
3120 }
3121 }
3122 error_ranges.splice(ix..end_ix, [range]);
3123 matches.advance();
3124 }
3125
3126 outdent_positions.sort();
3127 for outdent_position in outdent_positions {
3128            // Find the innermost indent range containing this outdent position
3129            // and set its end to that position.
3130 if let Some(range_to_truncate) = indent_ranges
3131 .iter_mut()
3132 .filter(|indent_range| indent_range.contains(&outdent_position))
3133 .next_back()
3134 {
3135 range_to_truncate.end = outdent_position;
3136 }
3137 }
3138
3139 start_positions.sort_by_key(|b| b.start);
3140
3141        // Find the suggested indentation increases and decreases based on regexes.
3142 let mut regex_outdent_map = HashMap::default();
3143 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3144 let mut start_positions_iter = start_positions.iter().peekable();
3145
3146 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3147 self.for_each_line(
3148 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3149 ..Point::new(row_range.end, 0),
3150 |row, line| {
3151 if config
3152 .decrease_indent_pattern
3153 .as_ref()
3154 .is_some_and(|regex| regex.is_match(line))
3155 {
3156 indent_change_rows.push((row, Ordering::Less));
3157 }
3158 if config
3159 .increase_indent_pattern
3160 .as_ref()
3161 .is_some_and(|regex| regex.is_match(line))
3162 {
3163 indent_change_rows.push((row + 1, Ordering::Greater));
3164 }
3165 while let Some(pos) = start_positions_iter.peek() {
3166 if pos.start.row < row {
3167 let pos = start_positions_iter.next().unwrap();
3168 last_seen_suffix
3169 .entry(pos.suffix.to_string())
3170 .or_default()
3171 .push(pos.start);
3172 } else {
3173 break;
3174 }
3175 }
3176 for rule in &config.decrease_indent_patterns {
3177 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3178 let row_start_column = self.indent_size_for_line(row).len;
3179 let basis_row = rule
3180 .valid_after
3181 .iter()
3182 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3183 .flatten()
3184 .filter(|start_point| start_point.column <= row_start_column)
3185 .max_by_key(|start_point| start_point.row);
3186 if let Some(outdent_to_row) = basis_row {
3187 regex_outdent_map.insert(row, outdent_to_row.row);
3188 }
3189 break;
3190 }
3191 }
3192 },
3193 );
3194
3195 let mut indent_changes = indent_change_rows.into_iter().peekable();
3196 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3197 prev_non_blank_row.unwrap_or(0)
3198 } else {
3199 row_range.start.saturating_sub(1)
3200 };
3201
3202 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3203 Some(row_range.map(move |row| {
3204 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3205
3206 let mut indent_from_prev_row = false;
3207 let mut outdent_from_prev_row = false;
3208 let mut outdent_to_row = u32::MAX;
3209 let mut from_regex = false;
3210
3211 while let Some((indent_row, delta)) = indent_changes.peek() {
3212 match indent_row.cmp(&row) {
3213 Ordering::Equal => match delta {
3214 Ordering::Less => {
3215 from_regex = true;
3216 outdent_from_prev_row = true
3217 }
3218 Ordering::Greater => {
3219 indent_from_prev_row = true;
3220 from_regex = true
3221 }
3222 _ => {}
3223 },
3224
3225 Ordering::Greater => break,
3226 Ordering::Less => {}
3227 }
3228
3229 indent_changes.next();
3230 }
3231
3232 for range in &indent_ranges {
3233 if range.start.row >= row {
3234 break;
3235 }
3236 if range.start.row == prev_row && range.end > row_start {
3237 indent_from_prev_row = true;
3238 }
3239 if range.end > prev_row_start && range.end <= row_start {
3240 outdent_to_row = outdent_to_row.min(range.start.row);
3241 }
3242 }
3243
3244 if let Some(basis_row) = regex_outdent_map.get(&row) {
3245 indent_from_prev_row = false;
3246 outdent_to_row = *basis_row;
3247 from_regex = true;
3248 }
3249
3250 let within_error = error_ranges
3251 .iter()
3252 .any(|e| e.start.row < row && e.end > row_start);
3253
3254 let suggestion = if outdent_to_row == prev_row
3255 || (outdent_from_prev_row && indent_from_prev_row)
3256 {
3257 Some(IndentSuggestion {
3258 basis_row: prev_row,
3259 delta: Ordering::Equal,
3260 within_error: within_error && !from_regex,
3261 })
3262 } else if indent_from_prev_row {
3263 Some(IndentSuggestion {
3264 basis_row: prev_row,
3265 delta: Ordering::Greater,
3266 within_error: within_error && !from_regex,
3267 })
3268 } else if outdent_to_row < prev_row {
3269 Some(IndentSuggestion {
3270 basis_row: outdent_to_row,
3271 delta: Ordering::Equal,
3272 within_error: within_error && !from_regex,
3273 })
3274 } else if outdent_from_prev_row {
3275 Some(IndentSuggestion {
3276 basis_row: prev_row,
3277 delta: Ordering::Less,
3278 within_error: within_error && !from_regex,
3279 })
3280 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3281 {
3282 Some(IndentSuggestion {
3283 basis_row: prev_row,
3284 delta: Ordering::Equal,
3285 within_error: within_error && !from_regex,
3286 })
3287 } else {
3288 None
3289 };
3290
3291 prev_row = row;
3292 prev_row_start = row_start;
3293 suggestion
3294 }))
3295 }
3296
3297 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3298 while row > 0 {
3299 row -= 1;
3300 if !self.is_line_blank(row) {
3301 return Some(row);
3302 }
3303 }
3304 None
3305 }
3306
3307 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3308 let captures = self.syntax.captures(range, &self.text, |grammar| {
3309 grammar
3310 .highlights_config
3311 .as_ref()
3312 .map(|config| &config.query)
3313 });
3314 let highlight_maps = captures
3315 .grammars()
3316 .iter()
3317 .map(|grammar| grammar.highlight_map())
3318 .collect();
3319 (captures, highlight_maps)
3320 }
3321
3322 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3323 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3324 /// returned in chunks where each chunk has a single syntax highlighting style and
3325 /// diagnostic status.
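    ///
    /// # Example
    ///
    /// A minimal sketch (not a compiled doc-test), with `snapshot` assumed to
    /// be a `BufferSnapshot`; the `text` field on each chunk is assumed here.
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries a single highlight and diagnostic status.
    ///     text.push_str(chunk.text);
    /// }
    /// ```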
3326 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3327 let range = range.start.to_offset(self)..range.end.to_offset(self);
3328
3329 let mut syntax = None;
3330 if language_aware {
3331 syntax = Some(self.get_highlights(range.clone()));
3332 }
3333 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3334 let diagnostics = language_aware;
3335 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3336 }
3337
3338 pub fn highlighted_text_for_range<T: ToOffset>(
3339 &self,
3340 range: Range<T>,
3341 override_style: Option<HighlightStyle>,
3342 syntax_theme: &SyntaxTheme,
3343 ) -> HighlightedText {
3344 HighlightedText::from_buffer_range(
3345 range,
3346 &self.text,
3347 &self.syntax,
3348 override_style,
3349 syntax_theme,
3350 )
3351 }
3352
3353 /// Invokes the given callback for each line of text in the given range of the buffer.
3354    /// Uses a callback to avoid allocating a new string for each line.
3355 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3356 let mut line = String::new();
3357 let mut row = range.start.row;
3358 for chunk in self
3359 .as_rope()
3360 .chunks_in_range(range.to_offset(self))
3361 .chain(["\n"])
3362 {
3363 for (newline_ix, text) in chunk.split('\n').enumerate() {
3364 if newline_ix > 0 {
3365 callback(row, &line);
3366 row += 1;
3367 line.clear();
3368 }
3369 line.push_str(text);
3370 }
3371 }
3372 }
3373
3374 /// Iterates over every [`SyntaxLayer`] in the buffer.
3375 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3376 self.syntax_layers_for_range(0..self.len(), true)
3377 }
3378
3379 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3380 let offset = position.to_offset(self);
3381 self.syntax_layers_for_range(offset..offset, false)
3382 .filter(|l| {
3383 if let Some(ranges) = l.included_sub_ranges {
3384 ranges.iter().any(|range| {
3385 let start = range.start.to_offset(self);
3386 start <= offset && {
3387 let end = range.end.to_offset(self);
3388 offset < end
3389 }
3390 })
3391 } else {
3392 l.node().start_byte() <= offset && l.node().end_byte() > offset
3393 }
3394 })
3395 .last()
3396 }
3397
3398 pub fn syntax_layers_for_range<D: ToOffset>(
3399 &self,
3400 range: Range<D>,
3401 include_hidden: bool,
3402 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3403 self.syntax
3404 .layers_for_range(range, &self.text, include_hidden)
3405 }
3406
3407 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3408 &self,
3409 range: Range<D>,
3410 ) -> Option<SyntaxLayer<'_>> {
3411 let range = range.to_offset(self);
3412 self.syntax
3413 .layers_for_range(range, &self.text, false)
3414 .max_by(|a, b| {
3415 if a.depth != b.depth {
3416 a.depth.cmp(&b.depth)
3417 } else if a.offset.0 != b.offset.0 {
3418 a.offset.0.cmp(&b.offset.0)
3419 } else {
3420 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3421 }
3422 })
3423 }
3424
3425 /// Returns the main [`Language`].
3426 pub fn language(&self) -> Option<&Arc<Language>> {
3427 self.language.as_ref()
3428 }
3429
3430 /// Returns the [`Language`] at the given location.
3431 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3432 self.syntax_layer_at(position)
3433 .map(|info| info.language)
3434 .or(self.language.as_ref())
3435 }
3436
3437 /// Returns the settings for the language at the given location.
3438 pub fn settings_at<'a, D: ToOffset>(
3439 &'a self,
3440 position: D,
3441 cx: &'a App,
3442 ) -> Cow<'a, LanguageSettings> {
3443 language_settings(
3444 self.language_at(position).map(|l| l.name()),
3445 self.file.as_ref(),
3446 cx,
3447 )
3448 }
3449
3450 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3451 CharClassifier::new(self.language_scope_at(point))
3452 }
3453
3454 /// Returns the [`LanguageScope`] at the given location.
3455 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3456 let offset = position.to_offset(self);
3457 let mut scope = None;
3458 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3459
3460 // Use the layer that has the smallest node intersecting the given point.
3461 for layer in self
3462 .syntax
3463 .layers_for_range(offset..offset, &self.text, false)
3464 {
3465 let mut cursor = layer.node().walk();
3466
3467 let mut range = None;
3468 loop {
3469 let child_range = cursor.node().byte_range();
3470 if !child_range.contains(&offset) {
3471 break;
3472 }
3473
3474 range = Some(child_range);
3475 if cursor.goto_first_child_for_byte(offset).is_none() {
3476 break;
3477 }
3478 }
3479
3480 if let Some(range) = range
3481 && smallest_range_and_depth.as_ref().is_none_or(
3482 |(smallest_range, smallest_range_depth)| {
3483 if layer.depth > *smallest_range_depth {
3484 true
3485 } else if layer.depth == *smallest_range_depth {
3486 range.len() < smallest_range.len()
3487 } else {
3488 false
3489 }
3490 },
3491 )
3492 {
3493 smallest_range_and_depth = Some((range, layer.depth));
3494 scope = Some(LanguageScope {
3495 language: layer.language.clone(),
3496 override_id: layer.override_id(offset, &self.text),
3497 });
3498 }
3499 }
3500
3501 scope.or_else(|| {
3502 self.language.clone().map(|language| LanguageScope {
3503 language,
3504 override_id: None,
3505 })
3506 })
3507 }
3508
3509 /// Returns a tuple of the range and character kind of the word
3510 /// surrounding the given position.
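    ///
    /// A minimal usage sketch (not compiled as a doc test), assuming `snapshot` is a
    /// `BufferSnapshot` and `cursor_offset` is an offset inside an identifier:
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(cursor_offset, None);
    /// if kind == Some(CharKind::Word) {
    ///     let word: String = snapshot.text_for_range(range).collect();
    /// }
    /// ```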
3511 pub fn surrounding_word<T: ToOffset>(
3512 &self,
3513 start: T,
3514 scope_context: Option<CharScopeContext>,
3515 ) -> (Range<usize>, Option<CharKind>) {
3516 let mut start = start.to_offset(self);
3517 let mut end = start;
3518 let mut next_chars = self.chars_at(start).take(128).peekable();
3519 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3520
3521 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3522 let word_kind = cmp::max(
3523 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3524 next_chars.peek().copied().map(|c| classifier.kind(c)),
3525 );
3526
3527 for ch in prev_chars {
3528 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3529 start -= ch.len_utf8();
3530 } else {
3531 break;
3532 }
3533 }
3534
3535 for ch in next_chars {
3536 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3537 end += ch.len_utf8();
3538 } else {
3539 break;
3540 }
3541 }
3542
3543 (start..end, word_kind)
3544 }
3545
3546 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3547 /// range. When `require_larger` is true, the node found must be larger than the query range.
3548 ///
3549 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3550 /// be moved to the root of the tree.
3551 fn goto_node_enclosing_range(
3552 cursor: &mut tree_sitter::TreeCursor,
3553 query_range: &Range<usize>,
3554 require_larger: bool,
3555 ) -> bool {
3556 let mut ascending = false;
3557 loop {
3558 let mut range = cursor.node().byte_range();
3559 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3562 if range.start > query_range.start {
3563 cursor.goto_previous_sibling();
3564 range = cursor.node().byte_range();
3565 }
3566 } else {
3567 // When the query range is non-empty and the current node ends exactly at the start,
3568 // move to the next sibling to find a node that extends beyond the start.
3569 if range.end == query_range.start {
3570 cursor.goto_next_sibling();
3571 range = cursor.node().byte_range();
3572 }
3573 }
3574
3575 let encloses = range.contains_inclusive(query_range)
3576 && (!require_larger || range.len() > query_range.len());
3577 if !encloses {
3578 ascending = true;
3579 if !cursor.goto_parent() {
3580 return false;
3581 }
3582 continue;
3583 } else if ascending {
3584 return true;
3585 }
3586
3587 // Descend into the current node.
3588 if cursor
3589 .goto_first_child_for_byte(query_range.start)
3590 .is_none()
3591 {
3592 return true;
3593 }
3594 }
3595 }
3596
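    /// Returns the smallest syntax node, across all syntax layers, that encloses the given
    /// range and is strictly larger than it. For an empty range that falls exactly between
    /// two nodes, a named node is preferred over an anonymous token, and the node to the
    /// right wins ties.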
3597 pub fn syntax_ancestor<'a, T: ToOffset>(
3598 &'a self,
3599 range: Range<T>,
3600 ) -> Option<tree_sitter::Node<'a>> {
3601 let range = range.start.to_offset(self)..range.end.to_offset(self);
3602 let mut result: Option<tree_sitter::Node<'a>> = None;
3603 for layer in self
3604 .syntax
3605 .layers_for_range(range.clone(), &self.text, true)
3606 {
3607 let mut cursor = layer.node().walk();
3608
3609 // Find the node that both contains the range and is larger than it.
3610 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3611 continue;
3612 }
3613
3614 let left_node = cursor.node();
3615 let mut layer_result = left_node;
3616
3617 // For an empty range, try to find another node immediately to the right of the range.
3618 if left_node.end_byte() == range.start {
3619 let mut right_node = None;
3620 while !cursor.goto_next_sibling() {
3621 if !cursor.goto_parent() {
3622 break;
3623 }
3624 }
3625
3626 while cursor.node().start_byte() == range.start {
3627 right_node = Some(cursor.node());
3628 if !cursor.goto_first_child() {
3629 break;
3630 }
3631 }
3632
3633 // If there is a candidate node on both sides of the (empty) range, then
3634 // decide between the two by favoring a named node over an anonymous token.
3635 // If both nodes are the same in that regard, favor the right one.
3636 if let Some(right_node) = right_node
3637 && (right_node.is_named() || !left_node.is_named())
3638 {
3639 layer_result = right_node;
3640 }
3641 }
3642
3643 if let Some(previous_result) = &result
3644 && previous_result.byte_range().len() < layer_result.byte_range().len()
3645 {
3646 continue;
3647 }
3648 result = Some(layer_result);
3649 }
3650
3651 result
3652 }
3653
3654 /// Find the previous sibling syntax node at the given range.
3655 ///
3656 /// This function locates the syntax node that precedes the node containing
3657 /// the given range. It searches hierarchically by:
3658 /// 1. Finding the node that contains the given range
3659 /// 2. Looking for the previous sibling at the same tree level
3660 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3661 ///
3662 /// Returns `None` if there is no previous sibling at any ancestor level.
3663 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3664 &'a self,
3665 range: Range<T>,
3666 ) -> Option<tree_sitter::Node<'a>> {
3667 let range = range.start.to_offset(self)..range.end.to_offset(self);
3668 let mut result: Option<tree_sitter::Node<'a>> = None;
3669
3670 for layer in self
3671 .syntax
3672 .layers_for_range(range.clone(), &self.text, true)
3673 {
3674 let mut cursor = layer.node().walk();
3675
3676 // Find the node that contains the range
3677 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3678 continue;
3679 }
3680
3681 // Look for the previous sibling, moving up ancestor levels if needed
3682 loop {
3683 if cursor.goto_previous_sibling() {
3684 let layer_result = cursor.node();
3685
3686 if let Some(previous_result) = &result {
3687 if previous_result.byte_range().end < layer_result.byte_range().end {
3688 continue;
3689 }
3690 }
3691 result = Some(layer_result);
3692 break;
3693 }
3694
3695 // No sibling found at this level, try moving up to parent
3696 if !cursor.goto_parent() {
3697 break;
3698 }
3699 }
3700 }
3701
3702 result
3703 }
3704
3705 /// Find the next sibling syntax node at the given range.
3706 ///
3707 /// This function locates the syntax node that follows the node containing
3708 /// the given range. It searches hierarchically by:
3709 /// 1. Finding the node that contains the given range
3710 /// 2. Looking for the next sibling at the same tree level
3711 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3712 ///
3713 /// Returns `None` if there is no next sibling at any ancestor level.
3714 pub fn syntax_next_sibling<'a, T: ToOffset>(
3715 &'a self,
3716 range: Range<T>,
3717 ) -> Option<tree_sitter::Node<'a>> {
3718 let range = range.start.to_offset(self)..range.end.to_offset(self);
3719 let mut result: Option<tree_sitter::Node<'a>> = None;
3720
3721 for layer in self
3722 .syntax
3723 .layers_for_range(range.clone(), &self.text, true)
3724 {
3725 let mut cursor = layer.node().walk();
3726
3727 // Find the node that contains the range
3728 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3729 continue;
3730 }
3731
3732 // Look for the next sibling, moving up ancestor levels if needed
3733 loop {
3734 if cursor.goto_next_sibling() {
3735 let layer_result = cursor.node();
3736
3737 if let Some(previous_result) = &result {
3738 if previous_result.byte_range().start > layer_result.byte_range().start {
3739 continue;
3740 }
3741 }
3742 result = Some(layer_result);
3743 break;
3744 }
3745
3746 // No sibling found at this level, try moving up to parent
3747 if !cursor.goto_parent() {
3748 break;
3749 }
3750 }
3751 }
3752
3753 result
3754 }
3755
    /// Returns the root syntax node on the row containing the given position.
3757 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3758 let start_offset = position.to_offset(self);
3759
3760 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3761
3762 let layer = self
3763 .syntax
3764 .layers_for_range(start_offset..start_offset, &self.text, true)
3765 .next()?;
3766
3767 let mut cursor = layer.node().walk();
3768
3769 // Descend to the first leaf that touches the start of the range.
3770 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3771 if cursor.node().end_byte() == start_offset {
3772 cursor.goto_next_sibling();
3773 }
3774 }
3775
3776 // Ascend to the root node within the same row.
3777 while cursor.goto_parent() {
3778 if cursor.node().start_position().row != row {
3779 break;
3780 }
3781 }
3782
3783 Some(cursor.node())
3784 }
3785
3786 /// Returns the outline for the buffer.
3787 ///
3788 /// This method allows passing an optional [`SyntaxTheme`] to
3789 /// syntax-highlight the returned symbols.
3790 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3791 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3792 }
3793
3794 /// Returns all the symbols that contain the given position.
3795 ///
3796 /// This method allows passing an optional [`SyntaxTheme`] to
3797 /// syntax-highlight the returned symbols.
3798 pub fn symbols_containing<T: ToOffset>(
3799 &self,
3800 position: T,
3801 theme: Option<&SyntaxTheme>,
3802 ) -> Vec<OutlineItem<Anchor>> {
3803 let position = position.to_offset(self);
3804 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3805 let end = self.clip_offset(position + 1, Bias::Right);
3806 let mut items = self.outline_items_containing(start..end, false, theme);
3807 let mut prev_depth = None;
3808 items.retain(|item| {
3809 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3810 prev_depth = Some(item.depth);
3811 result
3812 });
3813 items
3814 }
3815
3816 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3817 let range = range.to_offset(self);
3818 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3819 grammar.outline_config.as_ref().map(|c| &c.query)
3820 });
3821 let configs = matches
3822 .grammars()
3823 .iter()
3824 .map(|g| g.outline_config.as_ref().unwrap())
3825 .collect::<Vec<_>>();
3826
3827 while let Some(mat) = matches.peek() {
3828 let config = &configs[mat.grammar_index];
3829 let containing_item_node = maybe!({
3830 let item_node = mat.captures.iter().find_map(|cap| {
3831 if cap.index == config.item_capture_ix {
3832 Some(cap.node)
3833 } else {
3834 None
3835 }
3836 })?;
3837
3838 let item_byte_range = item_node.byte_range();
3839 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3840 None
3841 } else {
3842 Some(item_node)
3843 }
3844 });
3845
3846 if let Some(item_node) = containing_item_node {
3847 return Some(
3848 Point::from_ts_point(item_node.start_position())
3849 ..Point::from_ts_point(item_node.end_position()),
3850 );
3851 }
3852
3853 matches.advance();
3854 }
3855 None
3856 }
3857
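    /// Returns the outline items whose ranges intersect the given range, with each item's
    /// `depth` derived from containment between items. When `include_extra_context` is true,
    /// extra-context captures are included in the item text.
    ///
    /// A minimal usage sketch (not compiled as a doc test), assuming `snapshot` is a
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// let items = snapshot.outline_items_containing(0..snapshot.len(), true, None);
    /// for item in &items {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```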
3858 pub fn outline_items_containing<T: ToOffset>(
3859 &self,
3860 range: Range<T>,
3861 include_extra_context: bool,
3862 theme: Option<&SyntaxTheme>,
3863 ) -> Vec<OutlineItem<Anchor>> {
3864 self.outline_items_containing_internal(
3865 range,
3866 include_extra_context,
3867 theme,
3868 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3869 )
3870 }
3871
3872 pub fn outline_items_as_points_containing<T: ToOffset>(
3873 &self,
3874 range: Range<T>,
3875 include_extra_context: bool,
3876 theme: Option<&SyntaxTheme>,
3877 ) -> Vec<OutlineItem<Point>> {
3878 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3879 range
3880 })
3881 }
3882
3883 fn outline_items_containing_internal<T: ToOffset, U>(
3884 &self,
3885 range: Range<T>,
3886 include_extra_context: bool,
3887 theme: Option<&SyntaxTheme>,
3888 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3889 ) -> Vec<OutlineItem<U>> {
3890 let range = range.to_offset(self);
3891 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3892 grammar.outline_config.as_ref().map(|c| &c.query)
3893 });
3894
3895 let mut items = Vec::new();
3896 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3897 while let Some(mat) = matches.peek() {
3898 let config = matches.grammars()[mat.grammar_index]
3899 .outline_config
3900 .as_ref()
3901 .unwrap();
3902 if let Some(item) =
3903 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3904 {
3905 items.push(item);
3906 } else if let Some(capture) = mat
3907 .captures
3908 .iter()
3909 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3910 {
3911 let capture_range = capture.node.start_position()..capture.node.end_position();
3912 let mut capture_row_range =
3913 capture_range.start.row as u32..capture_range.end.row as u32;
3914 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3915 {
3916 capture_row_range.end -= 1;
3917 }
3918 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3919 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3920 last_row_range.end = capture_row_range.end;
3921 } else {
3922 annotation_row_ranges.push(capture_row_range);
3923 }
3924 } else {
3925 annotation_row_ranges.push(capture_row_range);
3926 }
3927 }
3928 matches.advance();
3929 }
3930
3931 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3932
3933 // Assign depths based on containment relationships and convert to anchors.
3934 let mut item_ends_stack = Vec::<Point>::new();
3935 let mut anchor_items = Vec::new();
3936 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3937 for item in items {
3938 while let Some(last_end) = item_ends_stack.last().copied() {
3939 if last_end < item.range.end {
3940 item_ends_stack.pop();
3941 } else {
3942 break;
3943 }
3944 }
3945
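            // Attach the nearest preceding annotation (e.g. a doc comment) to this item when
            // its row range ends on the row immediately above the item.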
3946 let mut annotation_row_range = None;
3947 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3948 let row_preceding_item = item.range.start.row.saturating_sub(1);
3949 if next_annotation_row_range.end < row_preceding_item {
3950 annotation_row_ranges.next();
3951 } else {
3952 if next_annotation_row_range.end == row_preceding_item {
3953 annotation_row_range = Some(next_annotation_row_range.clone());
3954 annotation_row_ranges.next();
3955 }
3956 break;
3957 }
3958 }
3959
3960 anchor_items.push(OutlineItem {
3961 depth: item_ends_stack.len(),
3962 range: range_callback(self, item.range.clone()),
3963 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3964 text: item.text,
3965 highlight_ranges: item.highlight_ranges,
3966 name_ranges: item.name_ranges,
3967 body_range: item.body_range.map(|r| range_callback(self, r)),
3968 annotation_range: annotation_row_range.map(|annotation_range| {
3969 let point_range = Point::new(annotation_range.start, 0)
3970 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3971 range_callback(self, point_range)
3972 }),
3973 });
3974 item_ends_stack.push(item.range.end);
3975 }
3976
3977 anchor_items
3978 }
3979
3980 fn next_outline_item(
3981 &self,
3982 config: &OutlineConfig,
3983 mat: &SyntaxMapMatch,
3984 range: &Range<usize>,
3985 include_extra_context: bool,
3986 theme: Option<&SyntaxTheme>,
3987 ) -> Option<OutlineItem<Point>> {
3988 let item_node = mat.captures.iter().find_map(|cap| {
3989 if cap.index == config.item_capture_ix {
3990 Some(cap.node)
3991 } else {
3992 None
3993 }
3994 })?;
3995
3996 let item_byte_range = item_node.byte_range();
3997 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3998 return None;
3999 }
4000 let item_point_range = Point::from_ts_point(item_node.start_position())
4001 ..Point::from_ts_point(item_node.end_position());
4002
4003 let mut open_point = None;
4004 let mut close_point = None;
4005
4006 let mut buffer_ranges = Vec::new();
4007 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4008 let mut range = node.start_byte()..node.end_byte();
4009 let start = node.start_position();
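            // If the captured node spans multiple lines, truncate the range to the first line.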
4010 if node.end_position().row > start.row {
4011 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4012 }
4013
4014 if !range.is_empty() {
4015 buffer_ranges.push((range, node_is_name));
4016 }
4017 };
4018
4019 for capture in mat.captures {
4020 if capture.index == config.name_capture_ix {
4021 add_to_buffer_ranges(capture.node, true);
4022 } else if Some(capture.index) == config.context_capture_ix
4023 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4024 {
4025 add_to_buffer_ranges(capture.node, false);
4026 } else {
4027 if Some(capture.index) == config.open_capture_ix {
4028 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4029 } else if Some(capture.index) == config.close_capture_ix {
4030 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4031 }
4032 }
4033 }
4034
4035 if buffer_ranges.is_empty() {
4036 return None;
4037 }
4038 let source_range_for_text =
4039 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4040
4041 let mut text = String::new();
4042 let mut highlight_ranges = Vec::new();
4043 let mut name_ranges = Vec::new();
4044 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4045 let mut last_buffer_range_end = 0;
4046 for (buffer_range, is_name) in buffer_ranges {
4047 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4048 if space_added {
4049 text.push(' ');
4050 }
4051 let before_append_len = text.len();
4052 let mut offset = buffer_range.start;
4053 chunks.seek(buffer_range.clone());
4054 for mut chunk in chunks.by_ref() {
4055 if chunk.text.len() > buffer_range.end - offset {
4056 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4057 offset = buffer_range.end;
4058 } else {
4059 offset += chunk.text.len();
4060 }
4061 let style = chunk
4062 .syntax_highlight_id
4063 .zip(theme)
4064 .and_then(|(highlight, theme)| highlight.style(theme));
4065 if let Some(style) = style {
4066 let start = text.len();
4067 let end = start + chunk.text.len();
4068 highlight_ranges.push((start..end, style));
4069 }
4070 text.push_str(chunk.text);
4071 if offset >= buffer_range.end {
4072 break;
4073 }
4074 }
4075 if is_name {
4076 let after_append_len = text.len();
4077 let start = if space_added && !name_ranges.is_empty() {
4078 before_append_len - 1
4079 } else {
4080 before_append_len
4081 };
4082 name_ranges.push(start..after_append_len);
4083 }
4084 last_buffer_range_end = buffer_range.end;
4085 }
4086
4087 Some(OutlineItem {
4088 depth: 0, // We'll calculate the depth later
4089 range: item_point_range,
4090 source_range_for_text: source_range_for_text.to_point(self),
4091 text,
4092 highlight_ranges,
4093 name_ranges,
4094 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4095 annotation_range: None,
4096 })
4097 }
4098
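    /// Returns the offset ranges of function bodies ([`TextObject::InsideFunction`]) within
    /// the given range, suitable for folding function bodies.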
4099 pub fn function_body_fold_ranges<T: ToOffset>(
4100 &self,
4101 within: Range<T>,
4102 ) -> impl Iterator<Item = Range<usize>> + '_ {
4103 self.text_object_ranges(within, TreeSitterOptions::default())
4104 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4105 }
4106
    /// For each grammar in the buffer, runs the [`tree_sitter::Query`] returned by the
    /// provided callback against the given range.
4109 pub fn matches(
4110 &self,
4111 range: Range<usize>,
4112 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4113 ) -> SyntaxMapMatches<'_> {
4114 self.syntax.matches(range, self, query)
4115 }
4116
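    /// Returns every bracket pair captured by the brackets query whose overall span overlaps
    /// the given range, including pairs marked as newline-only.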
4117 pub fn all_bracket_ranges(
4118 &self,
4119 range: Range<usize>,
4120 ) -> impl Iterator<Item = BracketMatch> + '_ {
4121 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4122 grammar.brackets_config.as_ref().map(|c| &c.query)
4123 });
4124 let configs = matches
4125 .grammars()
4126 .iter()
4127 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4128 .collect::<Vec<_>>();
4129
4130 iter::from_fn(move || {
4131 while let Some(mat) = matches.peek() {
4132 let mut open = None;
4133 let mut close = None;
4134 let config = &configs[mat.grammar_index];
4135 let pattern = &config.patterns[mat.pattern_index];
4136 for capture in mat.captures {
4137 if capture.index == config.open_capture_ix {
4138 open = Some(capture.node.byte_range());
4139 } else if capture.index == config.close_capture_ix {
4140 close = Some(capture.node.byte_range());
4141 }
4142 }
4143
4144 matches.advance();
4145
4146 let Some((open_range, close_range)) = open.zip(close) else {
4147 continue;
4148 };
4149
4150 let bracket_range = open_range.start..=close_range.end;
4151 if !bracket_range.overlaps(&range) {
4152 continue;
4153 }
4154
4155 return Some(BracketMatch {
4156 open_range,
4157 close_range,
4158 newline_only: pattern.newline_only,
4159 });
4160 }
4161 None
4162 })
4163 }
4164
    /// Returns bracket range pairs overlapping or adjacent to `range`.
4166 pub fn bracket_ranges<T: ToOffset>(
4167 &self,
4168 range: Range<T>,
4169 ) -> impl Iterator<Item = BracketMatch> + '_ {
4170 // Find bracket pairs that *inclusively* contain the given range.
4171 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4172 self.all_bracket_ranges(range)
4173 .filter(|pair| !pair.newline_only)
4174 }
4175
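    /// Returns the ranges captured by each grammar's debugger-variables query that overlap
    /// the given range, coalescing captures of the same [`DebuggerTextObject`] into one range.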
4176 pub fn debug_variables_query<T: ToOffset>(
4177 &self,
4178 range: Range<T>,
4179 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4180 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4181
4182 let mut matches = self.syntax.matches_with_options(
4183 range.clone(),
4184 &self.text,
4185 TreeSitterOptions::default(),
4186 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4187 );
4188
4189 let configs = matches
4190 .grammars()
4191 .iter()
4192 .map(|grammar| grammar.debug_variables_config.as_ref())
4193 .collect::<Vec<_>>();
4194
4195 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4196
4197 iter::from_fn(move || {
4198 loop {
4199 while let Some(capture) = captures.pop() {
4200 if capture.0.overlaps(&range) {
4201 return Some(capture);
4202 }
4203 }
4204
4205 let mat = matches.peek()?;
4206
4207 let Some(config) = configs[mat.grammar_index].as_ref() else {
4208 matches.advance();
4209 continue;
4210 };
4211
4212 for capture in mat.captures {
4213 let Some(ix) = config
4214 .objects_by_capture_ix
4215 .binary_search_by_key(&capture.index, |e| e.0)
4216 .ok()
4217 else {
4218 continue;
4219 };
4220 let text_object = config.objects_by_capture_ix[ix].1;
4221 let byte_range = capture.node.byte_range();
4222
4223 let mut found = false;
4224 for (range, existing) in captures.iter_mut() {
4225 if existing == &text_object {
4226 range.start = range.start.min(byte_range.start);
4227 range.end = range.end.max(byte_range.end);
4228 found = true;
4229 break;
4230 }
4231 }
4232
4233 if !found {
4234 captures.push((byte_range, text_object));
4235 }
4236 }
4237
4238 matches.advance();
4239 }
4240 })
4241 }
4242
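    /// Returns the ranges of [`TextObject`] captures (e.g. [`TextObject::InsideFunction`])
    /// from each grammar's text-objects query that overlap the given range.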
4243 pub fn text_object_ranges<T: ToOffset>(
4244 &self,
4245 range: Range<T>,
4246 options: TreeSitterOptions,
4247 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4248 let range =
4249 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4250
4251 let mut matches =
4252 self.syntax
4253 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4254 grammar.text_object_config.as_ref().map(|c| &c.query)
4255 });
4256
4257 let configs = matches
4258 .grammars()
4259 .iter()
4260 .map(|grammar| grammar.text_object_config.as_ref())
4261 .collect::<Vec<_>>();
4262
4263 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4264
4265 iter::from_fn(move || {
4266 loop {
4267 while let Some(capture) = captures.pop() {
4268 if capture.0.overlaps(&range) {
4269 return Some(capture);
4270 }
4271 }
4272
4273 let mat = matches.peek()?;
4274
4275 let Some(config) = configs[mat.grammar_index].as_ref() else {
4276 matches.advance();
4277 continue;
4278 };
4279
4280 for capture in mat.captures {
4281 let Some(ix) = config
4282 .text_objects_by_capture_ix
4283 .binary_search_by_key(&capture.index, |e| e.0)
4284 .ok()
4285 else {
4286 continue;
4287 };
4288 let text_object = config.text_objects_by_capture_ix[ix].1;
4289 let byte_range = capture.node.byte_range();
4290
4291 let mut found = false;
4292 for (range, existing) in captures.iter_mut() {
4293 if existing == &text_object {
4294 range.start = range.start.min(byte_range.start);
4295 range.end = range.end.max(byte_range.end);
4296 found = true;
4297 break;
4298 }
4299 }
4300
4301 if !found {
4302 captures.push((byte_range, text_object));
4303 }
4304 }
4305
4306 matches.advance();
4307 }
4308 })
4309 }
4310
    /// Returns the enclosing bracket ranges containing the given range.
4312 pub fn enclosing_bracket_ranges<T: ToOffset>(
4313 &self,
4314 range: Range<T>,
4315 ) -> impl Iterator<Item = BracketMatch> + '_ {
4316 let range = range.start.to_offset(self)..range.end.to_offset(self);
4317
4318 self.bracket_ranges(range.clone()).filter(move |pair| {
4319 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4320 })
4321 }
4322
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None`
    /// if no brackets contain the range.
    ///
    /// An optional `range_filter` can be passed to restrict which bracket pairs are considered.
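    ///
    /// A minimal usage sketch (not compiled as a doc test), assuming `snapshot` is a
    /// `BufferSnapshot` and `cursor` is an offset inside a pair of parentheses:
    ///
    /// ```ignore
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(cursor..cursor, None) {
    ///     // `open` and `close` are the byte ranges of the innermost "(" and ")".
    /// }
    /// ```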
4326 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4327 &self,
4328 range: Range<T>,
4329 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4330 ) -> Option<(Range<usize>, Range<usize>)> {
4331 let range = range.start.to_offset(self)..range.end.to_offset(self);
4332
4333 // Get the ranges of the innermost pair of brackets.
4334 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4335
4336 for pair in self.enclosing_bracket_ranges(range) {
4337 if let Some(range_filter) = range_filter
4338 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4339 {
4340 continue;
4341 }
4342
4343 let len = pair.close_range.end - pair.open_range.start;
4344
4345 if let Some((existing_open, existing_close)) = &result {
4346 let existing_len = existing_close.end - existing_open.start;
4347 if len > existing_len {
4348 continue;
4349 }
4350 }
4351
4352 result = Some((pair.open_range, pair.close_range));
4353 }
4354
4355 result
4356 }
4357
    /// Returns offset ranges for any matches of the redaction query.
    /// The buffer can be associated with multiple languages, and the redaction query associated
    /// with each will be run on the relevant section of the buffer.
4361 pub fn redacted_ranges<T: ToOffset>(
4362 &self,
4363 range: Range<T>,
4364 ) -> impl Iterator<Item = Range<usize>> + '_ {
4365 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4366 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4367 grammar
4368 .redactions_config
4369 .as_ref()
4370 .map(|config| &config.query)
4371 });
4372
4373 let configs = syntax_matches
4374 .grammars()
4375 .iter()
4376 .map(|grammar| grammar.redactions_config.as_ref())
4377 .collect::<Vec<_>>();
4378
4379 iter::from_fn(move || {
4380 let redacted_range = syntax_matches
4381 .peek()
4382 .and_then(|mat| {
4383 configs[mat.grammar_index].and_then(|config| {
4384 mat.captures
4385 .iter()
4386 .find(|capture| capture.index == config.redaction_capture_ix)
4387 })
4388 })
4389 .map(|mat| mat.node.byte_range());
4390 syntax_matches.advance();
4391 redacted_range
4392 })
4393 }
4394
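    /// Returns the content range and resolved [`Language`] of each language injection
    /// (e.g. a code block embedded in another language) that intersects the given range.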
4395 pub fn injections_intersecting_range<T: ToOffset>(
4396 &self,
4397 range: Range<T>,
4398 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4399 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4400
4401 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4402 grammar
4403 .injection_config
4404 .as_ref()
4405 .map(|config| &config.query)
4406 });
4407
4408 let configs = syntax_matches
4409 .grammars()
4410 .iter()
4411 .map(|grammar| grammar.injection_config.as_ref())
4412 .collect::<Vec<_>>();
4413
4414 iter::from_fn(move || {
4415 let ranges = syntax_matches.peek().and_then(|mat| {
4416 let config = &configs[mat.grammar_index]?;
4417 let content_capture_range = mat.captures.iter().find_map(|capture| {
4418 if capture.index == config.content_capture_ix {
4419 Some(capture.node.byte_range())
4420 } else {
4421 None
4422 }
4423 })?;
4424 let language = self.language_at(content_capture_range.start)?;
4425 Some((content_capture_range, language))
4426 });
4427 syntax_matches.advance();
4428 ranges
4429 })
4430 }
4431
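    /// Returns the [`RunnableRange`]s detected by each grammar's runnables query within the
    /// given offset range, along with their tags and any extra captured text.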
4432 pub fn runnable_ranges(
4433 &self,
4434 offset_range: Range<usize>,
4435 ) -> impl Iterator<Item = RunnableRange> + '_ {
4436 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4437 grammar.runnable_config.as_ref().map(|config| &config.query)
4438 });
4439
4440 let test_configs = syntax_matches
4441 .grammars()
4442 .iter()
4443 .map(|grammar| grammar.runnable_config.as_ref())
4444 .collect::<Vec<_>>();
4445
4446 iter::from_fn(move || {
4447 loop {
4448 let mat = syntax_matches.peek()?;
4449
4450 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4451 let mut run_range = None;
4452 let full_range = mat.captures.iter().fold(
4453 Range {
4454 start: usize::MAX,
4455 end: 0,
4456 },
4457 |mut acc, next| {
4458 let byte_range = next.node.byte_range();
4459 if acc.start > byte_range.start {
4460 acc.start = byte_range.start;
4461 }
4462 if acc.end < byte_range.end {
4463 acc.end = byte_range.end;
4464 }
4465 acc
4466 },
4467 );
4468 if full_range.start > full_range.end {
4469 // We did not find a full spanning range of this match.
4470 return None;
4471 }
4472 let extra_captures: SmallVec<[_; 1]> =
4473 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4474 test_configs
4475 .extra_captures
4476 .get(capture.index as usize)
4477 .cloned()
4478 .and_then(|tag_name| match tag_name {
4479 RunnableCapture::Named(name) => {
4480 Some((capture.node.byte_range(), name))
4481 }
4482 RunnableCapture::Run => {
4483 let _ = run_range.insert(capture.node.byte_range());
4484 None
4485 }
4486 })
4487 }));
4488 let run_range = run_range?;
4489 let tags = test_configs
4490 .query
4491 .property_settings(mat.pattern_index)
4492 .iter()
4493 .filter_map(|property| {
4494 if *property.key == *"tag" {
4495 property
4496 .value
4497 .as_ref()
4498 .map(|value| RunnableTag(value.to_string().into()))
4499 } else {
4500 None
4501 }
4502 })
4503 .collect();
4504 let extra_captures = extra_captures
4505 .into_iter()
4506 .map(|(range, name)| {
4507 (
4508 name.to_string(),
4509 self.text_for_range(range).collect::<String>(),
4510 )
4511 })
4512 .collect();
4513 // All tags should have the same range.
4514 Some(RunnableRange {
4515 run_range,
4516 full_range,
4517 runnable: Runnable {
4518 tags,
4519 language: mat.language,
4520 buffer: self.remote_id(),
4521 },
4522 extra_captures,
4523 buffer_id: self.remote_id(),
4524 })
4525 });
4526
4527 syntax_matches.advance();
4528 if test_range.is_some() {
                // It's fine to short-circuit when .peek()? returns None. A match that did not
                // contain a run marker shouldn't end this iterator, though, so in that case we
                // just loop around to the next match.
4531 return test_range;
4532 }
4533 }
4534 })
4535 }
4536
    /// Returns the selections of each replica that intersect the given range, optionally
    /// including the local replica's selections.
4538 #[allow(clippy::type_complexity)]
4539 pub fn selections_in_range(
4540 &self,
4541 range: Range<Anchor>,
4542 include_local: bool,
4543 ) -> impl Iterator<
4544 Item = (
4545 ReplicaId,
4546 bool,
4547 CursorShape,
4548 impl Iterator<Item = &Selection<Anchor>> + '_,
4549 ),
4550 > + '_ {
4551 self.remote_selections
4552 .iter()
4553 .filter(move |(replica_id, set)| {
4554 (include_local || **replica_id != self.text.replica_id())
4555 && !set.selections.is_empty()
4556 })
4557 .map(move |(replica_id, set)| {
4558 let start_ix = match set.selections.binary_search_by(|probe| {
4559 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4560 }) {
4561 Ok(ix) | Err(ix) => ix,
4562 };
4563 let end_ix = match set.selections.binary_search_by(|probe| {
4564 probe.start.cmp(&range.end, self).then(Ordering::Less)
4565 }) {
4566 Ok(ix) | Err(ix) => ix,
4567 };
4568
4569 (
4570 *replica_id,
4571 set.line_mode,
4572 set.cursor_shape,
4573 set.selections[start_ix..end_ix].iter(),
4574 )
4575 })
4576 }
4577
    /// Returns whether the buffer contains any diagnostics.
4579 pub fn has_diagnostics(&self) -> bool {
4580 !self.diagnostics.is_empty()
4581 }
4582
4583 /// Returns all the diagnostics intersecting the given range.
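    ///
    /// A minimal usage sketch (not compiled as a doc test), assuming `snapshot` is a
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// let errors: Vec<_> = snapshot
    ///     .diagnostics_in_range::<_, usize>(0..snapshot.len(), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```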
4584 pub fn diagnostics_in_range<'a, T, O>(
4585 &'a self,
4586 search_range: Range<T>,
4587 reversed: bool,
4588 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4589 where
4590 T: 'a + Clone + ToOffset,
4591 O: 'a + FromAnchor,
4592 {
4593 let mut iterators: Vec<_> = self
4594 .diagnostics
4595 .iter()
4596 .map(|(_, collection)| {
4597 collection
4598 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4599 .peekable()
4600 })
4601 .collect();
4602
4603 std::iter::from_fn(move || {
4604 let (next_ix, _) = iterators
4605 .iter_mut()
4606 .enumerate()
4607 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4608 .min_by(|(_, a), (_, b)| {
4609 let cmp = a
4610 .range
4611 .start
4612 .cmp(&b.range.start, self)
4613 // when range is equal, sort by diagnostic severity
4614 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4615 // and stabilize order with group_id
4616 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4617 if reversed { cmp.reverse() } else { cmp }
4618 })?;
4619 iterators[next_ix]
4620 .next()
4621 .map(
4622 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4623 diagnostic,
4624 range: FromAnchor::from_anchor(&range.start, self)
4625 ..FromAnchor::from_anchor(&range.end, self),
4626 },
4627 )
4628 })
4629 }
4630
4631 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4632 /// should be used instead.
4633 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4634 &self.diagnostics
4635 }
4636
4637 /// Returns all the diagnostic groups associated with the given
4638 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4640 pub fn diagnostic_groups(
4641 &self,
4642 language_server_id: Option<LanguageServerId>,
4643 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4644 let mut groups = Vec::new();
4645
4646 if let Some(language_server_id) = language_server_id {
4647 if let Ok(ix) = self
4648 .diagnostics
4649 .binary_search_by_key(&language_server_id, |e| e.0)
4650 {
4651 self.diagnostics[ix]
4652 .1
4653 .groups(language_server_id, &mut groups, self);
4654 }
4655 } else {
4656 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4657 diagnostics.groups(*language_server_id, &mut groups, self);
4658 }
4659 }
4660
4661 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4662 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4663 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4664 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4665 });
4666
4667 groups
4668 }
4669
4670 /// Returns an iterator over the diagnostics for the given group.
4671 pub fn diagnostic_group<O>(
4672 &self,
4673 group_id: usize,
4674 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4675 where
4676 O: FromAnchor + 'static,
4677 {
4678 self.diagnostics
4679 .iter()
4680 .flat_map(move |(_, set)| set.group(group_id, self))
4681 }
4682
4683 /// An integer version number that accounts for all updates besides
4684 /// the buffer's text itself (which is versioned via a version vector).
4685 pub fn non_text_state_update_count(&self) -> usize {
4686 self.non_text_state_update_count
4687 }
4688
4689 /// An integer version that changes when the buffer's syntax changes.
4690 pub fn syntax_update_count(&self) -> usize {
4691 self.syntax.update_count()
4692 }
4693
    /// Returns a snapshot of the underlying file.
4695 pub fn file(&self) -> Option<&Arc<dyn File>> {
4696 self.file.as_ref()
4697 }
4698
4699 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4700 if let Some(file) = self.file() {
4701 if file.path().file_name().is_none() || include_root {
4702 Some(file.full_path(cx).to_string_lossy().into_owned())
4703 } else {
4704 Some(file.path().display(file.path_style(cx)).to_string())
4705 }
4706 } else {
4707 None
4708 }
4709 }
4710
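    /// Returns the distinct words in the given range as a map from word text to anchor range,
    /// optionally filtered by [`WordsQuery::fuzzy_contents`].
    ///
    /// A minimal usage sketch (not compiled as a doc test), assuming `snapshot` is a
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```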
4711 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4712 let query_str = query.fuzzy_contents;
4713 if query_str.is_some_and(|query| query.is_empty()) {
4714 return BTreeMap::default();
4715 }
4716
4717 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4718 language,
4719 override_id: None,
4720 }));
4721
4722 let mut query_ix = 0;
4723 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4724 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4725
4726 let mut words = BTreeMap::default();
4727 let mut current_word_start_ix = None;
4728 let mut chunk_ix = query.range.start;
4729 for chunk in self.chunks(query.range, false) {
4730 for (i, c) in chunk.text.char_indices() {
4731 let ix = chunk_ix + i;
4732 if classifier.is_word(c) {
4733 if current_word_start_ix.is_none() {
4734 current_word_start_ix = Some(ix);
4735 }
4736
4737 if let Some(query_chars) = &query_chars
4738 && query_ix < query_len
4739 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4740 {
4741 query_ix += 1;
4742 }
4743 continue;
4744 } else if let Some(word_start) = current_word_start_ix.take()
4745 && query_ix == query_len
4746 {
4747 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4748 let mut word_text = self.text_for_range(word_start..ix).peekable();
4749 let first_char = word_text
4750 .peek()
4751 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, optionally skip
                    // "words" that start with a digit.
4753 if !query.skip_digits
4754 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4755 {
4756 words.insert(word_text.collect(), word_range);
4757 }
4758 }
4759 query_ix = 0;
4760 }
4761 chunk_ix += chunk.text.len();
4762 }
4763
4764 words
4765 }
4766}
4767
4768pub struct WordsQuery<'a> {
    /// Only returns words that contain all of the characters of the fuzzy string, in order
    /// (matched case-insensitively).
4770 pub fuzzy_contents: Option<&'a str>,
4771 /// Skips words that start with a digit.
4772 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4774 pub range: Range<usize>,
4775}
4776
4777fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4778 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4779}
4780
4781fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4782 let mut result = IndentSize::spaces(0);
4783 for c in text {
4784 let kind = match c {
4785 ' ' => IndentKind::Space,
4786 '\t' => IndentKind::Tab,
4787 _ => break,
4788 };
4789 if result.len == 0 {
4790 result.kind = kind;
4791 }
4792 result.len += 1;
4793 }
4794 result
4795}
4796
4797impl Clone for BufferSnapshot {
4798 fn clone(&self) -> Self {
4799 Self {
4800 text: self.text.clone(),
4801 syntax: self.syntax.clone(),
4802 file: self.file.clone(),
4803 remote_selections: self.remote_selections.clone(),
4804 diagnostics: self.diagnostics.clone(),
4805 language: self.language.clone(),
4806 non_text_state_update_count: self.non_text_state_update_count,
4807 }
4808 }
4809}
4810
4811impl Deref for BufferSnapshot {
4812 type Target = text::BufferSnapshot;
4813
4814 fn deref(&self) -> &Self::Target {
4815 &self.text
4816 }
4817}
4818
4819unsafe impl Send for BufferChunks<'_> {}
4820
4821impl<'a> BufferChunks<'a> {
4822 pub(crate) fn new(
4823 text: &'a Rope,
4824 range: Range<usize>,
4825 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4826 diagnostics: bool,
4827 buffer_snapshot: Option<&'a BufferSnapshot>,
4828 ) -> Self {
4829 let mut highlights = None;
4830 if let Some((captures, highlight_maps)) = syntax {
4831 highlights = Some(BufferChunkHighlights {
4832 captures,
4833 next_capture: None,
4834 stack: Default::default(),
4835 highlight_maps,
4836 })
4837 }
4838
4839 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4840 let chunks = text.chunks_in_range(range.clone());
4841
4842 let mut this = BufferChunks {
4843 range,
4844 buffer_snapshot,
4845 chunks,
4846 diagnostic_endpoints,
4847 error_depth: 0,
4848 warning_depth: 0,
4849 information_depth: 0,
4850 hint_depth: 0,
4851 unnecessary_depth: 0,
4852 underline: true,
4853 highlights,
4854 };
4855 this.initialize_diagnostic_endpoints();
4856 this
4857 }
4858
    /// Seeks to the given byte range in the buffer.
4860 pub fn seek(&mut self, range: Range<usize>) {
4861 let old_range = std::mem::replace(&mut self.range, range.clone());
4862 self.chunks.set_range(self.range.clone());
4863 if let Some(highlights) = self.highlights.as_mut() {
4864 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4865 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4866 highlights
4867 .stack
4868 .retain(|(end_offset, _)| *end_offset > range.start);
4869 if let Some(capture) = &highlights.next_capture
4870 && range.start >= capture.node.start_byte()
4871 {
4872 let next_capture_end = capture.node.end_byte();
4873 if range.start < next_capture_end {
4874 highlights.stack.push((
4875 next_capture_end,
4876 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4877 ));
4878 }
4879 highlights.next_capture.take();
4880 }
4881 } else if let Some(snapshot) = self.buffer_snapshot {
4882 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4883 *highlights = BufferChunkHighlights {
4884 captures,
4885 next_capture: None,
4886 stack: Default::default(),
4887 highlight_maps,
4888 };
4889 } else {
4890 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4891 // Seeking such BufferChunks is not supported.
4892 debug_assert!(
4893 false,
4894 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4895 );
4896 }
4897
4898 highlights.captures.set_byte_range(self.range.clone());
4899 self.initialize_diagnostic_endpoints();
4900 }
4901 }
4902
4903 fn initialize_diagnostic_endpoints(&mut self) {
4904 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4905 && let Some(buffer) = self.buffer_snapshot
4906 {
4907 let mut diagnostic_endpoints = Vec::new();
4908 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4909 diagnostic_endpoints.push(DiagnosticEndpoint {
4910 offset: entry.range.start,
4911 is_start: true,
4912 severity: entry.diagnostic.severity,
4913 is_unnecessary: entry.diagnostic.is_unnecessary,
4914 underline: entry.diagnostic.underline,
4915 });
4916 diagnostic_endpoints.push(DiagnosticEndpoint {
4917 offset: entry.range.end,
4918 is_start: false,
4919 severity: entry.diagnostic.severity,
4920 is_unnecessary: entry.diagnostic.is_unnecessary,
4921 underline: entry.diagnostic.underline,
4922 });
4923 }
4924 diagnostic_endpoints
4925 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4926 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4927 self.hint_depth = 0;
4928 self.error_depth = 0;
4929 self.warning_depth = 0;
4930 self.information_depth = 0;
4931 }
4932 }
4933
4934 /// The current byte offset in the buffer.
4935 pub fn offset(&self) -> usize {
4936 self.range.start
4937 }
4938
4939 pub fn range(&self) -> Range<usize> {
4940 self.range.clone()
4941 }
4942
4943 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4944 let depth = match endpoint.severity {
4945 DiagnosticSeverity::ERROR => &mut self.error_depth,
4946 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4947 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4948 DiagnosticSeverity::HINT => &mut self.hint_depth,
4949 _ => return,
4950 };
4951 if endpoint.is_start {
4952 *depth += 1;
4953 } else {
4954 *depth -= 1;
4955 }
4956
4957 if endpoint.is_unnecessary {
4958 if endpoint.is_start {
4959 self.unnecessary_depth += 1;
4960 } else {
4961 self.unnecessary_depth -= 1;
4962 }
4963 }
4964 }
4965
4966 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4967 if self.error_depth > 0 {
4968 Some(DiagnosticSeverity::ERROR)
4969 } else if self.warning_depth > 0 {
4970 Some(DiagnosticSeverity::WARNING)
4971 } else if self.information_depth > 0 {
4972 Some(DiagnosticSeverity::INFORMATION)
4973 } else if self.hint_depth > 0 {
4974 Some(DiagnosticSeverity::HINT)
4975 } else {
4976 None
4977 }
4978 }
4979
4980 fn current_code_is_unnecessary(&self) -> bool {
4981 self.unnecessary_depth > 0
4982 }
4983}
4984
4985impl<'a> Iterator for BufferChunks<'a> {
4986 type Item = Chunk<'a>;
4987
4988 fn next(&mut self) -> Option<Self::Item> {
4989 let mut next_capture_start = usize::MAX;
4990 let mut next_diagnostic_endpoint = usize::MAX;
4991
4992 if let Some(highlights) = self.highlights.as_mut() {
4993 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4994 if *parent_capture_end <= self.range.start {
4995 highlights.stack.pop();
4996 } else {
4997 break;
4998 }
4999 }
5000
5001 if highlights.next_capture.is_none() {
5002 highlights.next_capture = highlights.captures.next();
5003 }
5004
5005 while let Some(capture) = highlights.next_capture.as_ref() {
5006 if self.range.start < capture.node.start_byte() {
5007 next_capture_start = capture.node.start_byte();
5008 break;
5009 } else {
5010 let highlight_id =
5011 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5012 highlights
5013 .stack
5014 .push((capture.node.end_byte(), highlight_id));
5015 highlights.next_capture = highlights.captures.next();
5016 }
5017 }
5018 }
5019
5020 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5021 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5022 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5023 if endpoint.offset <= self.range.start {
5024 self.update_diagnostic_depths(endpoint);
5025 diagnostic_endpoints.next();
5026 self.underline = endpoint.underline;
5027 } else {
5028 next_diagnostic_endpoint = endpoint.offset;
5029 break;
5030 }
5031 }
5032 }
5033 self.diagnostic_endpoints = diagnostic_endpoints;
5034
5035 if let Some(ChunkBitmaps {
5036 text: chunk,
5037 chars: chars_map,
5038 tabs,
5039 }) = self.chunks.peek_with_bitmaps()
5040 {
5041 let chunk_start = self.range.start;
5042 let mut chunk_end = (self.chunks.offset() + chunk.len())
5043 .min(next_capture_start)
5044 .min(next_diagnostic_endpoint);
5045 let mut highlight_id = None;
5046 if let Some(highlights) = self.highlights.as_ref()
5047 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5048 {
5049 chunk_end = chunk_end.min(*parent_capture_end);
5050 highlight_id = Some(*parent_highlight_id);
5051 }
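            // Convert the absolute byte offsets into chunk-relative positions, then slice the
            // text and mask the per-byte `tabs` and `chars` bitmaps to match.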
5052 let bit_start = chunk_start - self.chunks.offset();
5053 let bit_end = chunk_end - self.chunks.offset();
5054
5055 let slice = &chunk[bit_start..bit_end];
5056
5057 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5058 let tabs = (tabs >> bit_start) & mask;
5059 let chars = (chars_map >> bit_start) & mask;
5060
5061 self.range.start = chunk_end;
5062 if self.range.start == self.chunks.offset() + chunk.len() {
5063 self.chunks.next().unwrap();
5064 }
5065
5066 Some(Chunk {
5067 text: slice,
5068 syntax_highlight_id: highlight_id,
5069 underline: self.underline,
5070 diagnostic_severity: self.current_diagnostic_severity(),
5071 is_unnecessary: self.current_code_is_unnecessary(),
5072 tabs,
5073 chars,
5074 ..Chunk::default()
5075 })
5076 } else {
5077 None
5078 }
5079 }
5080}
5081
5082impl operation_queue::Operation for Operation {
5083 fn lamport_timestamp(&self) -> clock::Lamport {
5084 match self {
5085 Operation::Buffer(_) => {
5086 unreachable!("buffer operations should never be deferred at this layer")
5087 }
5088 Operation::UpdateDiagnostics {
5089 lamport_timestamp, ..
5090 }
5091 | Operation::UpdateSelections {
5092 lamport_timestamp, ..
5093 }
5094 | Operation::UpdateCompletionTriggers {
5095 lamport_timestamp, ..
5096 }
5097 | Operation::UpdateLineEnding {
5098 lamport_timestamp, ..
5099 } => *lamport_timestamp,
5100 }
5101 }
5102}
5103
5104impl Default for Diagnostic {
5105 fn default() -> Self {
5106 Self {
5107 source: Default::default(),
5108 source_kind: DiagnosticSourceKind::Other,
5109 code: None,
5110 code_description: None,
5111 severity: DiagnosticSeverity::ERROR,
5112 message: Default::default(),
5113 markdown: None,
5114 group_id: 0,
5115 is_primary: false,
5116 is_disk_based: false,
5117 is_unnecessary: false,
5118 underline: true,
5119 data: None,
5120 }
5121 }
5122}
5123
5124impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5126 pub fn spaces(len: u32) -> Self {
5127 Self {
5128 len,
5129 kind: IndentKind::Space,
5130 }
5131 }
5132
5133 /// Returns an [`IndentSize`] representing a tab.
5134 pub fn tab() -> Self {
5135 Self {
5136 len: 1,
5137 kind: IndentKind::Tab,
5138 }
5139 }
5140
5141 /// An iterator over the characters represented by this [`IndentSize`].
5142 pub fn chars(&self) -> impl Iterator<Item = char> {
5143 iter::repeat(self.char()).take(self.len as usize)
5144 }
5145
5146 /// The character representation of this [`IndentSize`].
5147 pub fn char(&self) -> char {
5148 match self.kind {
5149 IndentKind::Space => ' ',
5150 IndentKind::Tab => '\t',
5151 }
5152 }
5153
5154 /// Consumes the current [`IndentSize`] and returns a new one that has
5155 /// been shrunk or enlarged by the given size along the given direction.
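    ///
    /// For example, `IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2))`
    /// yields an indent of two spaces.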
5156 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5157 match direction {
5158 Ordering::Less => {
5159 if self.kind == size.kind && self.len >= size.len {
5160 self.len -= size.len;
5161 }
5162 }
5163 Ordering::Equal => {}
5164 Ordering::Greater => {
5165 if self.len == 0 {
5166 self = size;
5167 } else if self.kind == size.kind {
5168 self.len += size.len;
5169 }
5170 }
5171 }
5172 self
5173 }
5174
5175 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5176 match self.kind {
5177 IndentKind::Space => self.len as usize,
5178 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5179 }
5180 }
5181}
5182
5183#[cfg(any(test, feature = "test-support"))]
5184pub struct TestFile {
5185 pub path: Arc<RelPath>,
5186 pub root_name: String,
5187 pub local_root: Option<PathBuf>,
5188}
5189
5190#[cfg(any(test, feature = "test-support"))]
5191impl File for TestFile {
5192 fn path(&self) -> &Arc<RelPath> {
5193 &self.path
5194 }
5195
5196 fn full_path(&self, _: &gpui::App) -> PathBuf {
5197 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5198 }
5199
5200 fn as_local(&self) -> Option<&dyn LocalFile> {
5201 if self.local_root.is_some() {
5202 Some(self)
5203 } else {
5204 None
5205 }
5206 }
5207
5208 fn disk_state(&self) -> DiskState {
5209 unimplemented!()
5210 }
5211
5212 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5213 self.path().file_name().unwrap_or(self.root_name.as_ref())
5214 }
5215
5216 fn worktree_id(&self, _: &App) -> WorktreeId {
5217 WorktreeId::from_usize(0)
5218 }
5219
5220 fn to_proto(&self, _: &App) -> rpc::proto::File {
5221 unimplemented!()
5222 }
5223
5224 fn is_private(&self) -> bool {
5225 false
5226 }
5227
5228 fn path_style(&self, _cx: &App) -> PathStyle {
5229 PathStyle::local()
5230 }
5231}
5232
5233#[cfg(any(test, feature = "test-support"))]
5234impl LocalFile for TestFile {
5235 fn abs_path(&self, _cx: &App) -> PathBuf {
5236 PathBuf::from(self.local_root.as_ref().unwrap())
5237 .join(&self.root_name)
5238 .join(self.path.as_std_path())
5239 }
5240
5241 fn load(&self, _cx: &App) -> Task<Result<String>> {
5242 unimplemented!()
5243 }
5244
5245 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5246 unimplemented!()
5247 }
5248}
5249
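/// Collapses an iterator of `u32` values (e.g. row numbers) into contiguous ranges, capping
/// each range at `max_len` elements.
///
/// A minimal sketch: `contiguous_ranges([1, 2, 3, 5].into_iter(), 100)` yields `1..4`
/// followed by `5..6`.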
5250pub(crate) fn contiguous_ranges(
5251 values: impl Iterator<Item = u32>,
5252 max_len: usize,
5253) -> impl Iterator<Item = Range<u32>> {
5254 let mut values = values;
5255 let mut current_range: Option<Range<u32>> = None;
5256 std::iter::from_fn(move || {
5257 loop {
5258 if let Some(value) = values.next() {
5259 if let Some(range) = &mut current_range
5260 && value == range.end
5261 && range.len() < max_len
5262 {
5263 range.end += 1;
5264 continue;
5265 }
5266
5267 let prev_range = current_range.clone();
5268 current_range = Some(value..(value + 1));
5269 if prev_range.is_some() {
5270 return prev_range;
5271 }
5272 } else {
5273 return current_range.take();
5274 }
5275 }
5276 })
5277}
5278
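/// Classifies characters as word, whitespace, or punctuation characters, taking any extra
/// word characters defined by the language scope into account.
///
/// A minimal usage sketch (not compiled as a doc test):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
/// ```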
5279#[derive(Default, Debug)]
5280pub struct CharClassifier {
5281 scope: Option<LanguageScope>,
5282 scope_context: Option<CharScopeContext>,
5283 ignore_punctuation: bool,
5284}
5285
5286impl CharClassifier {
5287 pub fn new(scope: Option<LanguageScope>) -> Self {
5288 Self {
5289 scope,
5290 scope_context: None,
5291 ignore_punctuation: false,
5292 }
5293 }
5294
5295 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5296 Self {
5297 scope_context,
5298 ..self
5299 }
5300 }
5301
5302 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5303 Self {
5304 ignore_punctuation,
5305 ..self
5306 }
5307 }
5308
5309 pub fn is_whitespace(&self, c: char) -> bool {
5310 self.kind(c) == CharKind::Whitespace
5311 }
5312
5313 pub fn is_word(&self, c: char) -> bool {
5314 self.kind(c) == CharKind::Word
5315 }
5316
5317 pub fn is_punctuation(&self, c: char) -> bool {
5318 self.kind(c) == CharKind::Punctuation
5319 }
5320
5321 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5322 if c.is_alphanumeric() || c == '_' {
5323 return CharKind::Word;
5324 }
5325
5326 if let Some(scope) = &self.scope {
5327 let characters = match self.scope_context {
5328 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5329 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5330 None => scope.word_characters(),
5331 };
5332 if let Some(characters) = characters
5333 && characters.contains(&c)
5334 {
5335 return CharKind::Word;
5336 }
5337 }
5338
5339 if c.is_whitespace() {
5340 return CharKind::Whitespace;
5341 }
5342
5343 if ignore_punctuation {
5344 CharKind::Word
5345 } else {
5346 CharKind::Punctuation
5347 }
5348 }
5349
5350 pub fn kind(&self, c: char) -> CharKind {
5351 self.kind_with(c, self.ignore_punctuation)
5352 }
5353}
5354
5355/// Find all of the ranges of whitespace that occur at the ends of lines
5356/// in the given rope.
5357///
5358/// This could also be done with a regex search, but this implementation
5359/// avoids copying text.
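///
/// A minimal usage sketch (not compiled as a doc test), assuming `Rope` can be constructed
/// from a `&str`:
///
/// ```ignore
/// let rope = Rope::from("let x = 1;  \nlet y = 2;\t\n");
/// // Yields the ranges of the trailing "  " and "\t": [10..12, 23..24].
/// let ranges = trailing_whitespace_ranges(&rope);
/// ```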
5360pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5361 let mut ranges = Vec::new();
5362
5363 let mut offset = 0;
5364 let mut prev_chunk_trailing_whitespace_range = 0..0;
5365 for chunk in rope.chunks() {
5366 let mut prev_line_trailing_whitespace_range = 0..0;
5367 for (i, line) in chunk.split('\n').enumerate() {
5368 let line_end_offset = offset + line.len();
5369 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5370 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5371
5372 if i == 0 && trimmed_line_len == 0 {
5373 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5374 }
5375 if !prev_line_trailing_whitespace_range.is_empty() {
5376 ranges.push(prev_line_trailing_whitespace_range);
5377 }
5378
5379 offset = line_end_offset + 1;
5380 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5381 }
5382
5383 offset -= 1;
5384 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5385 }
5386
5387 if !prev_chunk_trailing_whitespace_range.is_empty() {
5388 ranges.push(prev_chunk_trailing_whitespace_range);
5389 }
5390
5391 ranges
5392}