1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 future::Future,
45 iter::{self, Iterator, Peekable},
46 mem,
47 num::NonZeroU32,
48 ops::{Deref, Range},
49 path::PathBuf,
50 rc,
51 sync::{Arc, LazyLock},
52 time::{Duration, Instant},
53 vec,
54};
55use sum_tree::TreeMap;
56use text::operation_queue::OperationQueue;
57use text::*;
58pub use text::{
59 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
60 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
61 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
62 ToPointUtf16, Transaction, TransactionId, Unclipped,
63};
64use theme::{ActiveTheme as _, SyntaxTheme};
65#[cfg(any(test, feature = "test-support"))]
66use util::RandomCharIter;
67use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
68
69#[cfg(any(test, feature = "test-support"))]
70pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
71
72pub use lsp::DiagnosticSeverity;
73
74/// A label for the background task spawned by the buffer to compute
75/// a diff against the contents of its file.
76pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
77
/// Indicates whether a [`Buffer`] has permission to edit.
79#[derive(PartialEq, Clone, Copy, Debug)]
80pub enum Capability {
81 /// The buffer is a mutable replica.
82 ReadWrite,
83 /// The buffer is a read-only replica.
84 ReadOnly,
85}
86
87pub type BufferRow = u32;
88
89/// An in-memory representation of a source code file, including its text,
90/// syntax trees, git status, and diagnostics.
91pub struct Buffer {
92 text: TextBuffer,
93 branch_state: Option<BufferBranchState>,
94 /// Filesystem state, `None` when there is no path.
95 file: Option<Arc<dyn File>>,
96 /// The mtime of the file when this buffer was last loaded from
97 /// or saved to disk.
98 saved_mtime: Option<MTime>,
99 /// The version vector when this buffer was last loaded from
100 /// or saved to disk.
101 saved_version: clock::Global,
102 preview_version: clock::Global,
103 transaction_depth: usize,
104 was_dirty_before_starting_transaction: Option<bool>,
105 reload_task: Option<Task<Result<()>>>,
106 language: Option<Arc<Language>>,
107 autoindent_requests: Vec<Arc<AutoindentRequest>>,
108 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
109 pending_autoindent: Option<Task<()>>,
110 sync_parse_timeout: Duration,
111 syntax_map: Mutex<SyntaxMap>,
112 reparse: Option<Task<()>>,
113 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
114 non_text_state_update_count: usize,
115 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
116 remote_selections: TreeMap<ReplicaId, SelectionSet>,
117 diagnostics_timestamp: clock::Lamport,
118 completion_triggers: BTreeSet<String>,
119 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
120 completion_triggers_timestamp: clock::Lamport,
121 deferred_ops: OperationQueue<Operation>,
122 capability: Capability,
123 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` as of the last call.
126 has_unsaved_edits: Cell<(clock::Global, bool)>,
127 change_bits: Vec<rc::Weak<Cell<bool>>>,
128 _subscriptions: Vec<gpui::Subscription>,
129}
130
131#[derive(Copy, Clone, Debug, PartialEq, Eq)]
132pub enum ParseStatus {
133 Idle,
134 Parsing,
135}
136
137struct BufferBranchState {
138 base_buffer: Entity<Buffer>,
139 merged_operations: Vec<Lamport>,
140}
141
142/// An immutable, cheaply cloneable representation of a fixed
143/// state of a buffer.
144pub struct BufferSnapshot {
145 pub text: text::BufferSnapshot,
146 pub syntax: SyntaxSnapshot,
147 file: Option<Arc<dyn File>>,
148 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
149 remote_selections: TreeMap<ReplicaId, SelectionSet>,
150 language: Option<Arc<Language>>,
151 non_text_state_update_count: usize,
152}
153
154/// The kind and amount of indentation in a particular line. For now,
155/// assumes that indentation is all the same character.
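///
/// For example, a line indented with two spaces corresponds to
/// `IndentSize { len: 2, kind: IndentKind::Space }`.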
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
157pub struct IndentSize {
158 /// The number of bytes that comprise the indentation.
159 pub len: u32,
160 /// The kind of whitespace used for indentation.
161 pub kind: IndentKind,
162}
163
164/// A whitespace character that's used for indentation.
165#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
166pub enum IndentKind {
167 /// An ASCII space character.
168 #[default]
169 Space,
170 /// An ASCII tab character.
171 Tab,
172}
173
174/// The shape of a selection cursor.
175#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
176pub enum CursorShape {
177 /// A vertical bar
178 #[default]
179 Bar,
180 /// A block that surrounds the following character
181 Block,
182 /// An underline that runs along the following character
183 Underline,
184 /// A box drawn around the following character
185 Hollow,
186}
187
188impl From<settings::CursorShape> for CursorShape {
189 fn from(shape: settings::CursorShape) -> Self {
190 match shape {
191 settings::CursorShape::Bar => CursorShape::Bar,
192 settings::CursorShape::Block => CursorShape::Block,
193 settings::CursorShape::Underline => CursorShape::Underline,
194 settings::CursorShape::Hollow => CursorShape::Hollow,
195 }
196 }
197}
198
199#[derive(Clone, Debug)]
200struct SelectionSet {
201 line_mode: bool,
202 cursor_shape: CursorShape,
203 selections: Arc<[Selection<Anchor>]>,
204 lamport_timestamp: clock::Lamport,
205}
206
207/// A diagnostic associated with a certain range of a buffer.
208#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
209pub struct Diagnostic {
210 /// The name of the service that produced this diagnostic.
211 pub source: Option<String>,
212 /// A machine-readable code that identifies this diagnostic.
213 pub code: Option<NumberOrString>,
    /// A URL with more information about this diagnostic's code.
    pub code_description: Option<lsp::Uri>,
215 /// Whether this diagnostic is a hint, warning, or error.
216 pub severity: DiagnosticSeverity,
217 /// The human-readable message associated with this diagnostic.
218 pub message: String,
    /// The human-readable message in Markdown format, if available.
220 pub markdown: Option<String>,
221 /// An id that identifies the group to which this diagnostic belongs.
222 ///
223 /// When a language server produces a diagnostic with
224 /// one or more associated diagnostics, those diagnostics are all
225 /// assigned a single group ID.
226 pub group_id: usize,
227 /// Whether this diagnostic is the primary diagnostic for its group.
228 ///
229 /// In a given group, the primary diagnostic is the top-level diagnostic
230 /// returned by the language server. The non-primary diagnostics are the
231 /// associated diagnostics.
232 pub is_primary: bool,
233 /// Whether this diagnostic is considered to originate from an analysis of
234 /// files on disk, as opposed to any unsaved buffer contents. This is a
235 /// property of a given diagnostic source, and is configured for a given
236 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
237 /// for the language server.
238 pub is_disk_based: bool,
239 /// Whether this diagnostic marks unnecessary code.
240 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups by their source.
242 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. It is passed back
    /// to the server when code actions are requested for this diagnostic.
244 pub data: Option<Value>,
245 /// Whether to underline the corresponding text range in the editor.
246 pub underline: bool,
247}
248
249#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
250pub enum DiagnosticSourceKind {
251 Pulled,
252 Pushed,
253 Other,
254}
255
256/// An operation used to synchronize this buffer with its other replicas.
257#[derive(Clone, Debug, PartialEq)]
258pub enum Operation {
259 /// A text operation.
260 Buffer(text::Operation),
261
262 /// An update to the buffer's diagnostics.
263 UpdateDiagnostics {
264 /// The id of the language server that produced the new diagnostics.
265 server_id: LanguageServerId,
266 /// The diagnostics.
267 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
268 /// The buffer's lamport timestamp.
269 lamport_timestamp: clock::Lamport,
270 },
271
272 /// An update to the most recent selections in this buffer.
273 UpdateSelections {
274 /// The selections.
275 selections: Arc<[Selection<Anchor>]>,
276 /// The buffer's lamport timestamp.
277 lamport_timestamp: clock::Lamport,
278 /// Whether the selections are in 'line mode'.
279 line_mode: bool,
280 /// The [`CursorShape`] associated with these selections.
281 cursor_shape: CursorShape,
282 },
283
284 /// An update to the characters that should trigger autocompletion
285 /// for this buffer.
286 UpdateCompletionTriggers {
287 /// The characters that trigger autocompletion.
288 triggers: Vec<String>,
289 /// The buffer's lamport timestamp.
290 lamport_timestamp: clock::Lamport,
291 /// The language server ID.
292 server_id: LanguageServerId,
293 },
294
295 /// An update to the line ending type of this buffer.
296 UpdateLineEnding {
297 /// The line ending type.
298 line_ending: LineEnding,
299 /// The buffer's lamport timestamp.
300 lamport_timestamp: clock::Lamport,
301 },
302}
303
304/// An event that occurs in a buffer.
305#[derive(Clone, Debug, PartialEq)]
306pub enum BufferEvent {
307 /// The buffer was changed in a way that must be
308 /// propagated to its other replicas.
309 Operation {
310 operation: Operation,
311 is_local: bool,
312 },
313 /// The buffer was edited.
314 Edited,
315 /// The buffer's `dirty` bit changed.
316 DirtyChanged,
317 /// The buffer was saved.
318 Saved,
319 /// The buffer's file was changed on disk.
320 FileHandleChanged,
321 /// The buffer was reloaded.
322 Reloaded,
    /// The buffer needs to be reloaded.
324 ReloadNeeded,
325 /// The buffer's language was changed.
326 LanguageChanged,
327 /// The buffer's syntax trees were updated.
328 Reparsed,
329 /// The buffer's diagnostics were updated.
330 DiagnosticsUpdated,
331 /// The buffer gained or lost editing capabilities.
332 CapabilityChanged,
333}
334
335/// The file associated with a buffer.
336pub trait File: Send + Sync + Any {
337 /// Returns the [`LocalFile`] associated with this file, if the
338 /// file is local.
339 fn as_local(&self) -> Option<&dyn LocalFile>;
340
341 /// Returns whether this file is local.
342 fn is_local(&self) -> bool {
343 self.as_local().is_some()
344 }
345
346 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
347 /// only available in some states, such as modification time.
348 fn disk_state(&self) -> DiskState;
349
350 /// Returns the path of this file relative to the worktree's root directory.
351 fn path(&self) -> &Arc<RelPath>;
352
353 /// Returns the path of this file relative to the worktree's parent directory (this means it
354 /// includes the name of the worktree's root folder).
355 fn full_path(&self, cx: &App) -> PathBuf;
356
357 /// Returns the path style of this file.
358 fn path_style(&self, cx: &App) -> PathStyle;
359
360 /// Returns the last component of this handle's absolute path. If this handle refers to the root
361 /// of its worktree, then this method will return the name of the worktree itself.
362 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
363
364 /// Returns the id of the worktree to which this file belongs.
365 ///
366 /// This is needed for looking up project-specific settings.
367 fn worktree_id(&self, cx: &App) -> WorktreeId;
368
369 /// Converts this file into a protobuf message.
370 fn to_proto(&self, cx: &App) -> rpc::proto::File;
371
    /// Returns whether Zed considers this to be a private file.
373 fn is_private(&self) -> bool;
374}
375
376/// The file's storage status - whether it's stored (`Present`), and if so when it was last
377/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
378/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
379/// indicator for new files.
380#[derive(Copy, Clone, Debug, PartialEq)]
381pub enum DiskState {
382 /// File created in Zed that has not been saved.
383 New,
384 /// File present on the filesystem.
385 Present { mtime: MTime },
386 /// Deleted file that was previously present.
387 Deleted,
388}
389
390impl DiskState {
391 /// Returns the file's last known modification time on disk.
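    ///
    /// For example, both `DiskState::New.mtime()` and `DiskState::Deleted.mtime()`
    /// return `None`, while `DiskState::Present { mtime }.mtime()` returns `Some(mtime)`.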
392 pub fn mtime(self) -> Option<MTime> {
393 match self {
394 DiskState::New => None,
395 DiskState::Present { mtime } => Some(mtime),
396 DiskState::Deleted => None,
397 }
398 }
399
400 pub fn exists(&self) -> bool {
401 match self {
402 DiskState::New => false,
403 DiskState::Present { .. } => true,
404 DiskState::Deleted => false,
405 }
406 }
407}
408
409/// The file associated with a buffer, in the case where the file is on the local disk.
410pub trait LocalFile: File {
    /// Returns the absolute path of this file.
412 fn abs_path(&self, cx: &App) -> PathBuf;
413
414 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
415 fn load(&self, cx: &App) -> Task<Result<String>>;
416
417 /// Loads the file's contents from disk.
418 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
419}
420
421/// The auto-indent behavior associated with an editing operation.
422/// For some editing operations, each affected line of text has its
423/// indentation recomputed. For other operations, the entire block
424/// of edited text is adjusted uniformly.
425#[derive(Clone, Debug)]
426pub enum AutoindentMode {
427 /// Indent each line of inserted text.
428 EachLine,
429 /// Apply the same indentation adjustment to all of the lines
430 /// in a given insertion.
431 Block {
432 /// The original indentation column of the first line of each
433 /// insertion, if it has been copied.
434 ///
435 /// Knowing this makes it possible to preserve the relative indentation
436 /// of every line in the insertion from when it was copied.
437 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is then auto-indented to column `b`, then every other line of
        /// the insertion will have its indentation adjusted by `b - a` columns.
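        ///
        /// For example, if a line was copied from indent column 4 (`a` = 4) and the
        /// first line of the insertion is auto-indented to column 8 (`b` = 8), then
        /// every other line of the insertion is shifted right by 4 columns.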
441 original_indent_columns: Vec<Option<u32>>,
442 },
443}
444
445#[derive(Clone)]
446struct AutoindentRequest {
447 before_edit: BufferSnapshot,
448 entries: Vec<AutoindentRequestEntry>,
449 is_block_mode: bool,
450 ignore_empty_lines: bool,
451}
452
453#[derive(Debug, Clone)]
454struct AutoindentRequestEntry {
455 /// A range of the buffer whose indentation should be adjusted.
456 range: Range<Anchor>,
457 /// Whether or not these lines should be considered brand new, for the
458 /// purpose of auto-indent. When text is not new, its indentation will
459 /// only be adjusted if the suggested indentation level has *changed*
460 /// since the edit was made.
461 first_line_is_new: bool,
462 indent_size: IndentSize,
463 original_indent_column: Option<u32>,
464}
465
466#[derive(Debug)]
467struct IndentSuggestion {
468 basis_row: u32,
469 delta: Ordering,
470 within_error: bool,
471}
472
473struct BufferChunkHighlights<'a> {
474 captures: SyntaxMapCaptures<'a>,
475 next_capture: Option<SyntaxMapCapture<'a>>,
476 stack: Vec<(usize, HighlightId)>,
477 highlight_maps: Vec<HighlightMap>,
478}
479
480/// An iterator that yields chunks of a buffer's text, along with their
481/// syntax highlights and diagnostic status.
482pub struct BufferChunks<'a> {
483 buffer_snapshot: Option<&'a BufferSnapshot>,
484 range: Range<usize>,
485 chunks: text::Chunks<'a>,
486 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
487 error_depth: usize,
488 warning_depth: usize,
489 information_depth: usize,
490 hint_depth: usize,
491 unnecessary_depth: usize,
492 underline: bool,
493 highlights: Option<BufferChunkHighlights<'a>>,
494}
495
496/// A chunk of a buffer's text, along with its syntax highlight and
497/// diagnostic status.
498#[derive(Clone, Debug, Default)]
499pub struct Chunk<'a> {
500 /// The text of the chunk.
501 pub text: &'a str,
502 /// The syntax highlighting style of the chunk.
503 pub syntax_highlight_id: Option<HighlightId>,
504 /// The highlight style that has been applied to this chunk in
505 /// the editor.
506 pub highlight_style: Option<HighlightStyle>,
507 /// The severity of diagnostic associated with this chunk, if any.
508 pub diagnostic_severity: Option<DiagnosticSeverity>,
509 /// Whether this chunk of text is marked as unnecessary.
510 pub is_unnecessary: bool,
511 /// Whether this chunk of text was originally a tab character.
512 pub is_tab: bool,
513 /// A bitset of which characters are tabs in this string.
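    ///
    /// For example, `chunk.tabs & (1 << i) != 0` indicates that the character at
    /// position `i` of `text` is a tab (a sketch of the intended use; only the
    /// first 128 positions can be represented).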
514 pub tabs: u128,
515 /// Bitmap of character indices in this chunk
516 pub chars: u128,
    /// Whether this chunk of text is part of an inlay.
518 pub is_inlay: bool,
519 /// Whether to underline the corresponding text range in the editor.
520 pub underline: bool,
521}
522
523/// A set of edits to a given version of a buffer, computed asynchronously.
524#[derive(Debug)]
525pub struct Diff {
526 pub base_version: clock::Global,
527 pub line_ending: LineEnding,
528 pub edits: Vec<(Range<usize>, Arc<str>)>,
529}
530
531#[derive(Debug, Clone, Copy)]
532pub(crate) struct DiagnosticEndpoint {
533 offset: usize,
534 is_start: bool,
535 underline: bool,
536 severity: DiagnosticSeverity,
537 is_unnecessary: bool,
538}
539
540/// A class of characters, used for characterizing a run of text.
541#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
542pub enum CharKind {
543 /// Whitespace.
544 Whitespace,
545 /// Punctuation.
546 Punctuation,
547 /// Word.
548 Word,
549}
550
551/// Context for character classification within a specific scope.
552#[derive(Copy, Clone, Eq, PartialEq, Debug)]
553pub enum CharScopeContext {
554 /// Character classification for completion queries.
555 ///
556 /// This context treats certain characters as word constituents that would
557 /// normally be considered punctuation, such as '-' in Tailwind classes
558 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
559 Completion,
560 /// Character classification for linked edits.
561 ///
562 /// This context handles characters that should be treated as part of
563 /// identifiers during linked editing operations, such as '.' in JSX
564 /// component names like `<Animated.View>`.
565 LinkedEdit,
566}
567
568/// A runnable is a set of data about a region that could be resolved into a task
569pub struct Runnable {
570 pub tags: SmallVec<[RunnableTag; 1]>,
571 pub language: Arc<Language>,
572 pub buffer: BufferId,
573}
574
575#[derive(Default, Clone, Debug)]
576pub struct HighlightedText {
577 pub text: SharedString,
578 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
579}
580
581#[derive(Default, Debug)]
582struct HighlightedTextBuilder {
583 pub text: String,
584 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
585}
586
587impl HighlightedText {
588 pub fn from_buffer_range<T: ToOffset>(
589 range: Range<T>,
590 snapshot: &text::BufferSnapshot,
591 syntax_snapshot: &SyntaxSnapshot,
592 override_style: Option<HighlightStyle>,
593 syntax_theme: &SyntaxTheme,
594 ) -> Self {
595 let mut highlighted_text = HighlightedTextBuilder::default();
596 highlighted_text.add_text_from_buffer_range(
597 range,
598 snapshot,
599 syntax_snapshot,
600 override_style,
601 syntax_theme,
602 );
603 highlighted_text.build()
604 }
605
606 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
607 gpui::StyledText::new(self.text.clone())
608 .with_default_highlights(default_style, self.highlights.iter().cloned())
609 }
610
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within it, along with a boolean indicating whether more lines follow.
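    ///
    /// For example, `"    let x = 1;\nlet y = 2;"` with no highlights yields the
    /// preview `"let x = 1;"` and `true`, since another line follows.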
613 pub fn first_line_preview(self) -> (Self, bool) {
614 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
615 let first_line = &self.text[..newline_ix];
616
617 // Trim leading whitespace, unless an edit starts prior to it.
618 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
619 if let Some((first_highlight_range, _)) = self.highlights.first() {
620 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
621 }
622
623 let preview_text = &first_line[preview_start_ix..];
624 let preview_highlights = self
625 .highlights
626 .into_iter()
627 .take_while(|(range, _)| range.start < newline_ix)
628 .filter_map(|(mut range, highlight)| {
629 range.start = range.start.saturating_sub(preview_start_ix);
630 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
631 if range.is_empty() {
632 None
633 } else {
634 Some((range, highlight))
635 }
636 });
637
638 let preview = Self {
639 text: SharedString::new(preview_text),
640 highlights: preview_highlights.collect(),
641 };
642
643 (preview, self.text.len() > newline_ix)
644 }
645}
646
647impl HighlightedTextBuilder {
648 pub fn build(self) -> HighlightedText {
649 HighlightedText {
650 text: self.text.into(),
651 highlights: self.highlights,
652 }
653 }
654
655 pub fn add_text_from_buffer_range<T: ToOffset>(
656 &mut self,
657 range: Range<T>,
658 snapshot: &text::BufferSnapshot,
659 syntax_snapshot: &SyntaxSnapshot,
660 override_style: Option<HighlightStyle>,
661 syntax_theme: &SyntaxTheme,
662 ) {
663 let range = range.to_offset(snapshot);
664 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
665 let start = self.text.len();
666 self.text.push_str(chunk.text);
667 let end = self.text.len();
668
669 if let Some(highlight_style) = chunk
670 .syntax_highlight_id
671 .and_then(|id| id.style(syntax_theme))
672 {
673 let highlight_style = override_style.map_or(highlight_style, |override_style| {
674 highlight_style.highlight(override_style)
675 });
676 self.highlights.push((start..end, highlight_style));
677 } else if let Some(override_style) = override_style {
678 self.highlights.push((start..end, override_style));
679 }
680 }
681 }
682
683 fn highlighted_chunks<'a>(
684 range: Range<usize>,
685 snapshot: &'a text::BufferSnapshot,
686 syntax_snapshot: &'a SyntaxSnapshot,
687 ) -> BufferChunks<'a> {
688 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
689 grammar
690 .highlights_config
691 .as_ref()
692 .map(|config| &config.query)
693 });
694
695 let highlight_maps = captures
696 .grammars()
697 .iter()
698 .map(|grammar| grammar.highlight_map())
699 .collect();
700
701 BufferChunks::new(
702 snapshot.as_rope(),
703 range,
704 Some((captures, highlight_maps)),
705 false,
706 None,
707 )
708 }
709}
710
711#[derive(Clone)]
712pub struct EditPreview {
713 old_snapshot: text::BufferSnapshot,
714 applied_edits_snapshot: text::BufferSnapshot,
715 syntax_snapshot: SyntaxSnapshot,
716}
717
718impl EditPreview {
719 pub fn highlight_edits(
720 &self,
721 current_snapshot: &BufferSnapshot,
722 edits: &[(Range<Anchor>, String)],
723 include_deletions: bool,
724 cx: &App,
725 ) -> HighlightedText {
726 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
727 return HighlightedText::default();
728 };
729
730 let mut highlighted_text = HighlightedTextBuilder::default();
731
732 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
733
734 let insertion_highlight_style = HighlightStyle {
735 background_color: Some(cx.theme().status().created_background),
736 ..Default::default()
737 };
738 let deletion_highlight_style = HighlightStyle {
739 background_color: Some(cx.theme().status().deleted_background),
740 ..Default::default()
741 };
742 let syntax_theme = cx.theme().syntax();
743
744 for (range, edit_text) in edits {
745 let edit_new_end_in_preview_snapshot = range
746 .end
747 .bias_right(&self.old_snapshot)
748 .to_offset(&self.applied_edits_snapshot);
749 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
750
751 let unchanged_range_in_preview_snapshot =
752 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
753 if !unchanged_range_in_preview_snapshot.is_empty() {
754 highlighted_text.add_text_from_buffer_range(
755 unchanged_range_in_preview_snapshot,
756 &self.applied_edits_snapshot,
757 &self.syntax_snapshot,
758 None,
759 syntax_theme,
760 );
761 }
762
763 let range_in_current_snapshot = range.to_offset(current_snapshot);
764 if include_deletions && !range_in_current_snapshot.is_empty() {
765 highlighted_text.add_text_from_buffer_range(
766 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
769 Some(deletion_highlight_style),
770 syntax_theme,
771 );
772 }
773
774 if !edit_text.is_empty() {
775 highlighted_text.add_text_from_buffer_range(
776 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
777 &self.applied_edits_snapshot,
778 &self.syntax_snapshot,
779 Some(insertion_highlight_style),
780 syntax_theme,
781 );
782 }
783
784 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
785 }
786
787 highlighted_text.add_text_from_buffer_range(
788 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
789 &self.applied_edits_snapshot,
790 &self.syntax_snapshot,
791 None,
792 syntax_theme,
793 );
794
795 highlighted_text.build()
796 }
797
798 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
799 let (first, _) = edits.first()?;
800 let (last, _) = edits.last()?;
801
802 let start = first
803 .start
804 .bias_left(&self.old_snapshot)
805 .to_point(&self.applied_edits_snapshot);
806 let end = last
807 .end
808 .bias_right(&self.old_snapshot)
809 .to_point(&self.applied_edits_snapshot);
810
811 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
812 let range = Point::new(start.row, 0)
813 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
814
815 Some(range.to_offset(&self.applied_edits_snapshot))
816 }
817}
818
819#[derive(Clone, Debug, PartialEq, Eq)]
820pub struct BracketMatch {
821 pub open_range: Range<usize>,
822 pub close_range: Range<usize>,
823 pub newline_only: bool,
824}
825
826impl Buffer {
827 /// Create a new buffer with the given base text.
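    ///
    /// A minimal usage sketch (assuming a gpui context in which entities can be
    /// created):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```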
828 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
829 Self::build(
830 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
831 None,
832 Capability::ReadWrite,
833 )
834 }
835
836 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
837 pub fn local_normalized(
838 base_text_normalized: Rope,
839 line_ending: LineEnding,
840 cx: &Context<Self>,
841 ) -> Self {
842 Self::build(
843 TextBuffer::new_normalized(
844 0,
845 cx.entity_id().as_non_zero_u64().into(),
846 line_ending,
847 base_text_normalized,
848 ),
849 None,
850 Capability::ReadWrite,
851 )
852 }
853
854 /// Create a new buffer that is a replica of a remote buffer.
855 pub fn remote(
856 remote_id: BufferId,
857 replica_id: ReplicaId,
858 capability: Capability,
859 base_text: impl Into<String>,
860 ) -> Self {
861 Self::build(
862 TextBuffer::new(replica_id, remote_id, base_text.into()),
863 None,
864 capability,
865 )
866 }
867
868 /// Create a new buffer that is a replica of a remote buffer, populating its
869 /// state from the given protobuf message.
870 pub fn from_proto(
871 replica_id: ReplicaId,
872 capability: Capability,
873 message: proto::BufferState,
874 file: Option<Arc<dyn File>>,
875 ) -> Result<Self> {
876 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
877 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
878 let mut this = Self::build(buffer, file, capability);
879 this.text.set_line_ending(proto::deserialize_line_ending(
880 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
881 ));
882 this.saved_version = proto::deserialize_version(&message.saved_version);
883 this.saved_mtime = message.saved_mtime.map(|time| time.into());
884 Ok(this)
885 }
886
887 /// Serialize the buffer's state to a protobuf message.
888 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
889 proto::BufferState {
890 id: self.remote_id().into(),
891 file: self.file.as_ref().map(|f| f.to_proto(cx)),
892 base_text: self.base_text().to_string(),
893 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
894 saved_version: proto::serialize_version(&self.saved_version),
895 saved_mtime: self.saved_mtime.map(|time| time.into()),
896 }
897 }
898
899 /// Serialize as protobufs all of the changes to the buffer since the given version.
900 pub fn serialize_ops(
901 &self,
902 since: Option<clock::Global>,
903 cx: &App,
904 ) -> Task<Vec<proto::Operation>> {
905 let mut operations = Vec::new();
906 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
907
908 operations.extend(self.remote_selections.iter().map(|(_, set)| {
909 proto::serialize_operation(&Operation::UpdateSelections {
910 selections: set.selections.clone(),
911 lamport_timestamp: set.lamport_timestamp,
912 line_mode: set.line_mode,
913 cursor_shape: set.cursor_shape,
914 })
915 }));
916
917 for (server_id, diagnostics) in &self.diagnostics {
918 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
919 lamport_timestamp: self.diagnostics_timestamp,
920 server_id: *server_id,
921 diagnostics: diagnostics.iter().cloned().collect(),
922 }));
923 }
924
925 for (server_id, completions) in &self.completion_triggers_per_language_server {
926 operations.push(proto::serialize_operation(
927 &Operation::UpdateCompletionTriggers {
928 triggers: completions.iter().cloned().collect(),
929 lamport_timestamp: self.completion_triggers_timestamp,
930 server_id: *server_id,
931 },
932 ));
933 }
934
935 let text_operations = self.text.operations().clone();
936 cx.background_spawn(async move {
937 let since = since.unwrap_or_default();
938 operations.extend(
939 text_operations
940 .iter()
941 .filter(|(_, op)| !since.observed(op.timestamp()))
942 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
943 );
944 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
945 operations
946 })
947 }
948
949 /// Assign a language to the buffer, returning the buffer.
950 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
951 self.set_language(Some(language), cx);
952 self
953 }
954
955 /// Returns the [`Capability`] of this buffer.
956 pub fn capability(&self) -> Capability {
957 self.capability
958 }
959
960 /// Whether this buffer can only be read.
961 pub fn read_only(&self) -> bool {
962 self.capability == Capability::ReadOnly
963 }
964
965 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
966 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
967 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
968 let snapshot = buffer.snapshot();
969 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
970 Self {
971 saved_mtime,
972 saved_version: buffer.version(),
973 preview_version: buffer.version(),
974 reload_task: None,
975 transaction_depth: 0,
976 was_dirty_before_starting_transaction: None,
977 has_unsaved_edits: Cell::new((buffer.version(), false)),
978 text: buffer,
979 branch_state: None,
980 file,
981 capability,
982 syntax_map,
983 reparse: None,
984 non_text_state_update_count: 0,
985 sync_parse_timeout: Duration::from_millis(1),
986 parse_status: watch::channel(ParseStatus::Idle),
987 autoindent_requests: Default::default(),
988 wait_for_autoindent_txs: Default::default(),
989 pending_autoindent: Default::default(),
990 language: None,
991 remote_selections: Default::default(),
992 diagnostics: Default::default(),
993 diagnostics_timestamp: Default::default(),
994 completion_triggers: Default::default(),
995 completion_triggers_per_language_server: Default::default(),
996 completion_triggers_timestamp: Default::default(),
997 deferred_ops: OperationQueue::new(),
998 has_conflict: false,
999 change_bits: Default::default(),
1000 _subscriptions: Vec::new(),
1001 }
1002 }
1003
1004 pub fn build_snapshot(
1005 text: Rope,
1006 language: Option<Arc<Language>>,
1007 language_registry: Option<Arc<LanguageRegistry>>,
1008 cx: &mut App,
1009 ) -> impl Future<Output = BufferSnapshot> + use<> {
1010 let entity_id = cx.reserve_entity::<Self>().entity_id();
1011 let buffer_id = entity_id.as_non_zero_u64().into();
1012 async move {
1013 let text =
1014 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1015 let mut syntax = SyntaxMap::new(&text).snapshot();
1016 if let Some(language) = language.clone() {
1017 let language_registry = language_registry.clone();
1018 syntax.reparse(&text, language_registry, language);
1019 }
1020 BufferSnapshot {
1021 text,
1022 syntax,
1023 file: None,
1024 diagnostics: Default::default(),
1025 remote_selections: Default::default(),
1026 language,
1027 non_text_state_update_count: 0,
1028 }
1029 }
1030 }
1031
1032 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1033 let entity_id = cx.reserve_entity::<Self>().entity_id();
1034 let buffer_id = entity_id.as_non_zero_u64().into();
1035 let text =
1036 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
1037 let syntax = SyntaxMap::new(&text).snapshot();
1038 BufferSnapshot {
1039 text,
1040 syntax,
1041 file: None,
1042 diagnostics: Default::default(),
1043 remote_selections: Default::default(),
1044 language: None,
1045 non_text_state_update_count: 0,
1046 }
1047 }
1048
1049 #[cfg(any(test, feature = "test-support"))]
1050 pub fn build_snapshot_sync(
1051 text: Rope,
1052 language: Option<Arc<Language>>,
1053 language_registry: Option<Arc<LanguageRegistry>>,
1054 cx: &mut App,
1055 ) -> BufferSnapshot {
1056 let entity_id = cx.reserve_entity::<Self>().entity_id();
1057 let buffer_id = entity_id.as_non_zero_u64().into();
1058 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1059 let mut syntax = SyntaxMap::new(&text).snapshot();
1060 if let Some(language) = language.clone() {
1061 syntax.reparse(&text, language_registry, language);
1062 }
1063 BufferSnapshot {
1064 text,
1065 syntax,
1066 file: None,
1067 diagnostics: Default::default(),
1068 remote_selections: Default::default(),
1069 language,
1070 non_text_state_update_count: 0,
1071 }
1072 }
1073
1074 /// Retrieve a snapshot of the buffer's current state. This is computationally
1075 /// cheap, and allows reading from the buffer on a background thread.
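    ///
    /// A minimal sketch of reading on a background thread (assuming a gpui `App`
    /// context and a `buffer: Entity<Buffer>`):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     let _line_count = snapshot.max_point().row + 1;
    /// })
    /// .detach();
    /// ```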
1076 pub fn snapshot(&self) -> BufferSnapshot {
1077 let text = self.text.snapshot();
1078 let mut syntax_map = self.syntax_map.lock();
1079 syntax_map.interpolate(&text);
1080 let syntax = syntax_map.snapshot();
1081
1082 BufferSnapshot {
1083 text,
1084 syntax,
1085 file: self.file.clone(),
1086 remote_selections: self.remote_selections.clone(),
1087 diagnostics: self.diagnostics.clone(),
1088 language: self.language.clone(),
1089 non_text_state_update_count: self.non_text_state_update_count,
1090 }
1091 }
1092
1093 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1094 let this = cx.entity();
1095 cx.new(|cx| {
1096 let mut branch = Self {
1097 branch_state: Some(BufferBranchState {
1098 base_buffer: this.clone(),
1099 merged_operations: Default::default(),
1100 }),
1101 language: self.language.clone(),
1102 has_conflict: self.has_conflict,
1103 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1104 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1105 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1106 };
1107 if let Some(language_registry) = self.language_registry() {
1108 branch.set_language_registry(language_registry);
1109 }
1110
1111 // Reparse the branch buffer so that we get syntax highlighting immediately.
1112 branch.reparse(cx);
1113
1114 branch
1115 })
1116 }
1117
1118 pub fn preview_edits(
1119 &self,
1120 edits: Arc<[(Range<Anchor>, String)]>,
1121 cx: &App,
1122 ) -> Task<EditPreview> {
1123 let registry = self.language_registry();
1124 let language = self.language().cloned();
1125 let old_snapshot = self.text.snapshot();
1126 let mut branch_buffer = self.text.branch();
1127 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1128 cx.background_spawn(async move {
1129 if !edits.is_empty() {
1130 if let Some(language) = language.clone() {
1131 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1132 }
1133
1134 branch_buffer.edit(edits.iter().cloned());
1135 let snapshot = branch_buffer.snapshot();
1136 syntax_snapshot.interpolate(&snapshot);
1137
1138 if let Some(language) = language {
1139 syntax_snapshot.reparse(&snapshot, registry, language);
1140 }
1141 }
1142 EditPreview {
1143 old_snapshot,
1144 applied_edits_snapshot: branch_buffer.snapshot(),
1145 syntax_snapshot,
1146 }
1147 })
1148 }
1149
1150 /// Applies all of the changes in this buffer that intersect any of the
1151 /// given `ranges` to its base buffer.
1152 ///
1153 /// If `ranges` is empty, then all changes will be applied. This buffer must
1154 /// be a branch buffer to call this method.
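    ///
    /// A minimal sketch (assuming `branch` is a branch of another buffer):
    ///
    /// ```ignore
    /// // Apply every change in the branch back to its base buffer.
    /// branch.update(cx, |branch, cx| branch.merge_into_base(Vec::new(), cx));
    /// ```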
1155 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1156 let Some(base_buffer) = self.base_buffer() else {
1157 debug_panic!("not a branch buffer");
1158 return;
1159 };
1160
1161 let mut ranges = if ranges.is_empty() {
1162 &[0..usize::MAX]
1163 } else {
1164 ranges.as_slice()
1165 }
1166 .iter()
1167 .peekable();
1168
1169 let mut edits = Vec::new();
1170 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1171 let mut is_included = false;
1172 while let Some(range) = ranges.peek() {
1173 if range.end < edit.new.start {
1174 ranges.next().unwrap();
1175 } else {
1176 if range.start <= edit.new.end {
1177 is_included = true;
1178 }
1179 break;
1180 }
1181 }
1182
1183 if is_included {
1184 edits.push((
1185 edit.old.clone(),
1186 self.text_for_range(edit.new.clone()).collect::<String>(),
1187 ));
1188 }
1189 }
1190
1191 let operation = base_buffer.update(cx, |base_buffer, cx| {
1192 // cx.emit(BufferEvent::DiffBaseChanged);
1193 base_buffer.edit(edits, None, cx)
1194 });
1195
1196 if let Some(operation) = operation
1197 && let Some(BufferBranchState {
1198 merged_operations, ..
1199 }) = &mut self.branch_state
1200 {
1201 merged_operations.push(operation);
1202 }
1203 }
1204
1205 fn on_base_buffer_event(
1206 &mut self,
1207 _: Entity<Buffer>,
1208 event: &BufferEvent,
1209 cx: &mut Context<Self>,
1210 ) {
1211 let BufferEvent::Operation { operation, .. } = event else {
1212 return;
1213 };
1214 let Some(BufferBranchState {
1215 merged_operations, ..
1216 }) = &mut self.branch_state
1217 else {
1218 return;
1219 };
1220
1221 let mut operation_to_undo = None;
1222 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1223 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1224 {
1225 merged_operations.remove(ix);
1226 operation_to_undo = Some(operation.timestamp);
1227 }
1228
1229 self.apply_ops([operation.clone()], cx);
1230
1231 if let Some(timestamp) = operation_to_undo {
1232 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1233 self.undo_operations(counts, cx);
1234 }
1235 }
1236
1237 #[cfg(test)]
1238 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1239 &self.text
1240 }
1241
1242 /// Retrieve a snapshot of the buffer's raw text, without any
1243 /// language-related state like the syntax tree or diagnostics.
1244 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1245 self.text.snapshot()
1246 }
1247
1248 /// The file associated with the buffer, if any.
1249 pub fn file(&self) -> Option<&Arc<dyn File>> {
1250 self.file.as_ref()
1251 }
1252
1253 /// The version of the buffer that was last saved or reloaded from disk.
1254 pub fn saved_version(&self) -> &clock::Global {
1255 &self.saved_version
1256 }
1257
1258 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1259 pub fn saved_mtime(&self) -> Option<MTime> {
1260 self.saved_mtime
1261 }
1262
1263 /// Assign a language to the buffer.
1264 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1265 self.non_text_state_update_count += 1;
1266 self.syntax_map.lock().clear(&self.text);
1267 self.language = language;
1268 self.was_changed();
1269 self.reparse(cx);
1270 cx.emit(BufferEvent::LanguageChanged);
1271 }
1272
1273 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1274 /// other languages if parts of the buffer are written in different languages.
1275 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1276 self.syntax_map
1277 .lock()
1278 .set_language_registry(language_registry);
1279 }
1280
1281 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1282 self.syntax_map.lock().language_registry()
1283 }
1284
1285 /// Assign the line ending type to the buffer.
1286 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1287 self.text.set_line_ending(line_ending);
1288
1289 let lamport_timestamp = self.text.lamport_clock.tick();
1290 self.send_operation(
1291 Operation::UpdateLineEnding {
1292 line_ending,
1293 lamport_timestamp,
1294 },
1295 true,
1296 cx,
1297 );
1298 }
1299
1300 /// Assign the buffer a new [`Capability`].
1301 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1302 if self.capability != capability {
1303 self.capability = capability;
1304 cx.emit(BufferEvent::CapabilityChanged)
1305 }
1306 }
1307
1308 /// This method is called to signal that the buffer has been saved.
1309 pub fn did_save(
1310 &mut self,
1311 version: clock::Global,
1312 mtime: Option<MTime>,
1313 cx: &mut Context<Self>,
1314 ) {
1315 self.saved_version = version.clone();
1316 self.has_unsaved_edits.set((version, false));
1317 self.has_conflict = false;
1318 self.saved_mtime = mtime;
1319 self.was_changed();
1320 cx.emit(BufferEvent::Saved);
1321 cx.notify();
1322 }
1323
1324 /// Reloads the contents of the buffer from disk.
1325 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1326 let (tx, rx) = futures::channel::oneshot::channel();
1327 let prev_version = self.text.version();
1328 self.reload_task = Some(cx.spawn(async move |this, cx| {
1329 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1330 let file = this.file.as_ref()?.as_local()?;
1331
1332 Some((file.disk_state().mtime(), file.load(cx)))
1333 })?
1334 else {
1335 return Ok(());
1336 };
1337
1338 let new_text = new_text.await?;
1339 let diff = this
1340 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1341 .await;
1342 this.update(cx, |this, cx| {
1343 if this.version() == diff.base_version {
1344 this.finalize_last_transaction();
1345 this.apply_diff(diff, cx);
1346 tx.send(this.finalize_last_transaction().cloned()).ok();
1347 this.has_conflict = false;
1348 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1349 } else {
1350 if !diff.edits.is_empty()
1351 || this
1352 .edits_since::<usize>(&diff.base_version)
1353 .next()
1354 .is_some()
1355 {
1356 this.has_conflict = true;
1357 }
1358
1359 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1360 }
1361
1362 this.reload_task.take();
1363 })
1364 }));
1365 rx
1366 }
1367
1368 /// This method is called to signal that the buffer has been reloaded.
1369 pub fn did_reload(
1370 &mut self,
1371 version: clock::Global,
1372 line_ending: LineEnding,
1373 mtime: Option<MTime>,
1374 cx: &mut Context<Self>,
1375 ) {
1376 self.saved_version = version;
1377 self.has_unsaved_edits
1378 .set((self.saved_version.clone(), false));
1379 self.text.set_line_ending(line_ending);
1380 self.saved_mtime = mtime;
1381 cx.emit(BufferEvent::Reloaded);
1382 cx.notify();
1383 }
1384
1385 /// Updates the [`File`] backing this buffer. This should be called when
1386 /// the file has changed or has been deleted.
1387 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1388 let was_dirty = self.is_dirty();
1389 let mut file_changed = false;
1390
1391 if let Some(old_file) = self.file.as_ref() {
1392 if new_file.path() != old_file.path() {
1393 file_changed = true;
1394 }
1395
1396 let old_state = old_file.disk_state();
1397 let new_state = new_file.disk_state();
1398 if old_state != new_state {
1399 file_changed = true;
1400 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1401 cx.emit(BufferEvent::ReloadNeeded)
1402 }
1403 }
1404 } else {
1405 file_changed = true;
1406 };
1407
1408 self.file = Some(new_file);
1409 if file_changed {
1410 self.was_changed();
1411 self.non_text_state_update_count += 1;
1412 if was_dirty != self.is_dirty() {
1413 cx.emit(BufferEvent::DirtyChanged);
1414 }
1415 cx.emit(BufferEvent::FileHandleChanged);
1416 cx.notify();
1417 }
1418 }
1419
1420 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1421 Some(self.branch_state.as_ref()?.base_buffer.clone())
1422 }
1423
1424 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1425 pub fn language(&self) -> Option<&Arc<Language>> {
1426 self.language.as_ref()
1427 }
1428
1429 /// Returns the [`Language`] at the given location.
1430 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1431 let offset = position.to_offset(self);
1432 let mut is_first = true;
1433 let start_anchor = self.anchor_before(offset);
1434 let end_anchor = self.anchor_after(offset);
1435 self.syntax_map
1436 .lock()
1437 .layers_for_range(offset..offset, &self.text, false)
1438 .filter(|layer| {
1439 if is_first {
1440 is_first = false;
1441 return true;
1442 }
1443
1444 layer
1445 .included_sub_ranges
1446 .map(|sub_ranges| {
1447 sub_ranges.iter().any(|sub_range| {
1448 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1449 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1450 !is_before_start && !is_after_end
1451 })
1452 })
1453 .unwrap_or(true)
1454 })
1455 .last()
1456 .map(|info| info.language.clone())
1457 .or_else(|| self.language.clone())
1458 }
1459
1460 /// Returns each [`Language`] for the active syntax layers at the given location.
1461 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1462 let offset = position.to_offset(self);
1463 let mut languages: Vec<Arc<Language>> = self
1464 .syntax_map
1465 .lock()
1466 .layers_for_range(offset..offset, &self.text, false)
1467 .map(|info| info.language.clone())
1468 .collect();
1469
1470 if languages.is_empty()
1471 && let Some(buffer_language) = self.language()
1472 {
1473 languages.push(buffer_language.clone());
1474 }
1475
1476 languages
1477 }
1478
1479 /// An integer version number that accounts for all updates besides
1480 /// the buffer's text itself (which is versioned via a version vector).
1481 pub fn non_text_state_update_count(&self) -> usize {
1482 self.non_text_state_update_count
1483 }
1484
1485 /// Whether the buffer is being parsed in the background.
1486 #[cfg(any(test, feature = "test-support"))]
1487 pub fn is_parsing(&self) -> bool {
1488 self.reparse.is_some()
1489 }
1490
1491 /// Indicates whether the buffer contains any regions that may be
1492 /// written in a language that hasn't been loaded yet.
1493 pub fn contains_unknown_injections(&self) -> bool {
1494 self.syntax_map.lock().contains_unknown_injections()
1495 }
1496
1497 #[cfg(any(test, feature = "test-support"))]
1498 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1499 self.sync_parse_timeout = timeout;
1500 }
1501
1502 /// Called after an edit to synchronize the buffer's main parse tree with
1503 /// the buffer's new underlying state.
1504 ///
1505 /// Locks the syntax map and interpolates the edits since the last reparse
1506 /// into the foreground syntax tree.
1507 ///
1508 /// Then takes a stable snapshot of the syntax map before unlocking it.
1509 /// The snapshot with the interpolated edits is sent to a background thread,
1510 /// where we ask Tree-sitter to perform an incremental parse.
1511 ///
1512 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1513 /// waiting on the parse to complete. As soon as it completes, we proceed
1514 /// synchronously, unless a 1ms timeout elapses.
1515 ///
1516 /// If we time out waiting on the parse, we spawn a second task waiting
1517 /// until the parse does complete and return with the interpolated tree still
1518 /// in the foreground. When the background parse completes, call back into
1519 /// the main thread and assign the foreground parse state.
1520 ///
1521 /// If the buffer or grammar changed since the start of the background parse,
1522 /// initiate an additional reparse recursively. To avoid concurrent parses
1523 /// for the same buffer, we only initiate a new parse if we are not already
1524 /// parsing in the background.
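    ///
    /// A rough sketch of the flow implemented below (pseudocode, not the exact API):
    ///
    /// ```ignore
    /// let parse = background(reparse(text, syntax_snapshot));
    /// match block_with_timeout(sync_parse_timeout, parse) {
    ///     Ok(new_syntax) => did_finish_parsing(new_syntax),
    ///     Err(parse) => spawn(async move {
    ///         let new_syntax = parse.await;
    ///         did_finish_parsing(new_syntax);
    ///         // Reparse again if the buffer or grammar changed in the meantime.
    ///     }),
    /// }
    /// ```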
1525 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1526 if self.reparse.is_some() {
1527 return;
1528 }
1529 let language = if let Some(language) = self.language.clone() {
1530 language
1531 } else {
1532 return;
1533 };
1534
1535 let text = self.text_snapshot();
1536 let parsed_version = self.version();
1537
1538 let mut syntax_map = self.syntax_map.lock();
1539 syntax_map.interpolate(&text);
1540 let language_registry = syntax_map.language_registry();
1541 let mut syntax_snapshot = syntax_map.snapshot();
1542 drop(syntax_map);
1543
1544 let parse_task = cx.background_spawn({
1545 let language = language.clone();
1546 let language_registry = language_registry.clone();
1547 async move {
1548 syntax_snapshot.reparse(&text, language_registry, language);
1549 syntax_snapshot
1550 }
1551 });
1552
1553 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1554 match cx
1555 .background_executor()
1556 .block_with_timeout(self.sync_parse_timeout, parse_task)
1557 {
1558 Ok(new_syntax_snapshot) => {
1559 self.did_finish_parsing(new_syntax_snapshot, cx);
1560 self.reparse = None;
1561 }
1562 Err(parse_task) => {
1563 self.reparse = Some(cx.spawn(async move |this, cx| {
1564 let new_syntax_map = parse_task.await;
1565 this.update(cx, move |this, cx| {
1566 let grammar_changed =
1567 this.language.as_ref().is_none_or(|current_language| {
1568 !Arc::ptr_eq(&language, current_language)
1569 });
1570 let language_registry_changed = new_syntax_map
1571 .contains_unknown_injections()
1572 && language_registry.is_some_and(|registry| {
1573 registry.version() != new_syntax_map.language_registry_version()
1574 });
1575 let parse_again = language_registry_changed
1576 || grammar_changed
1577 || this.version.changed_since(&parsed_version);
1578 this.did_finish_parsing(new_syntax_map, cx);
1579 this.reparse = None;
1580 if parse_again {
1581 this.reparse(cx);
1582 }
1583 })
1584 .ok();
1585 }));
1586 }
1587 }
1588 }
1589
1590 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1591 self.was_changed();
1592 self.non_text_state_update_count += 1;
1593 self.syntax_map.lock().did_parse(syntax_snapshot);
1594 self.request_autoindent(cx);
1595 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1596 cx.emit(BufferEvent::Reparsed);
1597 cx.notify();
1598 }
1599
1600 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1601 self.parse_status.1.clone()
1602 }
1603
1604 /// Assign to the buffer a set of diagnostics created by a given language server.
1605 pub fn update_diagnostics(
1606 &mut self,
1607 server_id: LanguageServerId,
1608 diagnostics: DiagnosticSet,
1609 cx: &mut Context<Self>,
1610 ) {
1611 let lamport_timestamp = self.text.lamport_clock.tick();
1612 let op = Operation::UpdateDiagnostics {
1613 server_id,
1614 diagnostics: diagnostics.iter().cloned().collect(),
1615 lamport_timestamp,
1616 };
1617
1618 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1619 self.send_operation(op, true, cx);
1620 }
1621
1622 pub fn buffer_diagnostics(
1623 &self,
1624 for_server: Option<LanguageServerId>,
1625 ) -> Vec<&DiagnosticEntry<Anchor>> {
1626 match for_server {
1627 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1628 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1629 Err(_) => Vec::new(),
1630 },
1631 None => self
1632 .diagnostics
1633 .iter()
1634 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1635 .collect(),
1636 }
1637 }
1638
1639 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1640 if let Some(indent_sizes) = self.compute_autoindents() {
1641 let indent_sizes = cx.background_spawn(indent_sizes);
1642 match cx
1643 .background_executor()
1644 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1645 {
1646 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1647 Err(indent_sizes) => {
1648 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1649 let indent_sizes = indent_sizes.await;
1650 this.update(cx, |this, cx| {
1651 this.apply_autoindents(indent_sizes, cx);
1652 })
1653 .ok();
1654 }));
1655 }
1656 }
1657 } else {
1658 self.autoindent_requests.clear();
1659 for tx in self.wait_for_autoindent_txs.drain(..) {
1660 tx.send(()).ok();
1661 }
1662 }
1663 }
1664
1665 fn compute_autoindents(
1666 &self,
1667 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1668 let max_rows_between_yields = 100;
1669 let snapshot = self.snapshot();
1670 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1671 return None;
1672 }
1673
1674 let autoindent_requests = self.autoindent_requests.clone();
1675 Some(async move {
1676 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1677 for request in autoindent_requests {
1678 // Resolve each edited range to its row in the current buffer and in the
1679 // buffer before this batch of edits.
1680 let mut row_ranges = Vec::new();
1681 let mut old_to_new_rows = BTreeMap::new();
1682 let mut language_indent_sizes_by_new_row = Vec::new();
1683 for entry in &request.entries {
1684 let position = entry.range.start;
1685 let new_row = position.to_point(&snapshot).row;
1686 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1687 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1688
1689 if !entry.first_line_is_new {
1690 let old_row = position.to_point(&request.before_edit).row;
1691 old_to_new_rows.insert(old_row, new_row);
1692 }
1693 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1694 }
1695
1696 // Build a map containing the suggested indentation for each of the edited lines
1697 // with respect to the state of the buffer before these edits. This map is keyed
1698 // by the rows for these lines in the current state of the buffer.
1699 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
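                // Group the edited old rows into contiguous ranges, capped at
                // `max_rows_between_yields` rows each, so the loop below can yield to
                // the executor between ranges (e.g. rows 1, 2, 3, 7, 8 become 1..4 and 7..9).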
1700 let old_edited_ranges =
1701 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1702 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1703 let mut language_indent_size = IndentSize::default();
1704 for old_edited_range in old_edited_ranges {
1705 let suggestions = request
1706 .before_edit
1707 .suggest_autoindents(old_edited_range.clone())
1708 .into_iter()
1709 .flatten();
1710 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1711 if let Some(suggestion) = suggestion {
1712 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1713
1714 // Find the indent size based on the language for this row.
1715 while let Some((row, size)) = language_indent_sizes.peek() {
1716 if *row > new_row {
1717 break;
1718 }
1719 language_indent_size = *size;
1720 language_indent_sizes.next();
1721 }
1722
1723 let suggested_indent = old_to_new_rows
1724 .get(&suggestion.basis_row)
1725 .and_then(|from_row| {
1726 Some(old_suggestions.get(from_row).copied()?.0)
1727 })
1728 .unwrap_or_else(|| {
1729 request
1730 .before_edit
1731 .indent_size_for_line(suggestion.basis_row)
1732 })
1733 .with_delta(suggestion.delta, language_indent_size);
1734 old_suggestions
1735 .insert(new_row, (suggested_indent, suggestion.within_error));
1736 }
1737 }
1738 yield_now().await;
1739 }
1740
1741 // Compute new suggestions for each line, but only include them in the result
1742 // if they differ from the old suggestion for that line.
1743 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1744 let mut language_indent_size = IndentSize::default();
1745 for (row_range, original_indent_column) in row_ranges {
1746 let new_edited_row_range = if request.is_block_mode {
1747 row_range.start..row_range.start + 1
1748 } else {
1749 row_range.clone()
1750 };
1751
1752 let suggestions = snapshot
1753 .suggest_autoindents(new_edited_row_range.clone())
1754 .into_iter()
1755 .flatten();
1756 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1757 if let Some(suggestion) = suggestion {
1758 // Find the indent size based on the language for this row.
1759 while let Some((row, size)) = language_indent_sizes.peek() {
1760 if *row > new_row {
1761 break;
1762 }
1763 language_indent_size = *size;
1764 language_indent_sizes.next();
1765 }
1766
1767 let suggested_indent = indent_sizes
1768 .get(&suggestion.basis_row)
1769 .copied()
1770 .map(|e| e.0)
1771 .unwrap_or_else(|| {
1772 snapshot.indent_size_for_line(suggestion.basis_row)
1773 })
1774 .with_delta(suggestion.delta, language_indent_size);
1775
1776 if old_suggestions.get(&new_row).is_none_or(
1777 |(old_indentation, was_within_error)| {
1778 suggested_indent != *old_indentation
1779 && (!suggestion.within_error || *was_within_error)
1780 },
1781 ) {
1782 indent_sizes.insert(
1783 new_row,
1784 (suggested_indent, request.ignore_empty_lines),
1785 );
1786 }
1787 }
1788 }
1789
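                    // In block mode, compute how the first line's indentation changed relative
                    // to its original indent column, and shift the remaining lines of the
                    // insertion by the same delta so their relative indentation is preserved.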
1790 if let (true, Some(original_indent_column)) =
1791 (request.is_block_mode, original_indent_column)
1792 {
1793 let new_indent =
1794 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1795 *indent
1796 } else {
1797 snapshot.indent_size_for_line(row_range.start)
1798 };
1799 let delta = new_indent.len as i64 - original_indent_column as i64;
1800 if delta != 0 {
1801 for row in row_range.skip(1) {
1802 indent_sizes.entry(row).or_insert_with(|| {
1803 let mut size = snapshot.indent_size_for_line(row);
1804 if size.kind == new_indent.kind {
1805 match delta.cmp(&0) {
1806 Ordering::Greater => size.len += delta as u32,
1807 Ordering::Less => {
1808 size.len = size.len.saturating_sub(-delta as u32)
1809 }
1810 Ordering::Equal => {}
1811 }
1812 }
1813 (size, request.ignore_empty_lines)
1814 });
1815 }
1816 }
1817 }
1818
1819 yield_now().await;
1820 }
1821 }
1822
1823 indent_sizes
1824 .into_iter()
1825 .filter_map(|(row, (indent, ignore_empty_lines))| {
1826 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1827 None
1828 } else {
1829 Some((row, indent))
1830 }
1831 })
1832 .collect()
1833 })
1834 }
1835
1836 fn apply_autoindents(
1837 &mut self,
1838 indent_sizes: BTreeMap<u32, IndentSize>,
1839 cx: &mut Context<Self>,
1840 ) {
1841 self.autoindent_requests.clear();
1842 for tx in self.wait_for_autoindent_txs.drain(..) {
1843 tx.send(()).ok();
1844 }
1845
1846 let edits: Vec<_> = indent_sizes
1847 .into_iter()
1848 .filter_map(|(row, indent_size)| {
1849 let current_size = indent_size_for_line(self, row);
1850 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1851 })
1852 .collect();
1853
1854 let preserve_preview = self.preserve_preview();
1855 self.edit(edits, None, cx);
1856 if preserve_preview {
1857 self.refresh_preview();
1858 }
1859 }
1860
1861 /// Create a minimal edit that will cause the given row to be indented
1862 /// with the given size. After applying this edit, the length of the line
1863 /// will always be at least `new_size.len`.
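    ///
    /// # Example
    ///
    /// A minimal illustrative sketch (not compiled as a doctest): growing a two-space
    /// indent to four spaces produces an insertion of two spaces at the start of the row.
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     0,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(0, 0)..Point::new(0, 0), "  ".to_string()))
    /// );
    /// ```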
1864 pub fn edit_for_indent_size_adjustment(
1865 row: u32,
1866 current_size: IndentSize,
1867 new_size: IndentSize,
1868 ) -> Option<(Range<Point>, String)> {
1869 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1871 Ordering::Greater => {
1872 let point = Point::new(row, 0);
1873 Some((
1874 point..point,
1875 iter::repeat(new_size.char())
1876 .take((new_size.len - current_size.len) as usize)
1877 .collect::<String>(),
1878 ))
1879 }
1880
1881 Ordering::Less => Some((
1882 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1883 String::new(),
1884 )),
1885
1886 Ordering::Equal => None,
1887 }
1888 } else {
1889 Some((
1890 Point::new(row, 0)..Point::new(row, current_size.len),
1891 iter::repeat(new_size.char())
1892 .take(new_size.len as usize)
1893 .collect::<String>(),
1894 ))
1895 }
1896 }
1897
1898 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1899 /// and the given new text.
1900 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1901 let old_text = self.as_rope().clone();
1902 let base_version = self.version();
1903 cx.background_executor()
1904 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1905 let old_text = old_text.to_string();
1906 let line_ending = LineEnding::detect(&new_text);
1907 LineEnding::normalize(&mut new_text);
1908 let edits = text_diff(&old_text, &new_text);
1909 Diff {
1910 base_version,
1911 line_ending,
1912 edits,
1913 }
1914 })
1915 }
1916
1917 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1919 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1920 let old_text = self.as_rope().clone();
1921 let line_ending = self.line_ending();
1922 let base_version = self.version();
1923 cx.background_spawn(async move {
1924 let ranges = trailing_whitespace_ranges(&old_text);
1925 let empty = Arc::<str>::from("");
1926 Diff {
1927 base_version,
1928 line_ending,
1929 edits: ranges
1930 .into_iter()
1931 .map(|range| (range, empty.clone()))
1932 .collect(),
1933 }
1934 })
1935 }
1936
1937 /// Ensures that the buffer ends with a single newline character, and
1938 /// no other whitespace. Skips if the buffer is empty.
1939 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1940 let len = self.len();
1941 if len == 0 {
1942 return;
1943 }
1944 let mut offset = len;
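        // Walk the rope backwards to find the offset just past the last
        // non-whitespace character; if the buffer already ends with exactly
        // one newline and no other trailing whitespace, there is nothing to do.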
1945 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1946 let non_whitespace_len = chunk
1947 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1948 .len();
1949 offset -= chunk.len();
1950 offset += non_whitespace_len;
1951 if non_whitespace_len != 0 {
1952 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1953 return;
1954 }
1955 break;
1956 }
1957 }
1958 self.edit([(offset..len, "\n")], None, cx);
1959 }
1960
1961 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1962 /// calculated, then adjust the diff to account for those changes, and discard any
1963 /// parts of the diff that conflict with those changes.
1964 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1965 let snapshot = self.snapshot();
1966 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
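        // `delta` tracks how far offsets in the current buffer have shifted, relative to the
        // diff's base version, due to edits that precede the hunk currently being adjusted.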
1967 let mut delta = 0;
1968 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1969 while let Some(edit_since) = edits_since.peek() {
1970 // If the edit occurs after a diff hunk, then it does not
1971 // affect that hunk.
1972 if edit_since.old.start > range.end {
1973 break;
1974 }
1975 // If the edit precedes the diff hunk, then adjust the hunk
1976 // to reflect the edit.
1977 else if edit_since.old.end < range.start {
1978 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1979 edits_since.next();
1980 }
1981 // If the edit intersects a diff hunk, then discard that hunk.
1982 else {
1983 return None;
1984 }
1985 }
1986
1987 let start = (range.start as i64 + delta) as usize;
1988 let end = (range.end as i64 + delta) as usize;
1989 Some((start..end, new_text))
1990 });
1991
1992 self.start_transaction();
1993 self.text.set_line_ending(diff.line_ending);
1994 self.edit(adjusted_edits, None, cx);
1995 self.end_transaction(cx)
1996 }
1997
1998 fn has_unsaved_edits(&self) -> bool {
1999 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2000
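        // Reuse the cached answer when the buffer hasn't changed since it was computed;
        // otherwise recompute it against the last saved version and refresh the cache.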
2001 if last_version == self.version {
2002 self.has_unsaved_edits
2003 .set((last_version, has_unsaved_edits));
2004 return has_unsaved_edits;
2005 }
2006
2007 let has_edits = self.has_edits_since(&self.saved_version);
2008 self.has_unsaved_edits
2009 .set((self.version.clone(), has_edits));
2010 has_edits
2011 }
2012
2013 /// Checks if the buffer has unsaved changes.
2014 pub fn is_dirty(&self) -> bool {
2015 if self.capability == Capability::ReadOnly {
2016 return false;
2017 }
2018 if self.has_conflict {
2019 return true;
2020 }
2021 match self.file.as_ref().map(|f| f.disk_state()) {
2022 Some(DiskState::New) | Some(DiskState::Deleted) => {
2023 !self.is_empty() && self.has_unsaved_edits()
2024 }
2025 _ => self.has_unsaved_edits(),
2026 }
2027 }
2028
2029 /// Checks if the buffer and its file have both changed since the buffer
2030 /// was last saved or reloaded.
2031 pub fn has_conflict(&self) -> bool {
2032 if self.has_conflict {
2033 return true;
2034 }
2035 let Some(file) = self.file.as_ref() else {
2036 return false;
2037 };
2038 match file.disk_state() {
2039 DiskState::New => false,
2040 DiskState::Present { mtime } => match self.saved_mtime {
2041 Some(saved_mtime) => {
2042 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2043 }
2044 None => true,
2045 },
2046 DiskState::Deleted => false,
2047 }
2048 }
2049
2050 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2051 pub fn subscribe(&mut self) -> Subscription {
2052 self.text.subscribe()
2053 }
2054
2055 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2056 ///
2057 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
2059 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2060 if let Err(ix) = self
2061 .change_bits
2062 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2063 {
2064 self.change_bits.insert(ix, bit);
2065 }
2066 }
2067
2068 fn was_changed(&mut self) {
2069 self.change_bits.retain(|change_bit| {
2070 change_bit.upgrade().is_some_and(|bit| {
2071 bit.replace(true);
2072 true
2073 })
2074 });
2075 }
2076
2077 /// Starts a transaction, if one is not already in-progress. When undoing or
2078 /// redoing edits, all of the edits performed within a transaction are undone
2079 /// or redone together.
2080 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2081 self.start_transaction_at(Instant::now())
2082 }
2083
2084 /// Starts a transaction, providing the current time. Subsequent transactions
2085 /// that occur within a short period of time will be grouped together. This
2086 /// is controlled by the buffer's undo grouping duration.
2087 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2088 self.transaction_depth += 1;
2089 if self.was_dirty_before_starting_transaction.is_none() {
2090 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2091 }
2092 self.text.start_transaction_at(now)
2093 }
2094
2095 /// Terminates the current transaction, if this is the outermost transaction.
2096 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2097 self.end_transaction_at(Instant::now(), cx)
2098 }
2099
2100 /// Terminates the current transaction, providing the current time. Subsequent transactions
2101 /// that occur within a short period of time will be grouped together. This
2102 /// is controlled by the buffer's undo grouping duration.
2103 pub fn end_transaction_at(
2104 &mut self,
2105 now: Instant,
2106 cx: &mut Context<Self>,
2107 ) -> Option<TransactionId> {
2108 assert!(self.transaction_depth > 0);
2109 self.transaction_depth -= 1;
2110 let was_dirty = if self.transaction_depth == 0 {
2111 self.was_dirty_before_starting_transaction.take().unwrap()
2112 } else {
2113 false
2114 };
2115 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2116 self.did_edit(&start_version, was_dirty, cx);
2117 Some(transaction_id)
2118 } else {
2119 None
2120 }
2121 }
2122
2123 /// Manually add a transaction to the buffer's undo history.
2124 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2125 self.text.push_transaction(transaction, now);
2126 }
2127
2128 /// Differs from `push_transaction` in that it does not clear the redo
2129 /// stack. Intended to be used to create a parent transaction to merge
2130 /// potential child transactions into.
2131 ///
2132 /// The caller is responsible for removing it from the undo history using
2133 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2134 /// are merged into this transaction, the caller is responsible for ensuring
2135 /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and then merge the resulting transactions into
    /// the transaction created by this method.
2139 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2140 self.text.push_empty_transaction(now)
2141 }
2142
2143 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2145 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2146 self.text.finalize_last_transaction()
2147 }
2148
2149 /// Manually group all changes since a given transaction.
2150 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2151 self.text.group_until_transaction(transaction_id);
2152 }
2153
    /// Manually remove a transaction from the buffer's undo history.
2155 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2156 self.text.forget_transaction(transaction_id)
2157 }
2158
    /// Retrieve a transaction from the buffer's undo history.
2160 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2161 self.text.get_transaction(transaction_id)
2162 }
2163
2164 /// Manually merge two transactions in the buffer's undo history.
2165 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2166 self.text.merge_transactions(transaction, destination);
2167 }
2168
2169 /// Waits for the buffer to receive operations with the given timestamps.
2170 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2171 &mut self,
2172 edit_ids: It,
2173 ) -> impl Future<Output = Result<()>> + use<It> {
2174 self.text.wait_for_edits(edit_ids)
2175 }
2176
2177 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2178 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2179 &mut self,
2180 anchors: It,
2181 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2182 self.text.wait_for_anchors(anchors)
2183 }
2184
2185 /// Waits for the buffer to receive operations up to the given version.
2186 pub fn wait_for_version(
2187 &mut self,
2188 version: clock::Global,
2189 ) -> impl Future<Output = Result<()>> + use<> {
2190 self.text.wait_for_version(version)
2191 }
2192
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2195 pub fn give_up_waiting(&mut self) {
2196 self.text.give_up_waiting();
2197 }
2198
2199 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2200 let mut rx = None;
2201 if !self.autoindent_requests.is_empty() {
2202 let channel = oneshot::channel();
2203 self.wait_for_autoindent_txs.push(channel.0);
2204 rx = Some(channel.1);
2205 }
2206 rx
2207 }
2208
2209 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2210 pub fn set_active_selections(
2211 &mut self,
2212 selections: Arc<[Selection<Anchor>]>,
2213 line_mode: bool,
2214 cursor_shape: CursorShape,
2215 cx: &mut Context<Self>,
2216 ) {
2217 let lamport_timestamp = self.text.lamport_clock.tick();
2218 self.remote_selections.insert(
2219 self.text.replica_id(),
2220 SelectionSet {
2221 selections: selections.clone(),
2222 lamport_timestamp,
2223 line_mode,
2224 cursor_shape,
2225 },
2226 );
2227 self.send_operation(
2228 Operation::UpdateSelections {
2229 selections,
2230 line_mode,
2231 lamport_timestamp,
2232 cursor_shape,
2233 },
2234 true,
2235 cx,
2236 );
2237 self.non_text_state_update_count += 1;
2238 cx.notify();
2239 }
2240
2241 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2242 /// this replica.
2243 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2244 if self
2245 .remote_selections
2246 .get(&self.text.replica_id())
2247 .is_none_or(|set| !set.selections.is_empty())
2248 {
2249 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2250 }
2251 }
2252
2253 pub fn set_agent_selections(
2254 &mut self,
2255 selections: Arc<[Selection<Anchor>]>,
2256 line_mode: bool,
2257 cursor_shape: CursorShape,
2258 cx: &mut Context<Self>,
2259 ) {
2260 let lamport_timestamp = self.text.lamport_clock.tick();
2261 self.remote_selections.insert(
2262 AGENT_REPLICA_ID,
2263 SelectionSet {
2264 selections,
2265 lamport_timestamp,
2266 line_mode,
2267 cursor_shape,
2268 },
2269 );
2270 self.non_text_state_update_count += 1;
2271 cx.notify();
2272 }
2273
2274 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2275 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2276 }
2277
2278 /// Replaces the buffer's entire text.
2279 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2280 where
2281 T: Into<Arc<str>>,
2282 {
2283 self.autoindent_requests.clear();
2284 self.edit([(0..self.len(), text)], None, cx)
2285 }
2286
2287 /// Appends the given text to the end of the buffer.
2288 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2289 where
2290 T: Into<Arc<str>>,
2291 {
2292 self.edit([(self.len()..self.len(), text)], None, cx)
2293 }
2294
2295 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2296 /// delete, and a string of text to insert at that location.
2297 ///
2298 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2299 /// request for the edited ranges, which will be processed when the buffer finishes
2300 /// parsing.
2301 ///
2302 /// Parsing takes place at the end of a transaction, and may compute synchronously
2303 /// or asynchronously, depending on the changes.
2304 pub fn edit<I, S, T>(
2305 &mut self,
2306 edits_iter: I,
2307 autoindent_mode: Option<AutoindentMode>,
2308 cx: &mut Context<Self>,
2309 ) -> Option<clock::Lamport>
2310 where
2311 I: IntoIterator<Item = (Range<S>, T)>,
2312 S: ToOffset,
2313 T: Into<Arc<str>>,
2314 {
2315 // Skip invalid edits and coalesce contiguous ones.
2316 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2317
2318 for (range, new_text) in edits_iter {
2319 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2320
2321 if range.start > range.end {
2322 mem::swap(&mut range.start, &mut range.end);
2323 }
2324 let new_text = new_text.into();
2325 if !new_text.is_empty() || !range.is_empty() {
2326 if let Some((prev_range, prev_text)) = edits.last_mut()
2327 && prev_range.end >= range.start
2328 {
2329 prev_range.end = cmp::max(prev_range.end, range.end);
2330 *prev_text = format!("{prev_text}{new_text}").into();
2331 } else {
2332 edits.push((range, new_text));
2333 }
2334 }
2335 }
2336 if edits.is_empty() {
2337 return None;
2338 }
2339
2340 self.start_transaction();
2341 self.pending_autoindent.take();
2342 let autoindent_request = autoindent_mode
2343 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2344
2345 let edit_operation = self.text.edit(edits.iter().cloned());
2346 let edit_id = edit_operation.timestamp();
2347
2348 if let Some((before_edit, mode)) = autoindent_request {
2349 let mut delta = 0isize;
2350 let mut previous_setting = None;
2351 let entries: Vec<_> = edits
2352 .into_iter()
2353 .enumerate()
2354 .zip(&edit_operation.as_edit().unwrap().new_text)
2355 .filter(|((_, (range, _)), _)| {
2356 let language = before_edit.language_at(range.start);
2357 let language_id = language.map(|l| l.id());
2358 if let Some((cached_language_id, auto_indent)) = previous_setting
2359 && cached_language_id == language_id
2360 {
2361 auto_indent
2362 } else {
2363 // The auto-indent setting is not present in editorconfigs, hence
2364 // we can avoid passing the file here.
2365 let auto_indent =
2366 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2367 previous_setting = Some((language_id, auto_indent));
2368 auto_indent
2369 }
2370 })
2371 .map(|((ix, (range, _)), new_text)| {
2372 let new_text_length = new_text.len();
2373 let old_start = range.start.to_point(&before_edit);
2374 let new_start = (delta + range.start as isize) as usize;
2375 let range_len = range.end - range.start;
2376 delta += new_text_length as isize - range_len as isize;
2377
2378 // Decide what range of the insertion to auto-indent, and whether
2379 // the first line of the insertion should be considered a newly-inserted line
2380 // or an edit to an existing line.
2381 let mut range_of_insertion_to_indent = 0..new_text_length;
2382 let mut first_line_is_new = true;
2383
2384 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2385 let old_line_end = before_edit.line_len(old_start.row);
2386
2387 if old_start.column > old_line_start {
2388 first_line_is_new = false;
2389 }
2390
2391 if !new_text.contains('\n')
2392 && (old_start.column + (range_len as u32) < old_line_end
2393 || old_line_end == old_line_start)
2394 {
2395 first_line_is_new = false;
2396 }
2397
2398 // When inserting text starting with a newline, avoid auto-indenting the
2399 // previous line.
2400 if new_text.starts_with('\n') {
2401 range_of_insertion_to_indent.start += 1;
2402 first_line_is_new = true;
2403 }
2404
2405 let mut original_indent_column = None;
2406 if let AutoindentMode::Block {
2407 original_indent_columns,
2408 } = &mode
2409 {
2410 original_indent_column = Some(if new_text.starts_with('\n') {
2411 indent_size_for_text(
2412 new_text[range_of_insertion_to_indent.clone()].chars(),
2413 )
2414 .len
2415 } else {
2416 original_indent_columns
2417 .get(ix)
2418 .copied()
2419 .flatten()
2420 .unwrap_or_else(|| {
2421 indent_size_for_text(
2422 new_text[range_of_insertion_to_indent.clone()].chars(),
2423 )
2424 .len
2425 })
2426 });
2427
2428 // Avoid auto-indenting the line after the edit.
2429 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2430 range_of_insertion_to_indent.end -= 1;
2431 }
2432 }
2433
2434 AutoindentRequestEntry {
2435 first_line_is_new,
2436 original_indent_column,
2437 indent_size: before_edit.language_indent_size_at(range.start, cx),
2438 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2439 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2440 }
2441 })
2442 .collect();
2443
2444 if !entries.is_empty() {
2445 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2446 before_edit,
2447 entries,
2448 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2449 ignore_empty_lines: false,
2450 }));
2451 }
2452 }
2453
2454 self.end_transaction(cx);
2455 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2456 Some(edit_id)
2457 }
2458
2459 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2460 self.was_changed();
2461
2462 if self.edits_since::<usize>(old_version).next().is_none() {
2463 return;
2464 }
2465
2466 self.reparse(cx);
2467 cx.emit(BufferEvent::Edited);
2468 if was_dirty != self.is_dirty() {
2469 cx.emit(BufferEvent::DirtyChanged);
2470 }
2471 cx.notify();
2472 }
2473
2474 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2475 where
2476 I: IntoIterator<Item = Range<T>>,
2477 T: ToOffset + Copy,
2478 {
2479 let before_edit = self.snapshot();
2480 let entries = ranges
2481 .into_iter()
2482 .map(|range| AutoindentRequestEntry {
2483 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2484 first_line_is_new: true,
2485 indent_size: before_edit.language_indent_size_at(range.start, cx),
2486 original_indent_column: None,
2487 })
2488 .collect();
2489 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2490 before_edit,
2491 entries,
2492 is_block_mode: false,
2493 ignore_empty_lines: true,
2494 }));
2495 self.request_autoindent(cx);
2496 }
2497
2498 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2499 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2500 pub fn insert_empty_line(
2501 &mut self,
2502 position: impl ToPoint,
2503 space_above: bool,
2504 space_below: bool,
2505 cx: &mut Context<Self>,
2506 ) -> Point {
2507 let mut position = position.to_point(self);
2508
2509 self.start_transaction();
2510
2511 self.edit(
2512 [(position..position, "\n")],
2513 Some(AutoindentMode::EachLine),
2514 cx,
2515 );
2516
2517 if position.column > 0 {
2518 position += Point::new(1, 0);
2519 }
2520
2521 if !self.is_line_blank(position.row) {
2522 self.edit(
2523 [(position..position, "\n")],
2524 Some(AutoindentMode::EachLine),
2525 cx,
2526 );
2527 }
2528
2529 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2530 self.edit(
2531 [(position..position, "\n")],
2532 Some(AutoindentMode::EachLine),
2533 cx,
2534 );
2535 position.row += 1;
2536 }
2537
2538 if space_below
2539 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2540 {
2541 self.edit(
2542 [(position..position, "\n")],
2543 Some(AutoindentMode::EachLine),
2544 cx,
2545 );
2546 }
2547
2548 self.end_transaction(cx);
2549
2550 position
2551 }
2552
2553 /// Applies the given remote operations to the buffer.
2554 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2555 self.pending_autoindent.take();
2556 let was_dirty = self.is_dirty();
2557 let old_version = self.version.clone();
2558 let mut deferred_ops = Vec::new();
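        // Text operations are collected and applied to the underlying text buffer as a batch,
        // while other operations are applied immediately if possible and deferred otherwise.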
2559 let buffer_ops = ops
2560 .into_iter()
2561 .filter_map(|op| match op {
2562 Operation::Buffer(op) => Some(op),
2563 _ => {
2564 if self.can_apply_op(&op) {
2565 self.apply_op(op, cx);
2566 } else {
2567 deferred_ops.push(op);
2568 }
2569 None
2570 }
2571 })
2572 .collect::<Vec<_>>();
2573 for operation in buffer_ops.iter() {
2574 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2575 }
2576 self.text.apply_ops(buffer_ops);
2577 self.deferred_ops.insert(deferred_ops);
2578 self.flush_deferred_ops(cx);
2579 self.did_edit(&old_version, was_dirty, cx);
2580 // Notify independently of whether the buffer was edited as the operations could include a
2581 // selection update.
2582 cx.notify();
2583 }
2584
2585 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2586 let mut deferred_ops = Vec::new();
2587 for op in self.deferred_ops.drain().iter().cloned() {
2588 if self.can_apply_op(&op) {
2589 self.apply_op(op, cx);
2590 } else {
2591 deferred_ops.push(op);
2592 }
2593 }
2594 self.deferred_ops.insert(deferred_ops);
2595 }
2596
2597 pub fn has_deferred_ops(&self) -> bool {
2598 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2599 }
2600
2601 fn can_apply_op(&self, operation: &Operation) -> bool {
2602 match operation {
2603 Operation::Buffer(_) => {
2604 unreachable!("buffer operations should never be applied at this layer")
2605 }
2606 Operation::UpdateDiagnostics {
2607 diagnostics: diagnostic_set,
2608 ..
2609 } => diagnostic_set.iter().all(|diagnostic| {
2610 self.text.can_resolve(&diagnostic.range.start)
2611 && self.text.can_resolve(&diagnostic.range.end)
2612 }),
2613 Operation::UpdateSelections { selections, .. } => selections
2614 .iter()
2615 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2616 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2617 }
2618 }
2619
2620 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2621 match operation {
2622 Operation::Buffer(_) => {
2623 unreachable!("buffer operations should never be applied at this layer")
2624 }
2625 Operation::UpdateDiagnostics {
2626 server_id,
2627 diagnostics: diagnostic_set,
2628 lamport_timestamp,
2629 } => {
2630 let snapshot = self.snapshot();
2631 self.apply_diagnostic_update(
2632 server_id,
2633 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2634 lamport_timestamp,
2635 cx,
2636 );
2637 }
2638 Operation::UpdateSelections {
2639 selections,
2640 lamport_timestamp,
2641 line_mode,
2642 cursor_shape,
2643 } => {
2644 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2645 && set.lamport_timestamp > lamport_timestamp
2646 {
2647 return;
2648 }
2649
2650 self.remote_selections.insert(
2651 lamport_timestamp.replica_id,
2652 SelectionSet {
2653 selections,
2654 lamport_timestamp,
2655 line_mode,
2656 cursor_shape,
2657 },
2658 );
2659 self.text.lamport_clock.observe(lamport_timestamp);
2660 self.non_text_state_update_count += 1;
2661 }
2662 Operation::UpdateCompletionTriggers {
2663 triggers,
2664 lamport_timestamp,
2665 server_id,
2666 } => {
2667 if triggers.is_empty() {
2668 self.completion_triggers_per_language_server
2669 .remove(&server_id);
2670 self.completion_triggers = self
2671 .completion_triggers_per_language_server
2672 .values()
2673 .flat_map(|triggers| triggers.iter().cloned())
2674 .collect();
2675 } else {
2676 self.completion_triggers_per_language_server
2677 .insert(server_id, triggers.iter().cloned().collect());
2678 self.completion_triggers.extend(triggers);
2679 }
2680 self.text.lamport_clock.observe(lamport_timestamp);
2681 }
2682 Operation::UpdateLineEnding {
2683 line_ending,
2684 lamport_timestamp,
2685 } => {
2686 self.text.set_line_ending(line_ending);
2687 self.text.lamport_clock.observe(lamport_timestamp);
2688 }
2689 }
2690 }
2691
2692 fn apply_diagnostic_update(
2693 &mut self,
2694 server_id: LanguageServerId,
2695 diagnostics: DiagnosticSet,
2696 lamport_timestamp: clock::Lamport,
2697 cx: &mut Context<Self>,
2698 ) {
2699 if lamport_timestamp > self.diagnostics_timestamp {
2700 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2701 if diagnostics.is_empty() {
2702 if let Ok(ix) = ix {
2703 self.diagnostics.remove(ix);
2704 }
2705 } else {
2706 match ix {
2707 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2708 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2709 };
2710 }
2711 self.diagnostics_timestamp = lamport_timestamp;
2712 self.non_text_state_update_count += 1;
2713 self.text.lamport_clock.observe(lamport_timestamp);
2714 cx.notify();
2715 cx.emit(BufferEvent::DiagnosticsUpdated);
2716 }
2717 }
2718
2719 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2720 self.was_changed();
2721 cx.emit(BufferEvent::Operation {
2722 operation,
2723 is_local,
2724 });
2725 }
2726
2727 /// Removes the selections for a given peer.
2728 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2729 self.remote_selections.remove(&replica_id);
2730 cx.notify();
2731 }
2732
2733 /// Undoes the most recent transaction.
2734 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2735 let was_dirty = self.is_dirty();
2736 let old_version = self.version.clone();
2737
2738 if let Some((transaction_id, operation)) = self.text.undo() {
2739 self.send_operation(Operation::Buffer(operation), true, cx);
2740 self.did_edit(&old_version, was_dirty, cx);
2741 Some(transaction_id)
2742 } else {
2743 None
2744 }
2745 }
2746
2747 /// Manually undoes a specific transaction in the buffer's undo history.
2748 pub fn undo_transaction(
2749 &mut self,
2750 transaction_id: TransactionId,
2751 cx: &mut Context<Self>,
2752 ) -> bool {
2753 let was_dirty = self.is_dirty();
2754 let old_version = self.version.clone();
2755 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2756 self.send_operation(Operation::Buffer(operation), true, cx);
2757 self.did_edit(&old_version, was_dirty, cx);
2758 true
2759 } else {
2760 false
2761 }
2762 }
2763
2764 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2765 pub fn undo_to_transaction(
2766 &mut self,
2767 transaction_id: TransactionId,
2768 cx: &mut Context<Self>,
2769 ) -> bool {
2770 let was_dirty = self.is_dirty();
2771 let old_version = self.version.clone();
2772
2773 let operations = self.text.undo_to_transaction(transaction_id);
2774 let undone = !operations.is_empty();
2775 for operation in operations {
2776 self.send_operation(Operation::Buffer(operation), true, cx);
2777 }
2778 if undone {
2779 self.did_edit(&old_version, was_dirty, cx)
2780 }
2781 undone
2782 }
2783
2784 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2785 let was_dirty = self.is_dirty();
2786 let operation = self.text.undo_operations(counts);
2787 let old_version = self.version.clone();
2788 self.send_operation(Operation::Buffer(operation), true, cx);
2789 self.did_edit(&old_version, was_dirty, cx);
2790 }
2791
    /// Redoes the most recent transaction.
2793 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2794 let was_dirty = self.is_dirty();
2795 let old_version = self.version.clone();
2796
2797 if let Some((transaction_id, operation)) = self.text.redo() {
2798 self.send_operation(Operation::Buffer(operation), true, cx);
2799 self.did_edit(&old_version, was_dirty, cx);
2800 Some(transaction_id)
2801 } else {
2802 None
2803 }
2804 }
2805
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2807 pub fn redo_to_transaction(
2808 &mut self,
2809 transaction_id: TransactionId,
2810 cx: &mut Context<Self>,
2811 ) -> bool {
2812 let was_dirty = self.is_dirty();
2813 let old_version = self.version.clone();
2814
2815 let operations = self.text.redo_to_transaction(transaction_id);
2816 let redone = !operations.is_empty();
2817 for operation in operations {
2818 self.send_operation(Operation::Buffer(operation), true, cx);
2819 }
2820 if redone {
2821 self.did_edit(&old_version, was_dirty, cx)
2822 }
2823 redone
2824 }
2825
2826 /// Override current completion triggers with the user-provided completion triggers.
2827 pub fn set_completion_triggers(
2828 &mut self,
2829 server_id: LanguageServerId,
2830 triggers: BTreeSet<String>,
2831 cx: &mut Context<Self>,
2832 ) {
2833 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2834 if triggers.is_empty() {
2835 self.completion_triggers_per_language_server
2836 .remove(&server_id);
2837 self.completion_triggers = self
2838 .completion_triggers_per_language_server
2839 .values()
2840 .flat_map(|triggers| triggers.iter().cloned())
2841 .collect();
2842 } else {
2843 self.completion_triggers_per_language_server
2844 .insert(server_id, triggers.clone());
2845 self.completion_triggers.extend(triggers.iter().cloned());
2846 }
2847 self.send_operation(
2848 Operation::UpdateCompletionTriggers {
2849 triggers: triggers.into_iter().collect(),
2850 lamport_timestamp: self.completion_triggers_timestamp,
2851 server_id,
2852 },
2853 true,
2854 cx,
2855 );
2856 cx.notify();
2857 }
2858
2859 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2861 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2862 &self.completion_triggers
2863 }
2864
2865 /// Call this directly after performing edits to prevent the preview tab
2866 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2867 /// to return false until there are additional edits.
2868 pub fn refresh_preview(&mut self) {
2869 self.preview_version = self.version.clone();
2870 }
2871
2872 /// Whether we should preserve the preview status of a tab containing this buffer.
2873 pub fn preserve_preview(&self) -> bool {
2874 !self.has_edits_since(&self.preview_version)
2875 }
2876}
2877
2878#[doc(hidden)]
2879#[cfg(any(test, feature = "test-support"))]
2880impl Buffer {
2881 pub fn edit_via_marked_text(
2882 &mut self,
2883 marked_string: &str,
2884 autoindent_mode: Option<AutoindentMode>,
2885 cx: &mut Context<Self>,
2886 ) {
2887 let edits = self.edits_for_marked_text(marked_string);
2888 self.edit(edits, autoindent_mode, cx);
2889 }
2890
2891 pub fn set_group_interval(&mut self, group_interval: Duration) {
2892 self.text.set_group_interval(group_interval);
2893 }
2894
2895 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2896 where
2897 T: rand::Rng,
2898 {
2899 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2900 let mut last_end = None;
2901 for _ in 0..old_range_count {
2902 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2903 break;
2904 }
2905
2906 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2907 let mut range = self.random_byte_range(new_start, rng);
2908 if rng.random_bool(0.2) {
2909 mem::swap(&mut range.start, &mut range.end);
2910 }
2911 last_end = Some(range.end);
2912
2913 let new_text_len = rng.random_range(0..10);
2914 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2915 new_text = new_text.to_uppercase();
2916
2917 edits.push((range, new_text));
2918 }
2919 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2920 self.edit(edits, None, cx);
2921 }
2922
2923 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2924 let was_dirty = self.is_dirty();
2925 let old_version = self.version.clone();
2926
2927 let ops = self.text.randomly_undo_redo(rng);
2928 if !ops.is_empty() {
2929 for op in ops {
2930 self.send_operation(Operation::Buffer(op), true, cx);
2931 self.did_edit(&old_version, was_dirty, cx);
2932 }
2933 }
2934 }
2935}
2936
2937impl EventEmitter<BufferEvent> for Buffer {}
2938
2939impl Deref for Buffer {
2940 type Target = TextBuffer;
2941
2942 fn deref(&self) -> &Self::Target {
2943 &self.text
2944 }
2945}
2946
2947impl BufferSnapshot {
2948 /// Returns [`IndentSize`] for a given line that respects user settings and
2949 /// language preferences.
2950 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2951 indent_size_for_line(self, row)
2952 }
2953
2954 /// Returns [`IndentSize`] for a given position that respects user settings
2955 /// and language preferences.
2956 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2957 let settings = language_settings(
2958 self.language_at(position).map(|l| l.name()),
2959 self.file(),
2960 cx,
2961 );
2962 if settings.hard_tabs {
2963 IndentSize::tab()
2964 } else {
2965 IndentSize::spaces(settings.tab_size.get())
2966 }
2967 }
2968
2969 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2970 /// is passed in as `single_indent_size`.
2971 pub fn suggested_indents(
2972 &self,
2973 rows: impl Iterator<Item = u32>,
2974 single_indent_size: IndentSize,
2975 ) -> BTreeMap<u32, IndentSize> {
2976 let mut result = BTreeMap::new();
2977
2978 for row_range in contiguous_ranges(rows, 10) {
2979 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2980 Some(suggestions) => suggestions,
2981 _ => break,
2982 };
2983
2984 for (row, suggestion) in row_range.zip(suggestions) {
2985 let indent_size = if let Some(suggestion) = suggestion {
2986 result
2987 .get(&suggestion.basis_row)
2988 .copied()
2989 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2990 .with_delta(suggestion.delta, single_indent_size)
2991 } else {
2992 self.indent_size_for_line(row)
2993 };
2994
2995 result.insert(row, indent_size);
2996 }
2997 }
2998
2999 result
3000 }
3001
3002 fn suggest_autoindents(
3003 &self,
3004 row_range: Range<u32>,
3005 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3006 let config = &self.language.as_ref()?.config;
3007 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3008
3009 #[derive(Debug, Clone)]
3010 struct StartPosition {
3011 start: Point,
3012 suffix: SharedString,
3013 }
3014
3015 // Find the suggested indentation ranges based on the syntax tree.
3016 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3017 let end = Point::new(row_range.end, 0);
3018 let range = (start..end).to_offset(&self.text);
3019 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3020 Some(&grammar.indents_config.as_ref()?.query)
3021 });
3022 let indent_configs = matches
3023 .grammars()
3024 .iter()
3025 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3026 .collect::<Vec<_>>();
3027
3028 let mut indent_ranges = Vec::<Range<Point>>::new();
3029 let mut start_positions = Vec::<StartPosition>::new();
3030 let mut outdent_positions = Vec::<Point>::new();
3031 while let Some(mat) = matches.peek() {
3032 let mut start: Option<Point> = None;
3033 let mut end: Option<Point> = None;
3034
3035 let config = indent_configs[mat.grammar_index];
3036 for capture in mat.captures {
3037 if capture.index == config.indent_capture_ix {
3038 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3039 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3040 } else if Some(capture.index) == config.start_capture_ix {
3041 start = Some(Point::from_ts_point(capture.node.end_position()));
3042 } else if Some(capture.index) == config.end_capture_ix {
3043 end = Some(Point::from_ts_point(capture.node.start_position()));
3044 } else if Some(capture.index) == config.outdent_capture_ix {
3045 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3046 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3047 start_positions.push(StartPosition {
3048 start: Point::from_ts_point(capture.node.start_position()),
3049 suffix: suffix.clone(),
3050 });
3051 }
3052 }
3053
3054 matches.advance();
3055 if let Some((start, end)) = start.zip(end) {
3056 if start.row == end.row {
3057 continue;
3058 }
3059 let range = start..end;
3060 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3061 Err(ix) => indent_ranges.insert(ix, range),
3062 Ok(ix) => {
3063 let prev_range = &mut indent_ranges[ix];
3064 prev_range.end = prev_range.end.max(range.end);
3065 }
3066 }
3067 }
3068 }
3069
3070 let mut error_ranges = Vec::<Range<Point>>::new();
3071 let mut matches = self
3072 .syntax
3073 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3074 while let Some(mat) = matches.peek() {
3075 let node = mat.captures[0].node;
3076 let start = Point::from_ts_point(node.start_position());
3077 let end = Point::from_ts_point(node.end_position());
3078 let range = start..end;
3079 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3080 Ok(ix) | Err(ix) => ix,
3081 };
3082 let mut end_ix = ix;
3083 while let Some(existing_range) = error_ranges.get(end_ix) {
3084 if existing_range.end < end {
3085 end_ix += 1;
3086 } else {
3087 break;
3088 }
3089 }
3090 error_ranges.splice(ix..end_ix, [range]);
3091 matches.advance();
3092 }
3093
3094 outdent_positions.sort();
3095 for outdent_position in outdent_positions {
3096 // find the innermost indent range containing this outdent_position
3097 // set its end to the outdent position
3098 if let Some(range_to_truncate) = indent_ranges
3099 .iter_mut()
3100 .filter(|indent_range| indent_range.contains(&outdent_position))
3101 .next_back()
3102 {
3103 range_to_truncate.end = outdent_position;
3104 }
3105 }
3106
3107 start_positions.sort_by_key(|b| b.start);
3108
        // Find the suggested indentation increases and decreases based on regexes.
3110 let mut regex_outdent_map = HashMap::default();
3111 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3112 let mut start_positions_iter = start_positions.iter().peekable();
3113
3114 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3115 self.for_each_line(
3116 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3117 ..Point::new(row_range.end, 0),
3118 |row, line| {
3119 if config
3120 .decrease_indent_pattern
3121 .as_ref()
3122 .is_some_and(|regex| regex.is_match(line))
3123 {
3124 indent_change_rows.push((row, Ordering::Less));
3125 }
3126 if config
3127 .increase_indent_pattern
3128 .as_ref()
3129 .is_some_and(|regex| regex.is_match(line))
3130 {
3131 indent_change_rows.push((row + 1, Ordering::Greater));
3132 }
3133 while let Some(pos) = start_positions_iter.peek() {
3134 if pos.start.row < row {
3135 let pos = start_positions_iter.next().unwrap();
3136 last_seen_suffix
3137 .entry(pos.suffix.to_string())
3138 .or_default()
3139 .push(pos.start);
3140 } else {
3141 break;
3142 }
3143 }
3144 for rule in &config.decrease_indent_patterns {
3145 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3146 let row_start_column = self.indent_size_for_line(row).len;
3147 let basis_row = rule
3148 .valid_after
3149 .iter()
3150 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3151 .flatten()
3152 .filter(|start_point| start_point.column <= row_start_column)
3153 .max_by_key(|start_point| start_point.row);
3154 if let Some(outdent_to_row) = basis_row {
3155 regex_outdent_map.insert(row, outdent_to_row.row);
3156 }
3157 break;
3158 }
3159 }
3160 },
3161 );
3162
3163 let mut indent_changes = indent_change_rows.into_iter().peekable();
3164 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3165 prev_non_blank_row.unwrap_or(0)
3166 } else {
3167 row_range.start.saturating_sub(1)
3168 };
3169
3170 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3171 Some(row_range.map(move |row| {
3172 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3173
3174 let mut indent_from_prev_row = false;
3175 let mut outdent_from_prev_row = false;
3176 let mut outdent_to_row = u32::MAX;
3177 let mut from_regex = false;
3178
3179 while let Some((indent_row, delta)) = indent_changes.peek() {
3180 match indent_row.cmp(&row) {
3181 Ordering::Equal => match delta {
3182 Ordering::Less => {
3183 from_regex = true;
3184 outdent_from_prev_row = true
3185 }
3186 Ordering::Greater => {
3187 indent_from_prev_row = true;
3188 from_regex = true
3189 }
3190 _ => {}
3191 },
3192
3193 Ordering::Greater => break,
3194 Ordering::Less => {}
3195 }
3196
3197 indent_changes.next();
3198 }
3199
3200 for range in &indent_ranges {
3201 if range.start.row >= row {
3202 break;
3203 }
3204 if range.start.row == prev_row && range.end > row_start {
3205 indent_from_prev_row = true;
3206 }
3207 if range.end > prev_row_start && range.end <= row_start {
3208 outdent_to_row = outdent_to_row.min(range.start.row);
3209 }
3210 }
3211
3212 if let Some(basis_row) = regex_outdent_map.get(&row) {
3213 indent_from_prev_row = false;
3214 outdent_to_row = *basis_row;
3215 from_regex = true;
3216 }
3217
3218 let within_error = error_ranges
3219 .iter()
3220 .any(|e| e.start.row < row && e.end > row_start);
3221
3222 let suggestion = if outdent_to_row == prev_row
3223 || (outdent_from_prev_row && indent_from_prev_row)
3224 {
3225 Some(IndentSuggestion {
3226 basis_row: prev_row,
3227 delta: Ordering::Equal,
3228 within_error: within_error && !from_regex,
3229 })
3230 } else if indent_from_prev_row {
3231 Some(IndentSuggestion {
3232 basis_row: prev_row,
3233 delta: Ordering::Greater,
3234 within_error: within_error && !from_regex,
3235 })
3236 } else if outdent_to_row < prev_row {
3237 Some(IndentSuggestion {
3238 basis_row: outdent_to_row,
3239 delta: Ordering::Equal,
3240 within_error: within_error && !from_regex,
3241 })
3242 } else if outdent_from_prev_row {
3243 Some(IndentSuggestion {
3244 basis_row: prev_row,
3245 delta: Ordering::Less,
3246 within_error: within_error && !from_regex,
3247 })
3248 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3249 {
3250 Some(IndentSuggestion {
3251 basis_row: prev_row,
3252 delta: Ordering::Equal,
3253 within_error: within_error && !from_regex,
3254 })
3255 } else {
3256 None
3257 };
3258
3259 prev_row = row;
3260 prev_row_start = row_start;
3261 suggestion
3262 }))
3263 }
3264
3265 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3266 while row > 0 {
3267 row -= 1;
3268 if !self.is_line_blank(row) {
3269 return Some(row);
3270 }
3271 }
3272 None
3273 }
3274
3275 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3276 let captures = self.syntax.captures(range, &self.text, |grammar| {
3277 grammar
3278 .highlights_config
3279 .as_ref()
3280 .map(|config| &config.query)
3281 });
3282 let highlight_maps = captures
3283 .grammars()
3284 .iter()
3285 .map(|grammar| grammar.highlight_map())
3286 .collect();
3287 (captures, highlight_maps)
3288 }
3289
3290 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3291 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3292 /// returned in chunks where each chunk has a single syntax highlighting style and
3293 /// diagnostic status.
3294 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3295 let range = range.start.to_offset(self)..range.end.to_offset(self);
3296
3297 let mut syntax = None;
3298 if language_aware {
3299 syntax = Some(self.get_highlights(range.clone()));
3300 }
3301 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3302 let diagnostics = language_aware;
3303 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3304 }
3305
3306 pub fn highlighted_text_for_range<T: ToOffset>(
3307 &self,
3308 range: Range<T>,
3309 override_style: Option<HighlightStyle>,
3310 syntax_theme: &SyntaxTheme,
3311 ) -> HighlightedText {
3312 HighlightedText::from_buffer_range(
3313 range,
3314 &self.text,
3315 &self.syntax,
3316 override_style,
3317 syntax_theme,
3318 )
3319 }
3320
3321 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3323 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3324 let mut line = String::new();
3325 let mut row = range.start.row;
3326 for chunk in self
3327 .as_rope()
3328 .chunks_in_range(range.to_offset(self))
3329 .chain(["\n"])
3330 {
3331 for (newline_ix, text) in chunk.split('\n').enumerate() {
3332 if newline_ix > 0 {
3333 callback(row, &line);
3334 row += 1;
3335 line.clear();
3336 }
3337 line.push_str(text);
3338 }
3339 }
3340 }
3341
3342 /// Iterates over every [`SyntaxLayer`] in the buffer.
3343 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3344 self.syntax_layers_for_range(0..self.len(), true)
3345 }
3346
3347 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3348 let offset = position.to_offset(self);
3349 self.syntax_layers_for_range(offset..offset, false)
3350 .filter(|l| l.node().end_byte() > offset)
3351 .last()
3352 }
3353
3354 pub fn syntax_layers_for_range<D: ToOffset>(
3355 &self,
3356 range: Range<D>,
3357 include_hidden: bool,
3358 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3359 self.syntax
3360 .layers_for_range(range, &self.text, include_hidden)
3361 }
3362
3363 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3364 &self,
3365 range: Range<D>,
3366 ) -> Option<SyntaxLayer<'_>> {
3367 let range = range.to_offset(self);
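        // Among the candidate layers, prefer the deepest one; break ties by preferring the
        // layer whose node starts latest and, failing that, the one whose node ends earliest.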
3368 self.syntax
3369 .layers_for_range(range, &self.text, false)
3370 .max_by(|a, b| {
3371 if a.depth != b.depth {
3372 a.depth.cmp(&b.depth)
3373 } else if a.offset.0 != b.offset.0 {
3374 a.offset.0.cmp(&b.offset.0)
3375 } else {
3376 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3377 }
3378 })
3379 }
3380
3381 /// Returns the main [`Language`].
3382 pub fn language(&self) -> Option<&Arc<Language>> {
3383 self.language.as_ref()
3384 }
3385
3386 /// Returns the [`Language`] at the given location.
3387 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3388 self.syntax_layer_at(position)
3389 .map(|info| info.language)
3390 .or(self.language.as_ref())
3391 }
3392
3393 /// Returns the settings for the language at the given location.
3394 pub fn settings_at<'a, D: ToOffset>(
3395 &'a self,
3396 position: D,
3397 cx: &'a App,
3398 ) -> Cow<'a, LanguageSettings> {
3399 language_settings(
3400 self.language_at(position).map(|l| l.name()),
3401 self.file.as_ref(),
3402 cx,
3403 )
3404 }
3405
3406 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3407 CharClassifier::new(self.language_scope_at(point))
3408 }
3409
3410 /// Returns the [`LanguageScope`] at the given location.
3411 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3412 let offset = position.to_offset(self);
3413 let mut scope = None;
3414 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3415
3416 // Use the layer that has the smallest node intersecting the given point.
3417 for layer in self
3418 .syntax
3419 .layers_for_range(offset..offset, &self.text, false)
3420 {
3421 let mut cursor = layer.node().walk();
3422
3423 let mut range = None;
3424 loop {
3425 let child_range = cursor.node().byte_range();
3426 if !child_range.contains(&offset) {
3427 break;
3428 }
3429
3430 range = Some(child_range);
3431 if cursor.goto_first_child_for_byte(offset).is_none() {
3432 break;
3433 }
3434 }
3435
3436 if let Some(range) = range
3437 && smallest_range_and_depth.as_ref().is_none_or(
3438 |(smallest_range, smallest_range_depth)| {
3439 if layer.depth > *smallest_range_depth {
3440 true
3441 } else if layer.depth == *smallest_range_depth {
3442 range.len() < smallest_range.len()
3443 } else {
3444 false
3445 }
3446 },
3447 )
3448 {
3449 smallest_range_and_depth = Some((range, layer.depth));
3450 scope = Some(LanguageScope {
3451 language: layer.language.clone(),
3452 override_id: layer.override_id(offset, &self.text),
3453 });
3454 }
3455 }
3456
3457 scope.or_else(|| {
3458 self.language.clone().map(|language| LanguageScope {
3459 language,
3460 override_id: None,
3461 })
3462 })
3463 }
3464
3465 /// Returns a tuple of the range and character kind of the word
3466 /// surrounding the given position.
3467 pub fn surrounding_word<T: ToOffset>(
3468 &self,
3469 start: T,
3470 scope_context: Option<CharScopeContext>,
3471 ) -> (Range<usize>, Option<CharKind>) {
3472 let mut start = start.to_offset(self);
3473 let mut end = start;
3474 let mut next_chars = self.chars_at(start).take(128).peekable();
3475 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3476
3477 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3478 let word_kind = cmp::max(
3479 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3480 next_chars.peek().copied().map(|c| classifier.kind(c)),
3481 );
3482
3483 for ch in prev_chars {
3484 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3485 start -= ch.len_utf8();
3486 } else {
3487 break;
3488 }
3489 }
3490
3491 for ch in next_chars {
3492 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3493 end += ch.len_utf8();
3494 } else {
3495 break;
3496 }
3497 }
3498
3499 (start..end, word_kind)
3500 }
3501
3502 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3503 /// range. When `require_larger` is true, the node found must be larger than the query range.
3504 ///
3505 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3506 /// be moved to the root of the tree.
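    ///
    /// A minimal sketch of how callers use this (not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut cursor = layer.node().walk();
    /// if Self::goto_node_enclosing_range(&mut cursor, &(10..20), true) {
    ///     // `cursor.node()` now encloses 10..20 and is strictly larger than it.
    /// }
    /// ```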
3507 fn goto_node_enclosing_range(
3508 cursor: &mut tree_sitter::TreeCursor,
3509 query_range: &Range<usize>,
3510 require_larger: bool,
3511 ) -> bool {
3512 let mut ascending = false;
3513 loop {
3514 let mut range = cursor.node().byte_range();
3515 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3518 if range.start > query_range.start {
3519 cursor.goto_previous_sibling();
3520 range = cursor.node().byte_range();
3521 }
3522 } else {
3523 // When the query range is non-empty and the current node ends exactly at the start,
3524 // move to the next sibling to find a node that extends beyond the start.
3525 if range.end == query_range.start {
3526 cursor.goto_next_sibling();
3527 range = cursor.node().byte_range();
3528 }
3529 }
3530
3531 let encloses = range.contains_inclusive(query_range)
3532 && (!require_larger || range.len() > query_range.len());
3533 if !encloses {
3534 ascending = true;
3535 if !cursor.goto_parent() {
3536 return false;
3537 }
3538 continue;
3539 } else if ascending {
3540 return true;
3541 }
3542
3543 // Descend into the current node.
3544 if cursor
3545 .goto_first_child_for_byte(query_range.start)
3546 .is_none()
3547 {
3548 return true;
3549 }
3550 }
3551 }
3552
3553 pub fn syntax_ancestor<'a, T: ToOffset>(
3554 &'a self,
3555 range: Range<T>,
3556 ) -> Option<tree_sitter::Node<'a>> {
3557 let range = range.start.to_offset(self)..range.end.to_offset(self);
3558 let mut result: Option<tree_sitter::Node<'a>> = None;
3559 for layer in self
3560 .syntax
3561 .layers_for_range(range.clone(), &self.text, true)
3562 {
3563 let mut cursor = layer.node().walk();
3564
3565 // Find the node that both contains the range and is larger than it.
3566 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3567 continue;
3568 }
3569
3570 let left_node = cursor.node();
3571 let mut layer_result = left_node;
3572
3573 // For an empty range, try to find another node immediately to the right of the range.
3574 if left_node.end_byte() == range.start {
3575 let mut right_node = None;
3576 while !cursor.goto_next_sibling() {
3577 if !cursor.goto_parent() {
3578 break;
3579 }
3580 }
3581
3582 while cursor.node().start_byte() == range.start {
3583 right_node = Some(cursor.node());
3584 if !cursor.goto_first_child() {
3585 break;
3586 }
3587 }
3588
3589 // If there is a candidate node on both sides of the (empty) range, then
3590 // decide between the two by favoring a named node over an anonymous token.
3591 // If both nodes are the same in that regard, favor the right one.
3592 if let Some(right_node) = right_node
3593 && (right_node.is_named() || !left_node.is_named())
3594 {
3595 layer_result = right_node;
3596 }
3597 }
3598
3599 if let Some(previous_result) = &result
3600 && previous_result.byte_range().len() < layer_result.byte_range().len()
3601 {
3602 continue;
3603 }
3604 result = Some(layer_result);
3605 }
3606
3607 result
3608 }
3609
3610 /// Find the previous sibling syntax node at the given range.
3611 ///
3612 /// This function locates the syntax node that precedes the node containing
3613 /// the given range. It searches hierarchically by:
3614 /// 1. Finding the node that contains the given range
3615 /// 2. Looking for the previous sibling at the same tree level
3616 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3617 ///
3618 /// Returns `None` if there is no previous sibling at any ancestor level.
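    ///
    /// A minimal usage sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot` and `selection_range` is an offset range:
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_prev_sibling(selection_range.clone()) {
    ///     println!("previous sibling: {}", node.kind());
    /// }
    /// ```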
3619 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3620 &'a self,
3621 range: Range<T>,
3622 ) -> Option<tree_sitter::Node<'a>> {
3623 let range = range.start.to_offset(self)..range.end.to_offset(self);
3624 let mut result: Option<tree_sitter::Node<'a>> = None;
3625
3626 for layer in self
3627 .syntax
3628 .layers_for_range(range.clone(), &self.text, true)
3629 {
3630 let mut cursor = layer.node().walk();
3631
3632 // Find the node that contains the range
3633 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3634 continue;
3635 }
3636
3637 // Look for the previous sibling, moving up ancestor levels if needed
3638 loop {
3639 if cursor.goto_previous_sibling() {
3640 let layer_result = cursor.node();
3641
3642 if let Some(previous_result) = &result {
3643 if previous_result.byte_range().end < layer_result.byte_range().end {
3644 continue;
3645 }
3646 }
3647 result = Some(layer_result);
3648 break;
3649 }
3650
3651 // No sibling found at this level, try moving up to parent
3652 if !cursor.goto_parent() {
3653 break;
3654 }
3655 }
3656 }
3657
3658 result
3659 }
3660
3661 /// Find the next sibling syntax node at the given range.
3662 ///
3663 /// This function locates the syntax node that follows the node containing
3664 /// the given range. It searches hierarchically by:
3665 /// 1. Finding the node that contains the given range
3666 /// 2. Looking for the next sibling at the same tree level
3667 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3668 ///
3669 /// Returns `None` if there is no next sibling at any ancestor level.
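    ///
    /// A minimal usage sketch (not compiled as a doctest), mirroring [`Self::syntax_prev_sibling`]
    /// and assuming `snapshot` is a `BufferSnapshot` and `selection_range` is an offset range:
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_next_sibling(selection_range.clone()) {
    ///     println!("next sibling: {}", node.kind());
    /// }
    /// ```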
3670 pub fn syntax_next_sibling<'a, T: ToOffset>(
3671 &'a self,
3672 range: Range<T>,
3673 ) -> Option<tree_sitter::Node<'a>> {
3674 let range = range.start.to_offset(self)..range.end.to_offset(self);
3675 let mut result: Option<tree_sitter::Node<'a>> = None;
3676
3677 for layer in self
3678 .syntax
3679 .layers_for_range(range.clone(), &self.text, true)
3680 {
3681 let mut cursor = layer.node().walk();
3682
3683 // Find the node that contains the range
3684 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3685 continue;
3686 }
3687
3688 // Look for the next sibling, moving up ancestor levels if needed
3689 loop {
3690 if cursor.goto_next_sibling() {
3691 let layer_result = cursor.node();
3692
3693 if let Some(previous_result) = &result {
3694 if previous_result.byte_range().start > layer_result.byte_range().start {
3695 continue;
3696 }
3697 }
3698 result = Some(layer_result);
3699 break;
3700 }
3701
3702 // No sibling found at this level, try moving up to parent
3703 if !cursor.goto_parent() {
3704 break;
3705 }
3706 }
3707 }
3708
3709 result
3710 }
3711
3712 /// Returns the root syntax node within the given row
3713 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3714 let start_offset = position.to_offset(self);
3715
3716 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3717
3718 let layer = self
3719 .syntax
3720 .layers_for_range(start_offset..start_offset, &self.text, true)
3721 .next()?;
3722
3723 let mut cursor = layer.node().walk();
3724
3725 // Descend to the first leaf that touches the start of the range.
3726 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3727 if cursor.node().end_byte() == start_offset {
3728 cursor.goto_next_sibling();
3729 }
3730 }
3731
3732 // Ascend to the root node within the same row.
3733 while cursor.goto_parent() {
3734 if cursor.node().start_position().row != row {
3735 break;
3736 }
3737 }
3738
3739 Some(cursor.node())
3740 }
3741
3742 /// Returns the outline for the buffer.
3743 ///
3744 /// This method allows passing an optional [`SyntaxTheme`] to
3745 /// syntax-highlight the returned symbols.
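    ///
    /// A minimal usage sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// // Unstyled outline of the whole buffer.
    /// let outline = snapshot.outline(None);
    /// // Or inspect the raw items directly:
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```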
3746 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3747 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3748 }
3749
3750 /// Returns all the symbols that contain the given position.
3751 ///
3752 /// This method allows passing an optional [`SyntaxTheme`] to
3753 /// syntax-highlight the returned symbols.
3754 pub fn symbols_containing<T: ToOffset>(
3755 &self,
3756 position: T,
3757 theme: Option<&SyntaxTheme>,
3758 ) -> Vec<OutlineItem<Anchor>> {
3759 let position = position.to_offset(self);
3760 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3761 let end = self.clip_offset(position + 1, Bias::Right);
3762 let mut items = self.outline_items_containing(start..end, false, theme);
3763 let mut prev_depth = None;
3764 items.retain(|item| {
3765 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3766 prev_depth = Some(item.depth);
3767 result
3768 });
3769 items
3770 }
3771
3772 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3773 let range = range.to_offset(self);
3774 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3775 grammar.outline_config.as_ref().map(|c| &c.query)
3776 });
3777 let configs = matches
3778 .grammars()
3779 .iter()
3780 .map(|g| g.outline_config.as_ref().unwrap())
3781 .collect::<Vec<_>>();
3782
3783 while let Some(mat) = matches.peek() {
3784 let config = &configs[mat.grammar_index];
3785 let containing_item_node = maybe!({
3786 let item_node = mat.captures.iter().find_map(|cap| {
3787 if cap.index == config.item_capture_ix {
3788 Some(cap.node)
3789 } else {
3790 None
3791 }
3792 })?;
3793
3794 let item_byte_range = item_node.byte_range();
3795 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3796 None
3797 } else {
3798 Some(item_node)
3799 }
3800 });
3801
3802 if let Some(item_node) = containing_item_node {
3803 return Some(
3804 Point::from_ts_point(item_node.start_position())
3805 ..Point::from_ts_point(item_node.end_position()),
3806 );
3807 }
3808
3809 matches.advance();
3810 }
3811 None
3812 }
3813
3814 pub fn outline_items_containing<T: ToOffset>(
3815 &self,
3816 range: Range<T>,
3817 include_extra_context: bool,
3818 theme: Option<&SyntaxTheme>,
3819 ) -> Vec<OutlineItem<Anchor>> {
3820 let range = range.to_offset(self);
3821 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3822 grammar.outline_config.as_ref().map(|c| &c.query)
3823 });
3824
3825 let mut items = Vec::new();
3826 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3827 while let Some(mat) = matches.peek() {
3828 let config = matches.grammars()[mat.grammar_index]
3829 .outline_config
3830 .as_ref()
3831 .unwrap();
3832 if let Some(item) =
3833 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3834 {
3835 items.push(item);
3836 } else if let Some(capture) = mat
3837 .captures
3838 .iter()
3839 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3840 {
3841 let capture_range = capture.node.start_position()..capture.node.end_position();
3842 let mut capture_row_range =
3843 capture_range.start.row as u32..capture_range.end.row as u32;
3844 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3845 {
3846 capture_row_range.end -= 1;
3847 }
3848 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3849 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3850 last_row_range.end = capture_row_range.end;
3851 } else {
3852 annotation_row_ranges.push(capture_row_range);
3853 }
3854 } else {
3855 annotation_row_ranges.push(capture_row_range);
3856 }
3857 }
3858 matches.advance();
3859 }
3860
3861 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3862
3863 // Assign depths based on containment relationships and convert to anchors.
3864 let mut item_ends_stack = Vec::<Point>::new();
3865 let mut anchor_items = Vec::new();
3866 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3867 for item in items {
3868 while let Some(last_end) = item_ends_stack.last().copied() {
3869 if last_end < item.range.end {
3870 item_ends_stack.pop();
3871 } else {
3872 break;
3873 }
3874 }
3875
3876 let mut annotation_row_range = None;
3877 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3878 let row_preceding_item = item.range.start.row.saturating_sub(1);
3879 if next_annotation_row_range.end < row_preceding_item {
3880 annotation_row_ranges.next();
3881 } else {
3882 if next_annotation_row_range.end == row_preceding_item {
3883 annotation_row_range = Some(next_annotation_row_range.clone());
3884 annotation_row_ranges.next();
3885 }
3886 break;
3887 }
3888 }
3889
3890 anchor_items.push(OutlineItem {
3891 depth: item_ends_stack.len(),
3892 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3893 text: item.text,
3894 highlight_ranges: item.highlight_ranges,
3895 name_ranges: item.name_ranges,
3896 body_range: item
3897 .body_range
3898 .map(|r| self.anchor_after(r.start)..self.anchor_before(r.end)),
3899 annotation_range: annotation_row_range.map(|annotation_range| {
3900 self.anchor_after(Point::new(annotation_range.start, 0))
3901 ..self.anchor_before(Point::new(
3902 annotation_range.end,
3903 self.line_len(annotation_range.end),
3904 ))
3905 }),
3906 });
3907 item_ends_stack.push(item.range.end);
3908 }
3909
3910 anchor_items
3911 }
3912
3913 fn next_outline_item(
3914 &self,
3915 config: &OutlineConfig,
3916 mat: &SyntaxMapMatch,
3917 range: &Range<usize>,
3918 include_extra_context: bool,
3919 theme: Option<&SyntaxTheme>,
3920 ) -> Option<OutlineItem<Point>> {
3921 let item_node = mat.captures.iter().find_map(|cap| {
3922 if cap.index == config.item_capture_ix {
3923 Some(cap.node)
3924 } else {
3925 None
3926 }
3927 })?;
3928
3929 let item_byte_range = item_node.byte_range();
3930 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3931 return None;
3932 }
3933 let item_point_range = Point::from_ts_point(item_node.start_position())
3934 ..Point::from_ts_point(item_node.end_position());
3935
3936 let mut open_point = None;
3937 let mut close_point = None;
3938
3939 let mut buffer_ranges = Vec::new();
3940 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
3941 let mut range = node.start_byte()..node.end_byte();
3942 let start = node.start_position();
3943 if node.end_position().row > start.row {
3944 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3945 }
3946
3947 if !range.is_empty() {
3948 buffer_ranges.push((range, node_is_name));
3949 }
3950 };
3951
3952 for capture in mat.captures {
3953 if capture.index == config.name_capture_ix {
3954 add_to_buffer_ranges(capture.node, true);
3955 } else if Some(capture.index) == config.context_capture_ix
3956 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3957 {
3958 add_to_buffer_ranges(capture.node, false);
3959 } else {
3960 if Some(capture.index) == config.open_capture_ix {
3961 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3962 } else if Some(capture.index) == config.close_capture_ix {
3963 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3964 }
3965 }
3966 }
3967
3968 if buffer_ranges.is_empty() {
3969 return None;
3970 }
3971
3972 let mut text = String::new();
3973 let mut highlight_ranges = Vec::new();
3974 let mut name_ranges = Vec::new();
3975 let mut chunks = self.chunks(
3976 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3977 true,
3978 );
3979 let mut last_buffer_range_end = 0;
3980 for (buffer_range, is_name) in buffer_ranges {
3981 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3982 if space_added {
3983 text.push(' ');
3984 }
3985 let before_append_len = text.len();
3986 let mut offset = buffer_range.start;
3987 chunks.seek(buffer_range.clone());
3988 for mut chunk in chunks.by_ref() {
3989 if chunk.text.len() > buffer_range.end - offset {
3990 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3991 offset = buffer_range.end;
3992 } else {
3993 offset += chunk.text.len();
3994 }
3995 let style = chunk
3996 .syntax_highlight_id
3997 .zip(theme)
3998 .and_then(|(highlight, theme)| highlight.style(theme));
3999 if let Some(style) = style {
4000 let start = text.len();
4001 let end = start + chunk.text.len();
4002 highlight_ranges.push((start..end, style));
4003 }
4004 text.push_str(chunk.text);
4005 if offset >= buffer_range.end {
4006 break;
4007 }
4008 }
4009 if is_name {
4010 let after_append_len = text.len();
4011 let start = if space_added && !name_ranges.is_empty() {
4012 before_append_len - 1
4013 } else {
4014 before_append_len
4015 };
4016 name_ranges.push(start..after_append_len);
4017 }
4018 last_buffer_range_end = buffer_range.end;
4019 }
4020
4021 Some(OutlineItem {
4022 depth: 0, // We'll calculate the depth later
4023 range: item_point_range,
4024 text,
4025 highlight_ranges,
4026 name_ranges,
4027 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4028 annotation_range: None,
4029 })
4030 }
4031
4032 pub fn function_body_fold_ranges<T: ToOffset>(
4033 &self,
4034 within: Range<T>,
4035 ) -> impl Iterator<Item = Range<usize>> + '_ {
4036 self.text_object_ranges(within, TreeSitterOptions::default())
4037 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4038 }
4039
    /// For each grammar in the buffer's syntax layers, runs the
    /// [`tree_sitter::Query`] selected by the given callback against the given range.
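    ///
    /// A minimal usage sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here.
    ///     matches.advance();
    /// }
    /// ```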
4042 pub fn matches(
4043 &self,
4044 range: Range<usize>,
4045 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4046 ) -> SyntaxMapMatches<'_> {
4047 self.syntax.matches(range, self, query)
4048 }
4049
4050 pub fn all_bracket_ranges(
4051 &self,
4052 range: Range<usize>,
4053 ) -> impl Iterator<Item = BracketMatch> + '_ {
4054 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4055 grammar.brackets_config.as_ref().map(|c| &c.query)
4056 });
4057 let configs = matches
4058 .grammars()
4059 .iter()
4060 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4061 .collect::<Vec<_>>();
4062
4063 iter::from_fn(move || {
4064 while let Some(mat) = matches.peek() {
4065 let mut open = None;
4066 let mut close = None;
4067 let config = &configs[mat.grammar_index];
4068 let pattern = &config.patterns[mat.pattern_index];
4069 for capture in mat.captures {
4070 if capture.index == config.open_capture_ix {
4071 open = Some(capture.node.byte_range());
4072 } else if capture.index == config.close_capture_ix {
4073 close = Some(capture.node.byte_range());
4074 }
4075 }
4076
4077 matches.advance();
4078
4079 let Some((open_range, close_range)) = open.zip(close) else {
4080 continue;
4081 };
4082
4083 let bracket_range = open_range.start..=close_range.end;
4084 if !bracket_range.overlaps(&range) {
4085 continue;
4086 }
4087
4088 return Some(BracketMatch {
4089 open_range,
4090 close_range,
4091 newline_only: pattern.newline_only,
4092 });
4093 }
4094 None
4095 })
4096 }
4097
4098 /// Returns bracket range pairs overlapping or adjacent to `range`
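    ///
    /// A minimal usage sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot` and `offset` is a cursor offset:
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(offset..offset) {
    ///     println!("{:?} .. {:?}", pair.open_range, pair.close_range);
    /// }
    /// ```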
4099 pub fn bracket_ranges<T: ToOffset>(
4100 &self,
4101 range: Range<T>,
4102 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Expand the range by one position on each side so that bracket pairs
        // adjacent to the range are also considered.
4104 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4105 self.all_bracket_ranges(range)
4106 .filter(|pair| !pair.newline_only)
4107 }
4108
4109 pub fn debug_variables_query<T: ToOffset>(
4110 &self,
4111 range: Range<T>,
4112 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4113 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4114
4115 let mut matches = self.syntax.matches_with_options(
4116 range.clone(),
4117 &self.text,
4118 TreeSitterOptions::default(),
4119 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4120 );
4121
4122 let configs = matches
4123 .grammars()
4124 .iter()
4125 .map(|grammar| grammar.debug_variables_config.as_ref())
4126 .collect::<Vec<_>>();
4127
4128 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4129
4130 iter::from_fn(move || {
4131 loop {
4132 while let Some(capture) = captures.pop() {
4133 if capture.0.overlaps(&range) {
4134 return Some(capture);
4135 }
4136 }
4137
4138 let mat = matches.peek()?;
4139
4140 let Some(config) = configs[mat.grammar_index].as_ref() else {
4141 matches.advance();
4142 continue;
4143 };
4144
4145 for capture in mat.captures {
4146 let Some(ix) = config
4147 .objects_by_capture_ix
4148 .binary_search_by_key(&capture.index, |e| e.0)
4149 .ok()
4150 else {
4151 continue;
4152 };
4153 let text_object = config.objects_by_capture_ix[ix].1;
4154 let byte_range = capture.node.byte_range();
4155
4156 let mut found = false;
4157 for (range, existing) in captures.iter_mut() {
4158 if existing == &text_object {
4159 range.start = range.start.min(byte_range.start);
4160 range.end = range.end.max(byte_range.end);
4161 found = true;
4162 break;
4163 }
4164 }
4165
4166 if !found {
4167 captures.push((byte_range, text_object));
4168 }
4169 }
4170
4171 matches.advance();
4172 }
4173 })
4174 }
4175
4176 pub fn text_object_ranges<T: ToOffset>(
4177 &self,
4178 range: Range<T>,
4179 options: TreeSitterOptions,
4180 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4181 let range =
4182 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4183
4184 let mut matches =
4185 self.syntax
4186 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4187 grammar.text_object_config.as_ref().map(|c| &c.query)
4188 });
4189
4190 let configs = matches
4191 .grammars()
4192 .iter()
4193 .map(|grammar| grammar.text_object_config.as_ref())
4194 .collect::<Vec<_>>();
4195
4196 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4197
4198 iter::from_fn(move || {
4199 loop {
4200 while let Some(capture) = captures.pop() {
4201 if capture.0.overlaps(&range) {
4202 return Some(capture);
4203 }
4204 }
4205
4206 let mat = matches.peek()?;
4207
4208 let Some(config) = configs[mat.grammar_index].as_ref() else {
4209 matches.advance();
4210 continue;
4211 };
4212
4213 for capture in mat.captures {
4214 let Some(ix) = config
4215 .text_objects_by_capture_ix
4216 .binary_search_by_key(&capture.index, |e| e.0)
4217 .ok()
4218 else {
4219 continue;
4220 };
4221 let text_object = config.text_objects_by_capture_ix[ix].1;
4222 let byte_range = capture.node.byte_range();
4223
4224 let mut found = false;
4225 for (range, existing) in captures.iter_mut() {
4226 if existing == &text_object {
4227 range.start = range.start.min(byte_range.start);
4228 range.end = range.end.max(byte_range.end);
4229 found = true;
4230 break;
4231 }
4232 }
4233
4234 if !found {
4235 captures.push((byte_range, text_object));
4236 }
4237 }
4238
4239 matches.advance();
4240 }
4241 })
4242 }
4243
4244 /// Returns enclosing bracket ranges containing the given range
4245 pub fn enclosing_bracket_ranges<T: ToOffset>(
4246 &self,
4247 range: Range<T>,
4248 ) -> impl Iterator<Item = BracketMatch> + '_ {
4249 let range = range.start.to_offset(self)..range.end.to_offset(self);
4250
4251 self.bracket_ranges(range.clone()).filter(move |pair| {
4252 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4253 })
4254 }
4255
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket pairs are considered.
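    ///
    /// A minimal usage sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot` and `selection_range` is an offset range:
    ///
    /// ```ignore
    /// // Skip pairs whose open and close ranges touch, as a hypothetical filter.
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(
    ///     selection_range,
    ///     Some(&|open: Range<usize>, close: Range<usize>| open.end < close.start),
    /// );
    /// ```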
4259 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4260 &self,
4261 range: Range<T>,
4262 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4263 ) -> Option<(Range<usize>, Range<usize>)> {
4264 let range = range.start.to_offset(self)..range.end.to_offset(self);
4265
4266 // Get the ranges of the innermost pair of brackets.
4267 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4268
4269 for pair in self.enclosing_bracket_ranges(range) {
4270 if let Some(range_filter) = range_filter
4271 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4272 {
4273 continue;
4274 }
4275
4276 let len = pair.close_range.end - pair.open_range.start;
4277
4278 if let Some((existing_open, existing_close)) = &result {
4279 let existing_len = existing_close.end - existing_open.start;
4280 if len > existing_len {
4281 continue;
4282 }
4283 }
4284
4285 result = Some((pair.open_range, pair.close_range));
4286 }
4287
4288 result
4289 }
4290
    /// Returns offset ranges for any matches of the redaction query.
4292 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4293 /// will be run on the relevant section of the buffer.
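    ///
    /// A minimal usage sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```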
4294 pub fn redacted_ranges<T: ToOffset>(
4295 &self,
4296 range: Range<T>,
4297 ) -> impl Iterator<Item = Range<usize>> + '_ {
4298 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4299 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4300 grammar
4301 .redactions_config
4302 .as_ref()
4303 .map(|config| &config.query)
4304 });
4305
4306 let configs = syntax_matches
4307 .grammars()
4308 .iter()
4309 .map(|grammar| grammar.redactions_config.as_ref())
4310 .collect::<Vec<_>>();
4311
4312 iter::from_fn(move || {
4313 let redacted_range = syntax_matches
4314 .peek()
4315 .and_then(|mat| {
4316 configs[mat.grammar_index].and_then(|config| {
4317 mat.captures
4318 .iter()
4319 .find(|capture| capture.index == config.redaction_capture_ix)
4320 })
4321 })
4322 .map(|mat| mat.node.byte_range());
4323 syntax_matches.advance();
4324 redacted_range
4325 })
4326 }
4327
4328 pub fn injections_intersecting_range<T: ToOffset>(
4329 &self,
4330 range: Range<T>,
4331 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4332 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4333
4334 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4335 grammar
4336 .injection_config
4337 .as_ref()
4338 .map(|config| &config.query)
4339 });
4340
4341 let configs = syntax_matches
4342 .grammars()
4343 .iter()
4344 .map(|grammar| grammar.injection_config.as_ref())
4345 .collect::<Vec<_>>();
4346
4347 iter::from_fn(move || {
4348 let ranges = syntax_matches.peek().and_then(|mat| {
4349 let config = &configs[mat.grammar_index]?;
4350 let content_capture_range = mat.captures.iter().find_map(|capture| {
4351 if capture.index == config.content_capture_ix {
4352 Some(capture.node.byte_range())
4353 } else {
4354 None
4355 }
4356 })?;
4357 let language = self.language_at(content_capture_range.start)?;
4358 Some((content_capture_range, language))
4359 });
4360 syntax_matches.advance();
4361 ranges
4362 })
4363 }
4364
4365 pub fn runnable_ranges(
4366 &self,
4367 offset_range: Range<usize>,
4368 ) -> impl Iterator<Item = RunnableRange> + '_ {
4369 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4370 grammar.runnable_config.as_ref().map(|config| &config.query)
4371 });
4372
4373 let test_configs = syntax_matches
4374 .grammars()
4375 .iter()
4376 .map(|grammar| grammar.runnable_config.as_ref())
4377 .collect::<Vec<_>>();
4378
4379 iter::from_fn(move || {
4380 loop {
4381 let mat = syntax_matches.peek()?;
4382
4383 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4384 let mut run_range = None;
4385 let full_range = mat.captures.iter().fold(
4386 Range {
4387 start: usize::MAX,
4388 end: 0,
4389 },
4390 |mut acc, next| {
4391 let byte_range = next.node.byte_range();
4392 if acc.start > byte_range.start {
4393 acc.start = byte_range.start;
4394 }
4395 if acc.end < byte_range.end {
4396 acc.end = byte_range.end;
4397 }
4398 acc
4399 },
4400 );
4401 if full_range.start > full_range.end {
4402 // We did not find a full spanning range of this match.
4403 return None;
4404 }
4405 let extra_captures: SmallVec<[_; 1]> =
4406 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4407 test_configs
4408 .extra_captures
4409 .get(capture.index as usize)
4410 .cloned()
4411 .and_then(|tag_name| match tag_name {
4412 RunnableCapture::Named(name) => {
4413 Some((capture.node.byte_range(), name))
4414 }
4415 RunnableCapture::Run => {
4416 let _ = run_range.insert(capture.node.byte_range());
4417 None
4418 }
4419 })
4420 }));
4421 let run_range = run_range?;
4422 let tags = test_configs
4423 .query
4424 .property_settings(mat.pattern_index)
4425 .iter()
4426 .filter_map(|property| {
4427 if *property.key == *"tag" {
4428 property
4429 .value
4430 .as_ref()
4431 .map(|value| RunnableTag(value.to_string().into()))
4432 } else {
4433 None
4434 }
4435 })
4436 .collect();
4437 let extra_captures = extra_captures
4438 .into_iter()
4439 .map(|(range, name)| {
4440 (
4441 name.to_string(),
4442 self.text_for_range(range).collect::<String>(),
4443 )
4444 })
4445 .collect();
4446 // All tags should have the same range.
4447 Some(RunnableRange {
4448 run_range,
4449 full_range,
4450 runnable: Runnable {
4451 tags,
4452 language: mat.language,
4453 buffer: self.remote_id(),
4454 },
4455 extra_captures,
4456 buffer_id: self.remote_id(),
4457 })
4458 });
4459
4460 syntax_matches.advance();
4461 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. But we don't want
                    // to end this iterator just because a match lacked a run marker, so in that
                    // case we loop around and try the next match.
4464 return test_range;
4465 }
4466 }
4467 })
4468 }
4469
4470 /// Returns selections for remote peers intersecting the given range.
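    ///
    /// A minimal usage sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot` and `start..end` is an `Anchor` range:
    ///
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(start..end, false)
    /// {
    ///     for selection in selections {
    ///         // Each `selection` is a `&Selection<Anchor>` owned by `replica_id`.
    ///     }
    /// }
    /// ```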
4471 #[allow(clippy::type_complexity)]
4472 pub fn selections_in_range(
4473 &self,
4474 range: Range<Anchor>,
4475 include_local: bool,
4476 ) -> impl Iterator<
4477 Item = (
4478 ReplicaId,
4479 bool,
4480 CursorShape,
4481 impl Iterator<Item = &Selection<Anchor>> + '_,
4482 ),
4483 > + '_ {
4484 self.remote_selections
4485 .iter()
4486 .filter(move |(replica_id, set)| {
4487 (include_local || **replica_id != self.text.replica_id())
4488 && !set.selections.is_empty()
4489 })
4490 .map(move |(replica_id, set)| {
4491 let start_ix = match set.selections.binary_search_by(|probe| {
4492 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4493 }) {
4494 Ok(ix) | Err(ix) => ix,
4495 };
4496 let end_ix = match set.selections.binary_search_by(|probe| {
4497 probe.start.cmp(&range.end, self).then(Ordering::Less)
4498 }) {
4499 Ok(ix) | Err(ix) => ix,
4500 };
4501
4502 (
4503 *replica_id,
4504 set.line_mode,
4505 set.cursor_shape,
4506 set.selections[start_ix..end_ix].iter(),
4507 )
4508 })
4509 }
4510
    /// Returns whether the buffer contains any diagnostics.
4512 pub fn has_diagnostics(&self) -> bool {
4513 !self.diagnostics.is_empty()
4514 }
4515
4516 /// Returns all the diagnostics intersecting the given range.
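    ///
    /// A minimal usage sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```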
4517 pub fn diagnostics_in_range<'a, T, O>(
4518 &'a self,
4519 search_range: Range<T>,
4520 reversed: bool,
4521 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4522 where
4523 T: 'a + Clone + ToOffset,
4524 O: 'a + FromAnchor,
4525 {
4526 let mut iterators: Vec<_> = self
4527 .diagnostics
4528 .iter()
4529 .map(|(_, collection)| {
4530 collection
4531 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4532 .peekable()
4533 })
4534 .collect();
4535
4536 std::iter::from_fn(move || {
4537 let (next_ix, _) = iterators
4538 .iter_mut()
4539 .enumerate()
4540 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4541 .min_by(|(_, a), (_, b)| {
4542 let cmp = a
4543 .range
4544 .start
4545 .cmp(&b.range.start, self)
4546 // when range is equal, sort by diagnostic severity
4547 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4548 // and stabilize order with group_id
4549 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4550 if reversed { cmp.reverse() } else { cmp }
4551 })?;
4552 iterators[next_ix]
4553 .next()
4554 .map(
4555 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4556 diagnostic,
4557 range: FromAnchor::from_anchor(&range.start, self)
4558 ..FromAnchor::from_anchor(&range.end, self),
4559 },
4560 )
4561 })
4562 }
4563
4564 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4565 /// should be used instead.
4566 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4567 &self.diagnostics
4568 }
4569
4570 /// Returns all the diagnostic groups associated with the given
4571 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
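    ///
    /// A minimal usage sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{:?}: {}", server_id, primary.diagnostic.message);
    /// }
    /// ```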
4573 pub fn diagnostic_groups(
4574 &self,
4575 language_server_id: Option<LanguageServerId>,
4576 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4577 let mut groups = Vec::new();
4578
4579 if let Some(language_server_id) = language_server_id {
4580 if let Ok(ix) = self
4581 .diagnostics
4582 .binary_search_by_key(&language_server_id, |e| e.0)
4583 {
4584 self.diagnostics[ix]
4585 .1
4586 .groups(language_server_id, &mut groups, self);
4587 }
4588 } else {
4589 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4590 diagnostics.groups(*language_server_id, &mut groups, self);
4591 }
4592 }
4593
4594 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4595 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4596 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4597 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4598 });
4599
4600 groups
4601 }
4602
4603 /// Returns an iterator over the diagnostics for the given group.
4604 pub fn diagnostic_group<O>(
4605 &self,
4606 group_id: usize,
4607 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4608 where
4609 O: FromAnchor + 'static,
4610 {
4611 self.diagnostics
4612 .iter()
4613 .flat_map(move |(_, set)| set.group(group_id, self))
4614 }
4615
4616 /// An integer version number that accounts for all updates besides
4617 /// the buffer's text itself (which is versioned via a version vector).
4618 pub fn non_text_state_update_count(&self) -> usize {
4619 self.non_text_state_update_count
4620 }
4621
4622 /// An integer version that changes when the buffer's syntax changes.
4623 pub fn syntax_update_count(&self) -> usize {
4624 self.syntax.update_count()
4625 }
4626
    /// Returns a snapshot of the underlying file.
4628 pub fn file(&self) -> Option<&Arc<dyn File>> {
4629 self.file.as_ref()
4630 }
4631
4632 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4633 if let Some(file) = self.file() {
4634 if file.path().file_name().is_none() || include_root {
4635 Some(file.full_path(cx).to_string_lossy().into_owned())
4636 } else {
4637 Some(file.path().display(file.path_style(cx)).to_string())
4638 }
4639 } else {
4640 None
4641 }
4642 }
4643
4644 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4645 let query_str = query.fuzzy_contents;
4646 if query_str.is_some_and(|query| query.is_empty()) {
4647 return BTreeMap::default();
4648 }
4649
4650 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4651 language,
4652 override_id: None,
4653 }));
4654
4655 let mut query_ix = 0;
4656 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4657 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4658
4659 let mut words = BTreeMap::default();
4660 let mut current_word_start_ix = None;
4661 let mut chunk_ix = query.range.start;
4662 for chunk in self.chunks(query.range, false) {
4663 for (i, c) in chunk.text.char_indices() {
4664 let ix = chunk_ix + i;
4665 if classifier.is_word(c) {
4666 if current_word_start_ix.is_none() {
4667 current_word_start_ix = Some(ix);
4668 }
4669
4670 if let Some(query_chars) = &query_chars
4671 && query_ix < query_len
4672 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4673 {
4674 query_ix += 1;
4675 }
4676 continue;
4677 } else if let Some(word_start) = current_word_start_ix.take()
4678 && query_ix == query_len
4679 {
4680 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4681 let mut word_text = self.text_for_range(word_start..ix).peekable();
4682 let first_char = word_text
4683 .peek()
4684 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words
                    // and "words" that start with a digit.
4686 if !query.skip_digits
4687 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4688 {
4689 words.insert(word_text.collect(), word_range);
4690 }
4691 }
4692 query_ix = 0;
4693 }
4694 chunk_ix += chunk.text.len();
4695 }
4696
4697 words
4698 }
4699}
4700
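/// Options for [`BufferSnapshot::words_in_range`].
///
/// A minimal usage sketch (not compiled as a doctest), assuming `snapshot` is a
/// `BufferSnapshot`:
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("foo"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```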
4701pub struct WordsQuery<'a> {
    /// Only returns words that contain every character of this string in order (case-insensitive).
4703 pub fuzzy_contents: Option<&'a str>,
4704 /// Skips words that start with a digit.
4705 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4707 pub range: Range<usize>,
4708}
4709
4710fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4711 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4712}
4713
4714fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4715 let mut result = IndentSize::spaces(0);
4716 for c in text {
4717 let kind = match c {
4718 ' ' => IndentKind::Space,
4719 '\t' => IndentKind::Tab,
4720 _ => break,
4721 };
4722 if result.len == 0 {
4723 result.kind = kind;
4724 }
4725 result.len += 1;
4726 }
4727 result
4728}
4729
4730impl Clone for BufferSnapshot {
4731 fn clone(&self) -> Self {
4732 Self {
4733 text: self.text.clone(),
4734 syntax: self.syntax.clone(),
4735 file: self.file.clone(),
4736 remote_selections: self.remote_selections.clone(),
4737 diagnostics: self.diagnostics.clone(),
4738 language: self.language.clone(),
4739 non_text_state_update_count: self.non_text_state_update_count,
4740 }
4741 }
4742}
4743
4744impl Deref for BufferSnapshot {
4745 type Target = text::BufferSnapshot;
4746
4747 fn deref(&self) -> &Self::Target {
4748 &self.text
4749 }
4750}
4751
4752unsafe impl Send for BufferChunks<'_> {}
4753
4754impl<'a> BufferChunks<'a> {
4755 pub(crate) fn new(
4756 text: &'a Rope,
4757 range: Range<usize>,
4758 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4759 diagnostics: bool,
4760 buffer_snapshot: Option<&'a BufferSnapshot>,
4761 ) -> Self {
4762 let mut highlights = None;
4763 if let Some((captures, highlight_maps)) = syntax {
4764 highlights = Some(BufferChunkHighlights {
4765 captures,
4766 next_capture: None,
4767 stack: Default::default(),
4768 highlight_maps,
4769 })
4770 }
4771
4772 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4773 let chunks = text.chunks_in_range(range.clone());
4774
4775 let mut this = BufferChunks {
4776 range,
4777 buffer_snapshot,
4778 chunks,
4779 diagnostic_endpoints,
4780 error_depth: 0,
4781 warning_depth: 0,
4782 information_depth: 0,
4783 hint_depth: 0,
4784 unnecessary_depth: 0,
4785 underline: true,
4786 highlights,
4787 };
4788 this.initialize_diagnostic_endpoints();
4789 this
4790 }
4791
4792 /// Seeks to the given byte offset in the buffer.
4793 pub fn seek(&mut self, range: Range<usize>) {
4794 let old_range = std::mem::replace(&mut self.range, range.clone());
4795 self.chunks.set_range(self.range.clone());
4796 if let Some(highlights) = self.highlights.as_mut() {
4797 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4798 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4799 highlights
4800 .stack
4801 .retain(|(end_offset, _)| *end_offset > range.start);
4802 if let Some(capture) = &highlights.next_capture
4803 && range.start >= capture.node.start_byte()
4804 {
4805 let next_capture_end = capture.node.end_byte();
4806 if range.start < next_capture_end {
4807 highlights.stack.push((
4808 next_capture_end,
4809 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4810 ));
4811 }
4812 highlights.next_capture.take();
4813 }
4814 } else if let Some(snapshot) = self.buffer_snapshot {
4815 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4816 *highlights = BufferChunkHighlights {
4817 captures,
4818 next_capture: None,
4819 stack: Default::default(),
4820 highlight_maps,
4821 };
4822 } else {
4823 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4824 // Seeking such BufferChunks is not supported.
4825 debug_assert!(
4826 false,
4827 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4828 );
4829 }
4830
4831 highlights.captures.set_byte_range(self.range.clone());
4832 self.initialize_diagnostic_endpoints();
4833 }
4834 }
4835
4836 fn initialize_diagnostic_endpoints(&mut self) {
4837 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4838 && let Some(buffer) = self.buffer_snapshot
4839 {
4840 let mut diagnostic_endpoints = Vec::new();
4841 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4842 diagnostic_endpoints.push(DiagnosticEndpoint {
4843 offset: entry.range.start,
4844 is_start: true,
4845 severity: entry.diagnostic.severity,
4846 is_unnecessary: entry.diagnostic.is_unnecessary,
4847 underline: entry.diagnostic.underline,
4848 });
4849 diagnostic_endpoints.push(DiagnosticEndpoint {
4850 offset: entry.range.end,
4851 is_start: false,
4852 severity: entry.diagnostic.severity,
4853 is_unnecessary: entry.diagnostic.is_unnecessary,
4854 underline: entry.diagnostic.underline,
4855 });
4856 }
4857 diagnostic_endpoints
4858 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4859 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4860 self.hint_depth = 0;
4861 self.error_depth = 0;
4862 self.warning_depth = 0;
4863 self.information_depth = 0;
4864 }
4865 }
4866
4867 /// The current byte offset in the buffer.
4868 pub fn offset(&self) -> usize {
4869 self.range.start
4870 }
4871
4872 pub fn range(&self) -> Range<usize> {
4873 self.range.clone()
4874 }
4875
4876 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4877 let depth = match endpoint.severity {
4878 DiagnosticSeverity::ERROR => &mut self.error_depth,
4879 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4880 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4881 DiagnosticSeverity::HINT => &mut self.hint_depth,
4882 _ => return,
4883 };
4884 if endpoint.is_start {
4885 *depth += 1;
4886 } else {
4887 *depth -= 1;
4888 }
4889
4890 if endpoint.is_unnecessary {
4891 if endpoint.is_start {
4892 self.unnecessary_depth += 1;
4893 } else {
4894 self.unnecessary_depth -= 1;
4895 }
4896 }
4897 }
4898
4899 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4900 if self.error_depth > 0 {
4901 Some(DiagnosticSeverity::ERROR)
4902 } else if self.warning_depth > 0 {
4903 Some(DiagnosticSeverity::WARNING)
4904 } else if self.information_depth > 0 {
4905 Some(DiagnosticSeverity::INFORMATION)
4906 } else if self.hint_depth > 0 {
4907 Some(DiagnosticSeverity::HINT)
4908 } else {
4909 None
4910 }
4911 }
4912
4913 fn current_code_is_unnecessary(&self) -> bool {
4914 self.unnecessary_depth > 0
4915 }
4916}
4917
4918impl<'a> Iterator for BufferChunks<'a> {
4919 type Item = Chunk<'a>;
4920
4921 fn next(&mut self) -> Option<Self::Item> {
4922 let mut next_capture_start = usize::MAX;
4923 let mut next_diagnostic_endpoint = usize::MAX;
4924
4925 if let Some(highlights) = self.highlights.as_mut() {
4926 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4927 if *parent_capture_end <= self.range.start {
4928 highlights.stack.pop();
4929 } else {
4930 break;
4931 }
4932 }
4933
4934 if highlights.next_capture.is_none() {
4935 highlights.next_capture = highlights.captures.next();
4936 }
4937
4938 while let Some(capture) = highlights.next_capture.as_ref() {
4939 if self.range.start < capture.node.start_byte() {
4940 next_capture_start = capture.node.start_byte();
4941 break;
4942 } else {
4943 let highlight_id =
4944 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4945 highlights
4946 .stack
4947 .push((capture.node.end_byte(), highlight_id));
4948 highlights.next_capture = highlights.captures.next();
4949 }
4950 }
4951 }
4952
4953 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4954 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4955 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4956 if endpoint.offset <= self.range.start {
4957 self.update_diagnostic_depths(endpoint);
4958 diagnostic_endpoints.next();
4959 self.underline = endpoint.underline;
4960 } else {
4961 next_diagnostic_endpoint = endpoint.offset;
4962 break;
4963 }
4964 }
4965 }
4966 self.diagnostic_endpoints = diagnostic_endpoints;
4967
4968 if let Some(ChunkBitmaps {
4969 text: chunk,
4970 chars: chars_map,
4971 tabs,
4972 }) = self.chunks.peek_tabs()
4973 {
4974 let chunk_start = self.range.start;
4975 let mut chunk_end = (self.chunks.offset() + chunk.len())
4976 .min(next_capture_start)
4977 .min(next_diagnostic_endpoint);
4978 let mut highlight_id = None;
4979 if let Some(highlights) = self.highlights.as_ref()
4980 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
4981 {
4982 chunk_end = chunk_end.min(*parent_capture_end);
4983 highlight_id = Some(*parent_highlight_id);
4984 }
4985
4986 let slice =
4987 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4988 let bit_end = chunk_end - self.chunks.offset();
4989
4990 let mask = if bit_end >= 128 {
4991 u128::MAX
4992 } else {
4993 (1u128 << bit_end) - 1
4994 };
4995 let tabs = (tabs >> (chunk_start - self.chunks.offset())) & mask;
4996 let chars_map = (chars_map >> (chunk_start - self.chunks.offset())) & mask;
4997
4998 self.range.start = chunk_end;
4999 if self.range.start == self.chunks.offset() + chunk.len() {
5000 self.chunks.next().unwrap();
5001 }
5002
5003 Some(Chunk {
5004 text: slice,
5005 syntax_highlight_id: highlight_id,
5006 underline: self.underline,
5007 diagnostic_severity: self.current_diagnostic_severity(),
5008 is_unnecessary: self.current_code_is_unnecessary(),
5009 tabs,
5010 chars: chars_map,
5011 ..Chunk::default()
5012 })
5013 } else {
5014 None
5015 }
5016 }
5017}
5018
5019impl operation_queue::Operation for Operation {
5020 fn lamport_timestamp(&self) -> clock::Lamport {
5021 match self {
5022 Operation::Buffer(_) => {
5023 unreachable!("buffer operations should never be deferred at this layer")
5024 }
5025 Operation::UpdateDiagnostics {
5026 lamport_timestamp, ..
5027 }
5028 | Operation::UpdateSelections {
5029 lamport_timestamp, ..
5030 }
5031 | Operation::UpdateCompletionTriggers {
5032 lamport_timestamp, ..
5033 }
5034 | Operation::UpdateLineEnding {
5035 lamport_timestamp, ..
5036 } => *lamport_timestamp,
5037 }
5038 }
5039}
5040
5041impl Default for Diagnostic {
5042 fn default() -> Self {
5043 Self {
5044 source: Default::default(),
5045 source_kind: DiagnosticSourceKind::Other,
5046 code: None,
5047 code_description: None,
5048 severity: DiagnosticSeverity::ERROR,
5049 message: Default::default(),
5050 markdown: None,
5051 group_id: 0,
5052 is_primary: false,
5053 is_disk_based: false,
5054 is_unnecessary: false,
5055 underline: true,
5056 data: None,
5057 }
5058 }
5059}
5060
5061impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5063 pub fn spaces(len: u32) -> Self {
5064 Self {
5065 len,
5066 kind: IndentKind::Space,
5067 }
5068 }
5069
5070 /// Returns an [`IndentSize`] representing a tab.
5071 pub fn tab() -> Self {
5072 Self {
5073 len: 1,
5074 kind: IndentKind::Tab,
5075 }
5076 }
5077
5078 /// An iterator over the characters represented by this [`IndentSize`].
5079 pub fn chars(&self) -> impl Iterator<Item = char> {
5080 iter::repeat(self.char()).take(self.len as usize)
5081 }
5082
5083 /// The character representation of this [`IndentSize`].
5084 pub fn char(&self) -> char {
5085 match self.kind {
5086 IndentKind::Space => ' ',
5087 IndentKind::Tab => '\t',
5088 }
5089 }
5090
5091 /// Consumes the current [`IndentSize`] and returns a new one that has
5092 /// been shrunk or enlarged by the given size along the given direction.
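    ///
    /// A minimal sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// // Growing a 4-space indent by another 4 spaces yields 8 spaces.
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.chars().count(), 8);
    /// ```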
5093 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5094 match direction {
5095 Ordering::Less => {
5096 if self.kind == size.kind && self.len >= size.len {
5097 self.len -= size.len;
5098 }
5099 }
5100 Ordering::Equal => {}
5101 Ordering::Greater => {
5102 if self.len == 0 {
5103 self = size;
5104 } else if self.kind == size.kind {
5105 self.len += size.len;
5106 }
5107 }
5108 }
5109 self
5110 }
5111
5112 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5113 match self.kind {
5114 IndentKind::Space => self.len as usize,
5115 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5116 }
5117 }
5118}
5119
5120#[cfg(any(test, feature = "test-support"))]
5121pub struct TestFile {
5122 pub path: Arc<RelPath>,
5123 pub root_name: String,
5124 pub local_root: Option<PathBuf>,
5125}
5126
5127#[cfg(any(test, feature = "test-support"))]
5128impl File for TestFile {
5129 fn path(&self) -> &Arc<RelPath> {
5130 &self.path
5131 }
5132
5133 fn full_path(&self, _: &gpui::App) -> PathBuf {
5134 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5135 }
5136
5137 fn as_local(&self) -> Option<&dyn LocalFile> {
5138 if self.local_root.is_some() {
5139 Some(self)
5140 } else {
5141 None
5142 }
5143 }
5144
5145 fn disk_state(&self) -> DiskState {
5146 unimplemented!()
5147 }
5148
5149 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5150 self.path().file_name().unwrap_or(self.root_name.as_ref())
5151 }
5152
5153 fn worktree_id(&self, _: &App) -> WorktreeId {
5154 WorktreeId::from_usize(0)
5155 }
5156
5157 fn to_proto(&self, _: &App) -> rpc::proto::File {
5158 unimplemented!()
5159 }
5160
5161 fn is_private(&self) -> bool {
5162 false
5163 }
5164
5165 fn path_style(&self, _cx: &App) -> PathStyle {
5166 PathStyle::local()
5167 }
5168}
5169
5170#[cfg(any(test, feature = "test-support"))]
5171impl LocalFile for TestFile {
5172 fn abs_path(&self, _cx: &App) -> PathBuf {
5173 PathBuf::from(self.local_root.as_ref().unwrap())
5174 .join(&self.root_name)
5175 .join(self.path.as_std_path())
5176 }
5177
5178 fn load(&self, _cx: &App) -> Task<Result<String>> {
5179 unimplemented!()
5180 }
5181
5182 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5183 unimplemented!()
5184 }
5185}
5186
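/// Groups an iterator of row numbers into contiguous ranges, starting a new range
/// whenever the next value is not consecutive or the current range has reached `max_len`.
///
/// A minimal sketch (not compiled as a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 7, 8].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 7..9]);
/// ```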
5187pub(crate) fn contiguous_ranges(
5188 values: impl Iterator<Item = u32>,
5189 max_len: usize,
5190) -> impl Iterator<Item = Range<u32>> {
5191 let mut values = values;
5192 let mut current_range: Option<Range<u32>> = None;
5193 std::iter::from_fn(move || {
5194 loop {
5195 if let Some(value) = values.next() {
5196 if let Some(range) = &mut current_range
5197 && value == range.end
5198 && range.len() < max_len
5199 {
5200 range.end += 1;
5201 continue;
5202 }
5203
5204 let prev_range = current_range.clone();
5205 current_range = Some(value..(value + 1));
5206 if prev_range.is_some() {
5207 return prev_range;
5208 }
5209 } else {
5210 return current_range.take();
5211 }
5212 }
5213 })
5214}
5215
5216#[derive(Default, Debug)]
5217pub struct CharClassifier {
5218 scope: Option<LanguageScope>,
5219 scope_context: Option<CharScopeContext>,
5220 ignore_punctuation: bool,
5221}
5222
5223impl CharClassifier {
5224 pub fn new(scope: Option<LanguageScope>) -> Self {
5225 Self {
5226 scope,
5227 scope_context: None,
5228 ignore_punctuation: false,
5229 }
5230 }
5231
5232 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5233 Self {
5234 scope_context,
5235 ..self
5236 }
5237 }
5238
5239 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5240 Self {
5241 ignore_punctuation,
5242 ..self
5243 }
5244 }
5245
5246 pub fn is_whitespace(&self, c: char) -> bool {
5247 self.kind(c) == CharKind::Whitespace
5248 }
5249
5250 pub fn is_word(&self, c: char) -> bool {
5251 self.kind(c) == CharKind::Word
5252 }
5253
5254 pub fn is_punctuation(&self, c: char) -> bool {
5255 self.kind(c) == CharKind::Punctuation
5256 }
5257
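    /// Classifies `c`, optionally treating punctuation as a word character.
    ///
    /// A minimal sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let classifier = CharClassifier::new(None);
    /// assert_eq!(classifier.kind_with('a', false), CharKind::Word);
    /// assert_eq!(classifier.kind_with('-', false), CharKind::Punctuation);
    /// assert_eq!(classifier.kind_with('-', true), CharKind::Word);
    /// ```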
5258 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5259 if c.is_alphanumeric() || c == '_' {
5260 return CharKind::Word;
5261 }
5262
5263 if let Some(scope) = &self.scope {
5264 let characters = match self.scope_context {
5265 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5266 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5267 None => scope.word_characters(),
5268 };
5269 if let Some(characters) = characters
5270 && characters.contains(&c)
5271 {
5272 return CharKind::Word;
5273 }
5274 }
5275
5276 if c.is_whitespace() {
5277 return CharKind::Whitespace;
5278 }
5279
5280 if ignore_punctuation {
5281 CharKind::Word
5282 } else {
5283 CharKind::Punctuation
5284 }
5285 }
5286
5287 pub fn kind(&self, c: char) -> CharKind {
5288 self.kind_with(c, self.ignore_punctuation)
5289 }
5290}
5291
5292/// Find all of the ranges of whitespace that occur at the ends of lines
5293/// in the given rope.
5294///
5295/// This could also be done with a regex search, but this implementation
5296/// avoids copying text.
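///
/// A minimal sketch (not compiled as a doctest), assuming `Rope` implements `From<&str>`:
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n}\t\n");
/// // Trailing spaces on the first line and the trailing tab on the second.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..13, 15..16]);
/// ```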
5297pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5298 let mut ranges = Vec::new();
5299
5300 let mut offset = 0;
5301 let mut prev_chunk_trailing_whitespace_range = 0..0;
5302 for chunk in rope.chunks() {
5303 let mut prev_line_trailing_whitespace_range = 0..0;
5304 for (i, line) in chunk.split('\n').enumerate() {
5305 let line_end_offset = offset + line.len();
5306 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5307 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5308
5309 if i == 0 && trimmed_line_len == 0 {
5310 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5311 }
5312 if !prev_line_trailing_whitespace_range.is_empty() {
5313 ranges.push(prev_line_trailing_whitespace_range);
5314 }
5315
5316 offset = line_end_offset + 1;
5317 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5318 }
5319
5320 offset -= 1;
5321 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5322 }
5323
5324 if !prev_chunk_trailing_whitespace_range.is_empty() {
5325 ranges.push(prev_chunk_trailing_whitespace_range);
5326 }
5327
5328 ranges
5329}