1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 future::Future,
45 iter::{self, Iterator, Peekable},
46 mem,
47 num::NonZeroU32,
48 ops::{Deref, Range},
49 path::PathBuf,
50 rc,
51 sync::{Arc, LazyLock},
52 time::{Duration, Instant},
53 vec,
54};
55use sum_tree::TreeMap;
56use text::operation_queue::OperationQueue;
57use text::*;
58pub use text::{
59 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
60 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
61 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
62 ToPointUtf16, Transaction, TransactionId, Unclipped,
63};
64use theme::{ActiveTheme as _, SyntaxTheme};
65#[cfg(any(test, feature = "test-support"))]
66use util::RandomCharIter;
67use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
68
69#[cfg(any(test, feature = "test-support"))]
70pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
71
72pub use lsp::DiagnosticSeverity;
73
/// A label for the background task spawned by the buffer to compute
/// a diff against the contents of its file.
///
/// Callers can use this label to identify that particular task
/// (see `gpui::TaskLabel`).
pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);

/// Indicate whether a [`Buffer`] has permissions to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {
    /// The buffer is a mutable replica.
    ReadWrite,
    /// The buffer is a read-only replica.
    ReadOnly,
}

/// A zero-based row (line) index within a buffer.
pub type BufferRow = u32;
88
/// An in-memory representation of a source code file, including its text,
/// syntax trees, git status, and diagnostics.
pub struct Buffer {
    /// The underlying text and its edit history.
    text: TextBuffer,
    /// Set when this buffer is a branch of another ("base") buffer.
    branch_state: Option<BufferBranchState>,
    /// Filesystem state, `None` when there is no path.
    file: Option<Arc<dyn File>>,
    /// The mtime of the file when this buffer was last loaded from
    /// or saved to disk.
    saved_mtime: Option<MTime>,
    /// The version vector when this buffer was last loaded from
    /// or saved to disk.
    saved_version: clock::Global,
    /// A version vector tracked for preview purposes; initialized to the
    /// buffer's version at construction (semantics defined by callers).
    preview_version: clock::Global,
    /// Depth of currently nested transactions.
    transaction_depth: usize,
    /// Dirty state captured when the outermost transaction began, if any.
    was_dirty_before_starting_transaction: Option<bool>,
    /// In-flight task reloading the buffer's contents from disk.
    reload_task: Option<Task<Result<()>>>,
    /// The language assigned to this buffer, if any.
    language: Option<Arc<Language>>,
    /// Pending requests to recompute indentation after edits.
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    /// Channels notified once pending auto-indentation has completed.
    wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
    /// In-flight task computing auto-indentation.
    pending_autoindent: Option<Task<()>>,
    /// Time budget for parsing synchronously before falling back to a
    /// background parse (1ms by default — see `build`).
    sync_parse_timeout: Duration,
    /// The buffer's syntax trees, guarded for interior mutability.
    syntax_map: Mutex<SyntaxMap>,
    /// In-flight background reparse task.
    reparse: Option<Task<()>>,
    /// Watch channel broadcasting whether a parse is in progress.
    parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
    /// Counter bumped on non-text state changes (diagnostics, selections,
    /// etc.) so observers can detect them.
    non_text_state_update_count: usize,
    /// Diagnostics, stored per language server.
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    /// The most recent selections reported by each remote replica.
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    /// Lamport timestamp of the most recent diagnostics update.
    diagnostics_timestamp: clock::Lamport,
    /// Union of completion trigger characters across language servers.
    completion_triggers: BTreeSet<String>,
    /// Completion trigger characters, per language server.
    completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
    /// Lamport timestamp of the most recent completion-triggers update.
    completion_triggers_timestamp: clock::Lamport,
    /// Operations that arrived out of order, waiting to be applied.
    deferred_ops: OperationQueue<Operation>,
    /// Whether this replica may edit the buffer.
    capability: Capability,
    /// Whether the buffer's contents conflict with the file on disk.
    has_conflict: bool,
    /// Memoize calls to has_changes_since(saved_version).
    /// The contents of a cell are (self.version, has_changes) at the time of a last call.
    has_unsaved_edits: Cell<(clock::Global, bool)>,
    /// Weak change flags handed out to observers; presumably set when the
    /// buffer changes — set in code outside this chunk.
    change_bits: Vec<rc::Weak<Cell<bool>>>,
    /// Subscriptions kept alive for the buffer's lifetime.
    _subscriptions: Vec<gpui::Subscription>,
}
130
/// Whether the buffer's syntax tree is currently being computed.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ParseStatus {
    /// No parse is in progress.
    Idle,
    /// The buffer is being (re)parsed.
    Parsing,
}

/// State tracked when a buffer is a branch of another ("base") buffer.
struct BufferBranchState {
    /// The buffer this branch was created from.
    base_buffer: Entity<Buffer>,
    /// Timestamps of branch operations already merged into the base buffer.
    /// NOTE(review): inferred from the name — confirm at the merge site.
    merged_operations: Vec<Lamport>,
}
141
/// An immutable, cheaply cloneable representation of a fixed
/// state of a buffer.
pub struct BufferSnapshot {
    /// The underlying text snapshot.
    pub text: text::BufferSnapshot,
    /// The syntax trees at the time of the snapshot.
    pub syntax: SyntaxSnapshot,
    /// The buffer's file at the time of the snapshot, if any.
    file: Option<Arc<dyn File>>,
    /// Diagnostics, stored per language server.
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    /// The most recent selections reported by each remote replica.
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    /// The language assigned to the buffer, if any.
    language: Option<Arc<Language>>,
    /// Value of the buffer's non-text update counter at snapshot time.
    non_text_state_update_count: usize,
}
153
/// The kind and amount of indentation in a particular line. For now,
/// assumes that indentation is all the same character.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub struct IndentSize {
    /// The number of bytes that comprise the indentation.
    pub len: u32,
    /// The kind of whitespace used for indentation.
    pub kind: IndentKind,
}

/// A whitespace character that's used for indentation.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub enum IndentKind {
    /// An ASCII space character.
    #[default]
    Space,
    /// An ASCII tab character.
    Tab,
}
173
/// The shape of a selection cursor.
///
/// Convertible from [`settings::CursorShape`] via the `From` impl below.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum CursorShape {
    /// A vertical bar
    #[default]
    Bar,
    /// A block that surrounds the following character
    Block,
    /// An underline that runs along the following character
    Underline,
    /// A box drawn around the following character
    Hollow,
}
187
188impl From<settings::CursorShape> for CursorShape {
189 fn from(shape: settings::CursorShape) -> Self {
190 match shape {
191 settings::CursorShape::Bar => CursorShape::Bar,
192 settings::CursorShape::Block => CursorShape::Block,
193 settings::CursorShape::Underline => CursorShape::Underline,
194 settings::CursorShape::Hollow => CursorShape::Hollow,
195 }
196 }
197}
198
/// A replica's set of selections, as carried by `Operation::UpdateSelections`.
#[derive(Clone, Debug)]
struct SelectionSet {
    /// Whether the selections are in 'line mode'.
    line_mode: bool,
    /// The cursor shape associated with these selections.
    cursor_shape: CursorShape,
    /// The selections themselves.
    selections: Arc<[Selection<Anchor>]>,
    /// Lamport timestamp of the operation that produced this set.
    lamport_timestamp: clock::Lamport,
}
206
/// A diagnostic associated with a certain range of a buffer.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Diagnostic {
    /// The name of the service that produced this diagnostic.
    pub source: Option<String>,
    /// A machine-readable code that identifies this diagnostic.
    pub code: Option<NumberOrString>,
    /// A URI to documentation describing this diagnostic's code,
    /// when supplied by the source (LSP `codeDescription`).
    pub code_description: Option<lsp::Uri>,
    /// Whether this diagnostic is a hint, warning, or error.
    pub severity: DiagnosticSeverity,
    /// The human-readable message associated with this diagnostic.
    pub message: String,
    /// The human-readable message (in markdown format)
    pub markdown: Option<String>,
    /// An id that identifies the group to which this diagnostic belongs.
    ///
    /// When a language server produces a diagnostic with
    /// one or more associated diagnostics, those diagnostics are all
    /// assigned a single group ID.
    pub group_id: usize,
    /// Whether this diagnostic is the primary diagnostic for its group.
    ///
    /// In a given group, the primary diagnostic is the top-level diagnostic
    /// returned by the language server. The non-primary diagnostics are the
    /// associated diagnostics.
    pub is_primary: bool,
    /// Whether this diagnostic is considered to originate from an analysis of
    /// files on disk, as opposed to any unsaved buffer contents. This is a
    /// property of a given diagnostic source, and is configured for a given
    /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
    /// for the language server.
    pub is_disk_based: bool,
    /// Whether this diagnostic marks unnecessary code.
    pub is_unnecessary: bool,
    /// Allows quickly separating diagnostic groups by how they were produced.
    pub source_kind: DiagnosticSourceKind,
    /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
    pub data: Option<Value>,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}

/// How a diagnostic was produced.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum DiagnosticSourceKind {
    /// Pulled from the server by the client (LSP pull diagnostics).
    Pulled,
    /// Pushed by the server (LSP publish-diagnostics notification).
    Pushed,
    /// Produced by some other mechanism.
    Other,
}
255
/// An operation used to synchronize this buffer with its other replicas.
///
/// Non-text variants carry a lamport timestamp so replicas can order them
/// consistently (see `serialize_ops`, which sorts by lamport timestamp).
#[derive(Clone, Debug, PartialEq)]
pub enum Operation {
    /// A text operation.
    Buffer(text::Operation),

    /// An update to the buffer's diagnostics.
    UpdateDiagnostics {
        /// The id of the language server that produced the new diagnostics.
        server_id: LanguageServerId,
        /// The diagnostics.
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },

    /// An update to the most recent selections in this buffer.
    UpdateSelections {
        /// The selections.
        selections: Arc<[Selection<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// Whether the selections are in 'line mode'.
        line_mode: bool,
        /// The [`CursorShape`] associated with these selections.
        cursor_shape: CursorShape,
    },

    /// An update to the characters that should trigger autocompletion
    /// for this buffer.
    UpdateCompletionTriggers {
        /// The characters that trigger autocompletion.
        triggers: Vec<String>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// The language server ID.
        server_id: LanguageServerId,
    },

    /// An update to the line ending type of this buffer.
    UpdateLineEnding {
        /// The line ending type.
        line_ending: LineEnding,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },
}
303
/// An event that occurs in a buffer.
#[derive(Clone, Debug, PartialEq)]
pub enum BufferEvent {
    /// The buffer was changed in a way that must be
    /// propagated to its other replicas.
    Operation {
        /// The operation to replicate.
        operation: Operation,
        /// Whether the operation originated on this replica.
        is_local: bool,
    },
    /// The buffer was edited.
    Edited,
    /// The buffer's `dirty` bit changed.
    DirtyChanged,
    /// The buffer was saved.
    Saved,
    /// The buffer's file was changed on disk.
    FileHandleChanged,
    /// The buffer was reloaded.
    Reloaded,
    /// The buffer is in need of a reload
    ReloadNeeded,
    /// The buffer's language was changed.
    LanguageChanged,
    /// The buffer's syntax trees were updated.
    Reparsed,
    /// The buffer's diagnostics were updated.
    DiagnosticsUpdated,
    /// The buffer gained or lost editing capabilities.
    CapabilityChanged,
}
334
/// The file associated with a buffer.
///
/// See [`LocalFile`] for the extension trait covering files that live on
/// the local filesystem.
pub trait File: Send + Sync + Any {
    /// Returns the [`LocalFile`] associated with this file, if the
    /// file is local.
    fn as_local(&self) -> Option<&dyn LocalFile>;

    /// Returns whether this file is local.
    fn is_local(&self) -> bool {
        self.as_local().is_some()
    }

    /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
    /// only available in some states, such as modification time.
    fn disk_state(&self) -> DiskState;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<RelPath>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self, cx: &App) -> PathBuf;

    /// Returns the path style of this file.
    fn path_style(&self, cx: &App) -> PathStyle;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;

    /// Returns the id of the worktree to which this file belongs.
    ///
    /// This is needed for looking up project-specific settings.
    fn worktree_id(&self, cx: &App) -> WorktreeId;

    /// Converts this file into a protobuf message.
    fn to_proto(&self, cx: &App) -> rpc::proto::File;

    /// Return whether Zed considers this to be a private file.
    fn is_private(&self) -> bool;
}
375
/// The file's storage status - whether it's stored (`Present`), and if so when it was last
/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
/// indicator for new files.
///
/// See [`DiskState::exists`] and [`DiskState::mtime`] for convenience accessors.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum DiskState {
    /// File created in Zed that has not been saved.
    New,
    /// File present on the filesystem.
    Present { mtime: MTime },
    /// Deleted file that was previously present.
    Deleted,
}
389
390impl DiskState {
391 /// Returns the file's last known modification time on disk.
392 pub fn mtime(self) -> Option<MTime> {
393 match self {
394 DiskState::New => None,
395 DiskState::Present { mtime } => Some(mtime),
396 DiskState::Deleted => None,
397 }
398 }
399
400 pub fn exists(&self) -> bool {
401 match self {
402 DiskState::New => false,
403 DiskState::Present { .. } => true,
404 DiskState::Deleted => false,
405 }
406 }
407}
408
/// The file associated with a buffer, in the case where the file is on the local disk.
pub trait LocalFile: File {
    /// Returns the absolute path of this file.
    fn abs_path(&self, cx: &App) -> PathBuf;

    /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
    fn load(&self, cx: &App) -> Task<Result<String>>;

    /// Loads the file's contents from disk as raw bytes.
    fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
}
420
/// The auto-indent behavior associated with an editing operation.
/// For some editing operations, each affected line of text has its
/// indentation recomputed. For other operations, the entire block
/// of edited text is adjusted uniformly.
#[derive(Clone, Debug)]
pub enum AutoindentMode {
    /// Indent each line of inserted text.
    EachLine,
    /// Apply the same indentation adjustment to all of the lines
    /// in a given insertion.
    Block {
        /// The original indentation column of the first line of each
        /// insertion, if it has been copied.
        ///
        /// Knowing this makes it possible to preserve the relative indentation
        /// of every line in the insertion from when it was copied.
        ///
        /// If the original indent column is `a`, and the first line of insertion
        /// is then auto-indented to column `b`, then every other line of
        /// the insertion will be auto-indented to column `b - a`
        original_indent_columns: Vec<Option<u32>>,
    },
}

/// A pending request to recompute the indentation of edited ranges.
#[derive(Clone)]
struct AutoindentRequest {
    /// Snapshot of the buffer before the edit being indented.
    before_edit: BufferSnapshot,
    /// One entry per edited range whose indentation should be adjusted.
    entries: Vec<AutoindentRequestEntry>,
    /// Whether the request uses [`AutoindentMode::Block`] semantics
    /// (uniform adjustment) rather than per-line indentation.
    is_block_mode: bool,
    /// Presumably: skip re-indenting lines that contain only whitespace.
    /// NOTE(review): inferred from the name — confirm where consumed.
    ignore_empty_lines: bool,
}

#[derive(Debug, Clone)]
struct AutoindentRequestEntry {
    /// A range of the buffer whose indentation should be adjusted.
    range: Range<Anchor>,
    /// Whether or not these lines should be considered brand new, for the
    /// purpose of auto-indent. When text is not new, its indentation will
    /// only be adjusted if the suggested indentation level has *changed*
    /// since the edit was made.
    first_line_is_new: bool,
    /// The indentation unit (tab or spaces, and width) in effect for the edit.
    indent_size: IndentSize,
    /// For block mode: the column at which the inserted text was originally
    /// indented, when known. See [`AutoindentMode::Block`].
    original_indent_column: Option<u32>,
}
465
/// A suggestion for how a line should be indented, relative to a basis row.
#[derive(Debug)]
struct IndentSuggestion {
    /// The row whose indentation this suggestion is relative to.
    basis_row: u32,
    /// Whether to indent less than, the same as, or more than the basis row.
    /// NOTE(review): inferred from the `Ordering` type — confirm at use site.
    delta: Ordering,
    /// Presumably set when the suggestion was computed inside a syntax error
    /// region — NOTE(review): confirm at the producer.
    within_error: bool,
}

/// Syntax-highlighting state threaded through a [`BufferChunks`] iterator.
struct BufferChunkHighlights<'a> {
    /// Captures produced by the syntax map's highlight queries.
    captures: SyntaxMapCaptures<'a>,
    /// The next capture to be consumed, if any.
    next_capture: Option<SyntaxMapCapture<'a>>,
    /// Stack of currently-open highlight spans as (offset, highlight id)
    /// pairs — presumably the offset is the span's end; confirm at use site.
    stack: Vec<(usize, HighlightId)>,
    /// One highlight map per grammar appearing in `captures`.
    highlight_maps: Vec<HighlightMap>,
}

/// An iterator that yields chunks of a buffer's text, along with their
/// syntax highlights and diagnostic status.
pub struct BufferChunks<'a> {
    /// The snapshot being iterated, when available.
    buffer_snapshot: Option<&'a BufferSnapshot>,
    /// The byte range being iterated.
    range: Range<usize>,
    /// The underlying text chunks.
    chunks: text::Chunks<'a>,
    /// Diagnostic range endpoints to interleave with the text, if any.
    diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
    /// Nesting depth of currently-open error diagnostics.
    error_depth: usize,
    /// Nesting depth of currently-open warning diagnostics.
    warning_depth: usize,
    /// Nesting depth of currently-open information diagnostics.
    information_depth: usize,
    /// Nesting depth of currently-open hint diagnostics.
    hint_depth: usize,
    /// Nesting depth of currently-open "unnecessary code" diagnostics.
    unnecessary_depth: usize,
    /// Whether the current position falls in an underlined diagnostic range.
    underline: bool,
    /// Syntax-highlighting state, when highlighting is enabled.
    highlights: Option<BufferChunkHighlights<'a>>,
}
495
/// A chunk of a buffer's text, along with its syntax highlight and
/// diagnostic status.
#[derive(Clone, Debug, Default)]
pub struct Chunk<'a> {
    /// The text of the chunk.
    pub text: &'a str,
    /// The syntax highlighting style of the chunk.
    pub syntax_highlight_id: Option<HighlightId>,
    /// The highlight style that has been applied to this chunk in
    /// the editor.
    pub highlight_style: Option<HighlightStyle>,
    /// The severity of diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// Whether this chunk of text is marked as unnecessary.
    pub is_unnecessary: bool,
    /// Whether this chunk of text was originally a tab character.
    pub is_tab: bool,
    /// A bitset of which characters are tabs in this string.
    pub tabs: u128,
    /// Bitmap of character indices in this chunk
    /// (NOTE(review): presumably one bit per byte marking character starts —
    /// confirm against the producer of this field).
    pub chars: u128,
    /// Whether this chunk is inlay text rather than buffer text.
    /// (The previous doc comment here was a copy-paste of `is_tab`'s.)
    pub is_inlay: bool,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}
522
/// A set of edits to a given version of a buffer, computed asynchronously.
#[derive(Debug)]
pub struct Diff {
    /// The buffer version the edits were computed against.
    pub base_version: clock::Global,
    /// The line ending of the new contents.
    pub line_ending: LineEnding,
    /// Replacement edits as (range in the base text, new text) pairs.
    pub edits: Vec<(Range<usize>, Arc<str>)>,
}

/// One endpoint (start or end) of a diagnostic's range, used to interleave
/// diagnostics with text chunks in [`BufferChunks`].
#[derive(Debug, Clone, Copy)]
pub(crate) struct DiagnosticEndpoint {
    /// Buffer offset of this endpoint.
    offset: usize,
    /// `true` for the start of a diagnostic range, `false` for the end.
    is_start: bool,
    /// Whether the diagnostic requests an underline.
    underline: bool,
    /// The diagnostic's severity.
    severity: DiagnosticSeverity,
    /// Whether the diagnostic marks unnecessary code.
    is_unnecessary: bool,
}

/// A class of characters, used for characterizing a run of text.
#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
pub enum CharKind {
    /// Whitespace.
    Whitespace,
    /// Punctuation.
    Punctuation,
    /// Word.
    Word,
}
550
/// Context for character classification within a specific scope.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum CharScopeContext {
    /// Character classification for completion queries.
    ///
    /// This context treats certain characters as word constituents that would
    /// normally be considered punctuation, such as '-' in Tailwind classes
    /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
    Completion,
    /// Character classification for linked edits.
    ///
    /// This context handles characters that should be treated as part of
    /// identifiers during linked editing operations, such as '.' in JSX
    /// component names like `<Animated.View>`.
    LinkedEdit,
}
567
/// A runnable is a set of data about a region that could be resolved into a task
pub struct Runnable {
    /// The runnable tags that matched this region.
    pub tags: SmallVec<[RunnableTag; 1]>,
    /// The language the region belongs to.
    pub language: Arc<Language>,
    /// The id of the buffer containing the region.
    pub buffer: BufferId,
}

/// Text together with a set of styled byte ranges, ready for rendering.
#[derive(Default, Clone, Debug)]
pub struct HighlightedText {
    /// The full text.
    pub text: SharedString,
    /// Styled ranges, as byte offsets into `text`.
    pub highlights: Vec<(Range<usize>, HighlightStyle)>,
}

/// Mutable accumulator used to construct a [`HighlightedText`].
#[derive(Default, Debug)]
struct HighlightedTextBuilder {
    /// The text accumulated so far.
    pub text: String,
    /// Styled ranges accumulated so far.
    pub highlights: Vec<(Range<usize>, HighlightStyle)>,
}
586
impl HighlightedText {
    /// Builds highlighted text for `range` of the given buffer snapshot,
    /// merging `override_style` (if any) into every chunk's style.
    pub fn from_buffer_range<T: ToOffset>(
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) -> Self {
        let mut highlighted_text = HighlightedTextBuilder::default();
        highlighted_text.add_text_from_buffer_range(
            range,
            snapshot,
            syntax_snapshot,
            override_style,
            syntax_theme,
        );
        highlighted_text.build()
    }

    /// Converts this highlighted text into a [`StyledText`] element, using
    /// `default_style` for unhighlighted spans.
    pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
        gpui::StyledText::new(self.text.clone())
            .with_default_highlights(default_style, self.highlights.iter().cloned())
    }

    /// Returns the first line without leading whitespace unless highlighted
    /// and a boolean indicating if there are more lines after
    pub fn first_line_preview(self) -> (Self, bool) {
        // Index of the first newline, or the full length if single-line.
        let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
        let first_line = &self.text[..newline_ix];

        // Trim leading whitespace, unless an edit starts prior to it.
        let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
        if let Some((first_highlight_range, _)) = self.highlights.first() {
            preview_start_ix = preview_start_ix.min(first_highlight_range.start);
        }

        let preview_text = &first_line[preview_start_ix..];
        // Shift each highlight range left by the trimmed prefix and drop any
        // that end up empty.
        // NOTE(review): the end is clamped to `newline_ix` *after* shifting,
        // even though `newline_ix` is an unshifted index — confirm whether
        // `newline_ix - preview_start_ix` was intended here.
        let preview_highlights = self
            .highlights
            .into_iter()
            .take_while(|(range, _)| range.start < newline_ix)
            .filter_map(|(mut range, highlight)| {
                range.start = range.start.saturating_sub(preview_start_ix);
                range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
                if range.is_empty() {
                    None
                } else {
                    Some((range, highlight))
                }
            });

        let preview = Self {
            text: SharedString::new(preview_text),
            highlights: preview_highlights.collect(),
        };

        // More content exists after the first line iff a newline was found.
        (preview, self.text.len() > newline_ix)
    }
}
646
647impl HighlightedTextBuilder {
648 pub fn build(self) -> HighlightedText {
649 HighlightedText {
650 text: self.text.into(),
651 highlights: self.highlights,
652 }
653 }
654
655 pub fn add_text_from_buffer_range<T: ToOffset>(
656 &mut self,
657 range: Range<T>,
658 snapshot: &text::BufferSnapshot,
659 syntax_snapshot: &SyntaxSnapshot,
660 override_style: Option<HighlightStyle>,
661 syntax_theme: &SyntaxTheme,
662 ) {
663 let range = range.to_offset(snapshot);
664 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
665 let start = self.text.len();
666 self.text.push_str(chunk.text);
667 let end = self.text.len();
668
669 if let Some(highlight_style) = chunk
670 .syntax_highlight_id
671 .and_then(|id| id.style(syntax_theme))
672 {
673 let highlight_style = override_style.map_or(highlight_style, |override_style| {
674 highlight_style.highlight(override_style)
675 });
676 self.highlights.push((start..end, highlight_style));
677 } else if let Some(override_style) = override_style {
678 self.highlights.push((start..end, override_style));
679 }
680 }
681 }
682
683 fn highlighted_chunks<'a>(
684 range: Range<usize>,
685 snapshot: &'a text::BufferSnapshot,
686 syntax_snapshot: &'a SyntaxSnapshot,
687 ) -> BufferChunks<'a> {
688 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
689 grammar
690 .highlights_config
691 .as_ref()
692 .map(|config| &config.query)
693 });
694
695 let highlight_maps = captures
696 .grammars()
697 .iter()
698 .map(|grammar| grammar.highlight_map())
699 .collect();
700
701 BufferChunks::new(
702 snapshot.as_rope(),
703 range,
704 Some((captures, highlight_maps)),
705 false,
706 None,
707 )
708 }
709}
710
/// Snapshots needed to preview a set of proposed edits without applying
/// them to the real buffer.
#[derive(Clone)]
pub struct EditPreview {
    /// The buffer text before the proposed edits.
    old_snapshot: text::BufferSnapshot,
    /// The buffer text with the proposed edits applied.
    applied_edits_snapshot: text::BufferSnapshot,
    /// Syntax state corresponding to `applied_edits_snapshot`.
    syntax_snapshot: SyntaxSnapshot,
}
717
718impl EditPreview {
719 pub fn highlight_edits(
720 &self,
721 current_snapshot: &BufferSnapshot,
722 edits: &[(Range<Anchor>, String)],
723 include_deletions: bool,
724 cx: &App,
725 ) -> HighlightedText {
726 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
727 return HighlightedText::default();
728 };
729
730 let mut highlighted_text = HighlightedTextBuilder::default();
731
732 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
733
734 let insertion_highlight_style = HighlightStyle {
735 background_color: Some(cx.theme().status().created_background),
736 ..Default::default()
737 };
738 let deletion_highlight_style = HighlightStyle {
739 background_color: Some(cx.theme().status().deleted_background),
740 ..Default::default()
741 };
742 let syntax_theme = cx.theme().syntax();
743
744 for (range, edit_text) in edits {
745 let edit_new_end_in_preview_snapshot = range
746 .end
747 .bias_right(&self.old_snapshot)
748 .to_offset(&self.applied_edits_snapshot);
749 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
750
751 let unchanged_range_in_preview_snapshot =
752 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
753 if !unchanged_range_in_preview_snapshot.is_empty() {
754 highlighted_text.add_text_from_buffer_range(
755 unchanged_range_in_preview_snapshot,
756 &self.applied_edits_snapshot,
757 &self.syntax_snapshot,
758 None,
759 syntax_theme,
760 );
761 }
762
763 let range_in_current_snapshot = range.to_offset(current_snapshot);
764 if include_deletions && !range_in_current_snapshot.is_empty() {
765 highlighted_text.add_text_from_buffer_range(
766 range_in_current_snapshot,
767 ¤t_snapshot.text,
768 ¤t_snapshot.syntax,
769 Some(deletion_highlight_style),
770 syntax_theme,
771 );
772 }
773
774 if !edit_text.is_empty() {
775 highlighted_text.add_text_from_buffer_range(
776 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
777 &self.applied_edits_snapshot,
778 &self.syntax_snapshot,
779 Some(insertion_highlight_style),
780 syntax_theme,
781 );
782 }
783
784 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
785 }
786
787 highlighted_text.add_text_from_buffer_range(
788 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
789 &self.applied_edits_snapshot,
790 &self.syntax_snapshot,
791 None,
792 syntax_theme,
793 );
794
795 highlighted_text.build()
796 }
797
798 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
799 let (first, _) = edits.first()?;
800 let (last, _) = edits.last()?;
801
802 let start = first
803 .start
804 .bias_left(&self.old_snapshot)
805 .to_point(&self.applied_edits_snapshot);
806 let end = last
807 .end
808 .bias_right(&self.old_snapshot)
809 .to_point(&self.applied_edits_snapshot);
810
811 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
812 let range = Point::new(start.row, 0)
813 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
814
815 Some(range.to_offset(&self.applied_edits_snapshot))
816 }
817}
818
/// A pair of matching brackets within a buffer.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct BracketMatch {
    /// Byte range of the opening bracket.
    pub open_range: Range<usize>,
    /// Byte range of the closing bracket.
    pub close_range: Range<usize>,
    /// Presumably restricts this pair to newline-separated contexts —
    /// NOTE(review): inferred from the name; confirm against bracket queries.
    pub newline_only: bool,
}
825
826impl Buffer {
827 /// Create a new buffer with the given base text.
828 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
829 Self::build(
830 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
831 None,
832 Capability::ReadWrite,
833 )
834 }
835
836 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
837 pub fn local_normalized(
838 base_text_normalized: Rope,
839 line_ending: LineEnding,
840 cx: &Context<Self>,
841 ) -> Self {
842 Self::build(
843 TextBuffer::new_normalized(
844 0,
845 cx.entity_id().as_non_zero_u64().into(),
846 line_ending,
847 base_text_normalized,
848 ),
849 None,
850 Capability::ReadWrite,
851 )
852 }
853
854 /// Create a new buffer that is a replica of a remote buffer.
855 pub fn remote(
856 remote_id: BufferId,
857 replica_id: ReplicaId,
858 capability: Capability,
859 base_text: impl Into<String>,
860 ) -> Self {
861 Self::build(
862 TextBuffer::new(replica_id, remote_id, base_text.into()),
863 None,
864 capability,
865 )
866 }
867
868 /// Create a new buffer that is a replica of a remote buffer, populating its
869 /// state from the given protobuf message.
870 pub fn from_proto(
871 replica_id: ReplicaId,
872 capability: Capability,
873 message: proto::BufferState,
874 file: Option<Arc<dyn File>>,
875 ) -> Result<Self> {
876 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
877 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
878 let mut this = Self::build(buffer, file, capability);
879 this.text.set_line_ending(proto::deserialize_line_ending(
880 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
881 ));
882 this.saved_version = proto::deserialize_version(&message.saved_version);
883 this.saved_mtime = message.saved_mtime.map(|time| time.into());
884 Ok(this)
885 }
886
887 /// Serialize the buffer's state to a protobuf message.
888 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
889 proto::BufferState {
890 id: self.remote_id().into(),
891 file: self.file.as_ref().map(|f| f.to_proto(cx)),
892 base_text: self.base_text().to_string(),
893 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
894 saved_version: proto::serialize_version(&self.saved_version),
895 saved_mtime: self.saved_mtime.map(|time| time.into()),
896 }
897 }
898
    /// Serialize as protobufs all of the changes to the buffer since the given version.
    ///
    /// Deferred operations and non-text state (selections, diagnostics,
    /// completion triggers) are always included; text operations are filtered
    /// to those not yet observed by `since`. The combined list is sorted by
    /// lamport timestamp on a background task.
    pub fn serialize_ops(
        &self,
        since: Option<clock::Global>,
        cx: &App,
    ) -> Task<Vec<proto::Operation>> {
        let mut operations = Vec::new();
        // Operations that arrived out of order and haven't been applied yet.
        operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));

        // One selections update per replica.
        operations.extend(self.remote_selections.iter().map(|(_, set)| {
            proto::serialize_operation(&Operation::UpdateSelections {
                selections: set.selections.clone(),
                lamport_timestamp: set.lamport_timestamp,
                line_mode: set.line_mode,
                cursor_shape: set.cursor_shape,
            })
        }));

        // One diagnostics update per language server.
        for (server_id, diagnostics) in &self.diagnostics {
            operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
                lamport_timestamp: self.diagnostics_timestamp,
                server_id: *server_id,
                diagnostics: diagnostics.iter().cloned().collect(),
            }));
        }

        // One completion-triggers update per language server.
        for (server_id, completions) in &self.completion_triggers_per_language_server {
            operations.push(proto::serialize_operation(
                &Operation::UpdateCompletionTriggers {
                    triggers: completions.iter().cloned().collect(),
                    lamport_timestamp: self.completion_triggers_timestamp,
                    server_id: *server_id,
                },
            ));
        }

        // Filtering and serializing the text operation log can be expensive,
        // so do it off the main thread.
        let text_operations = self.text.operations().clone();
        cx.background_spawn(async move {
            let since = since.unwrap_or_default();
            operations.extend(
                text_operations
                    .iter()
                    .filter(|(_, op)| !since.observed(op.timestamp()))
                    .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
            );
            operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
            operations
        })
    }
948
    /// Assign a language to the buffer, returning the buffer.
    ///
    /// Builder-style convenience over [`Self::set_language`], for use when
    /// constructing a buffer in a single expression.
    pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
        self.set_language(Some(language), cx);
        self
    }
954
    /// Returns the [`Capability`] of this buffer.
    pub fn capability(&self) -> Capability {
        self.capability
    }
959
960 /// Whether this buffer can only be read.
961 pub fn read_only(&self) -> bool {
962 self.capability == Capability::ReadOnly
963 }
964
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
    ///
    /// All derived state starts out empty/idle; the saved version and the
    /// unsaved-edits marker are initialized from the text buffer's current
    /// version so a freshly built buffer is considered clean.
    pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
        // The saved mtime comes from the file's on-disk state, when present.
        let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
        let snapshot = buffer.snapshot();
        let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
        Self {
            saved_mtime,
            saved_version: buffer.version(),
            preview_version: buffer.version(),
            reload_task: None,
            transaction_depth: 0,
            was_dirty_before_starting_transaction: None,
            has_unsaved_edits: Cell::new((buffer.version(), false)),
            text: buffer,
            branch_state: None,
            file,
            capability,
            syntax_map,
            reparse: None,
            non_text_state_update_count: 0,
            // How long `reparse` blocks the foreground thread waiting for a
            // background parse before deferring to an async task.
            sync_parse_timeout: Duration::from_millis(1),
            parse_status: watch::channel(ParseStatus::Idle),
            autoindent_requests: Default::default(),
            wait_for_autoindent_txs: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            remote_selections: Default::default(),
            diagnostics: Default::default(),
            diagnostics_timestamp: Default::default(),
            completion_triggers: Default::default(),
            completion_triggers_per_language_server: Default::default(),
            completion_triggers_timestamp: Default::default(),
            deferred_ops: OperationQueue::new(),
            has_conflict: false,
            change_bits: Default::default(),
            _subscriptions: Vec::new(),
        }
    }
1003
1004 pub fn build_snapshot(
1005 text: Rope,
1006 language: Option<Arc<Language>>,
1007 language_registry: Option<Arc<LanguageRegistry>>,
1008 cx: &mut App,
1009 ) -> impl Future<Output = BufferSnapshot> + use<> {
1010 let entity_id = cx.reserve_entity::<Self>().entity_id();
1011 let buffer_id = entity_id.as_non_zero_u64().into();
1012 async move {
1013 let text =
1014 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1015 let mut syntax = SyntaxMap::new(&text).snapshot();
1016 if let Some(language) = language.clone() {
1017 let language_registry = language_registry.clone();
1018 syntax.reparse(&text, language_registry, language);
1019 }
1020 BufferSnapshot {
1021 text,
1022 syntax,
1023 file: None,
1024 diagnostics: Default::default(),
1025 remote_selections: Default::default(),
1026 language,
1027 non_text_state_update_count: 0,
1028 }
1029 }
1030 }
1031
    /// Builds an empty [`BufferSnapshot`] with no language, file, or diagnostics.
    ///
    /// An entity id is reserved purely to obtain a unique buffer id; no
    /// `Buffer` entity is actually created.
    pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        let text =
            TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
        let syntax = SyntaxMap::new(&text).snapshot();
        BufferSnapshot {
            text,
            syntax,
            file: None,
            diagnostics: Default::default(),
            remote_selections: Default::default(),
            language: None,
            non_text_state_update_count: 0,
        }
    }
1048
    /// Synchronous, test-only counterpart of [`Self::build_snapshot`]: parses
    /// the text on the current thread instead of returning a future.
    #[cfg(any(test, feature = "test-support"))]
    pub fn build_snapshot_sync(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        cx: &mut App,
    ) -> BufferSnapshot {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
        let mut syntax = SyntaxMap::new(&text).snapshot();
        if let Some(language) = language.clone() {
            syntax.reparse(&text, language_registry, language);
        }
        BufferSnapshot {
            text,
            syntax,
            file: None,
            diagnostics: Default::default(),
            remote_selections: Default::default(),
            language,
            non_text_state_update_count: 0,
        }
    }
1073
    /// Retrieve a snapshot of the buffer's current state. This is computationally
    /// cheap, and allows reading from the buffer on a background thread.
    pub fn snapshot(&self) -> BufferSnapshot {
        let text = self.text.snapshot();
        // Bring the syntax map up to date with any edits that happened since
        // the last parse before snapshotting it, so the snapshot's tree is
        // positionally consistent with `text`.
        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let syntax = syntax_map.snapshot();

        BufferSnapshot {
            text,
            syntax,
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
        }
    }
1092
    /// Creates a new branch buffer backed by this buffer.
    ///
    /// The branch shares this buffer's text history and mirrors its language,
    /// conflict, and dirty state. Operations emitted by this (base) buffer are
    /// forwarded to the branch via `on_base_buffer_event`.
    pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
        let this = cx.entity();
        cx.new(|cx| {
            let mut branch = Self {
                branch_state: Some(BufferBranchState {
                    base_buffer: this.clone(),
                    merged_operations: Default::default(),
                }),
                language: self.language.clone(),
                has_conflict: self.has_conflict,
                has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
                // Subscribe to the base buffer so its future operations are
                // replayed into the branch.
                _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
                // All remaining fields come from a fresh build over a text
                // branch of the base buffer.
                ..Self::build(self.text.branch(), self.file.clone(), self.capability())
            };
            if let Some(language_registry) = self.language_registry() {
                branch.set_language_registry(language_registry);
            }

            // Reparse the branch buffer so that we get syntax highlighting immediately.
            branch.reparse(cx);

            branch
        })
    }
1117
    /// Computes, on a background task, an [`EditPreview`] showing what the
    /// buffer would look like with `edits` applied, including a reparsed
    /// syntax snapshot for highlighting.
    pub fn preview_edits(
        &self,
        edits: Arc<[(Range<Anchor>, String)]>,
        cx: &App,
    ) -> Task<EditPreview> {
        let registry = self.language_registry();
        let language = self.language().cloned();
        let old_snapshot = self.text.snapshot();
        let mut branch_buffer = self.text.branch();
        let mut syntax_snapshot = self.syntax_map.lock().snapshot();
        cx.background_spawn(async move {
            if !edits.is_empty() {
                // Parse the pre-edit text first so interpolation below starts
                // from an up-to-date tree.
                if let Some(language) = language.clone() {
                    syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
                }

                branch_buffer.edit(edits.iter().cloned());
                let snapshot = branch_buffer.snapshot();
                syntax_snapshot.interpolate(&snapshot);

                // Reparse again against the edited text for accurate highlighting.
                if let Some(language) = language {
                    syntax_snapshot.reparse(&snapshot, registry, language);
                }
            }
            EditPreview {
                old_snapshot,
                applied_edits_snapshot: branch_buffer.snapshot(),
                syntax_snapshot,
            }
        })
    }
1149
    /// Applies all of the changes in this buffer that intersect any of the
    /// given `ranges` to its base buffer.
    ///
    /// If `ranges` is empty, then all changes will be applied. This buffer must
    /// be a branch buffer to call this method.
    pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
        let Some(base_buffer) = self.base_buffer() else {
            debug_panic!("not a branch buffer");
            return;
        };

        // An empty `ranges` means "everything": use one all-covering range.
        let mut ranges = if ranges.is_empty() {
            &[0..usize::MAX]
        } else {
            ranges.as_slice()
        }
        .iter()
        .peekable();

        // Walk the branch's edits (relative to the base version) and the
        // requested ranges in lockstep, keeping only intersecting edits.
        let mut edits = Vec::new();
        for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
            let mut is_included = false;
            while let Some(range) = ranges.peek() {
                if range.end < edit.new.start {
                    // Range ends before this edit; it can't match any later
                    // edit either, so discard it.
                    ranges.next().unwrap();
                } else {
                    if range.start <= edit.new.end {
                        is_included = true;
                    }
                    break;
                }
            }

            if is_included {
                edits.push((
                    edit.old.clone(),
                    self.text_for_range(edit.new.clone()).collect::<String>(),
                ));
            }
        }

        let operation = base_buffer.update(cx, |base_buffer, cx| {
            // cx.emit(BufferEvent::DiffBaseChanged);
            base_buffer.edit(edits, None, cx)
        });

        // Remember the merged edit's timestamp so that, when the base buffer
        // echoes it back, `on_base_buffer_event` can undo the duplicate.
        if let Some(operation) = operation
            && let Some(BufferBranchState {
                merged_operations, ..
            }) = &mut self.branch_state
        {
            merged_operations.push(operation);
        }
    }
1204
    /// Handles an event from the base buffer by replaying its operations into
    /// this branch buffer.
    ///
    /// Operations that this branch already merged into the base (tracked in
    /// `merged_operations`) are applied and then immediately undone, so the
    /// branch does not see the same edit twice.
    fn on_base_buffer_event(
        &mut self,
        _: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        let BufferEvent::Operation { operation, .. } = event else {
            return;
        };
        let Some(BufferBranchState {
            merged_operations, ..
        }) = &mut self.branch_state
        else {
            return;
        };

        let mut operation_to_undo = None;
        if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
            && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
        {
            merged_operations.remove(ix);
            operation_to_undo = Some(operation.timestamp);
        }

        self.apply_ops([operation.clone()], cx);

        if let Some(timestamp) = operation_to_undo {
            // u32::MAX undo count fully cancels the operation.
            let counts = [(timestamp, u32::MAX)].into_iter().collect();
            self.undo_operations(counts, cx);
        }
    }
1236
    /// Test-only access to the underlying text snapshot by reference.
    #[cfg(test)]
    pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
        &self.text
    }
1241
    /// Retrieve a snapshot of the buffer's raw text, without any
    /// language-related state like the syntax tree or diagnostics.
    pub fn text_snapshot(&self) -> text::BufferSnapshot {
        self.text.snapshot()
    }
1247
    /// The file associated with the buffer, if any.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }
1252
    /// The version of the buffer that was last saved or reloaded from disk.
    pub fn saved_version(&self) -> &clock::Global {
        &self.saved_version
    }
1257
    /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
    pub fn saved_mtime(&self) -> Option<MTime> {
        self.saved_mtime
    }
1262
    /// Assign a language to the buffer.
    ///
    /// Clears the existing syntax map before assigning the new language, then
    /// triggers a reparse and emits [`BufferEvent::LanguageChanged`].
    pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
        self.non_text_state_update_count += 1;
        // Discard trees built for the previous language before reparsing.
        self.syntax_map.lock().clear(&self.text);
        self.language = language;
        self.was_changed();
        self.reparse(cx);
        cx.emit(BufferEvent::LanguageChanged);
    }
1272
1273 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1274 /// other languages if parts of the buffer are written in different languages.
1275 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1276 self.syntax_map
1277 .lock()
1278 .set_language_registry(language_registry);
1279 }
1280
    /// Returns the language registry previously assigned via
    /// [`Self::set_language_registry`], if any.
    pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
        self.syntax_map.lock().language_registry()
    }
1284
1285 /// Assign the line ending type to the buffer.
1286 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1287 self.text.set_line_ending(line_ending);
1288
1289 let lamport_timestamp = self.text.lamport_clock.tick();
1290 self.send_operation(
1291 Operation::UpdateLineEnding {
1292 line_ending,
1293 lamport_timestamp,
1294 },
1295 true,
1296 cx,
1297 );
1298 }
1299
1300 /// Assign the buffer a new [`Capability`].
1301 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1302 if self.capability != capability {
1303 self.capability = capability;
1304 cx.emit(BufferEvent::CapabilityChanged)
1305 }
1306 }
1307
    /// This method is called to signal that the buffer has been saved.
    ///
    /// Records the saved version and mtime, clears the conflict and
    /// unsaved-edit state, and emits [`BufferEvent::Saved`].
    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version;
        // The cached dirty-check is keyed on the saved version.
        self.has_unsaved_edits
            .set((self.saved_version().clone(), false));
        self.has_conflict = false;
        self.saved_mtime = mtime;
        self.was_changed();
        cx.emit(BufferEvent::Saved);
        cx.notify();
    }
1324
    /// Reloads the contents of the buffer from disk.
    ///
    /// Returns a receiver that yields the reload transaction (or `None` when
    /// no transaction was produced, e.g. the buffer changed while loading or
    /// the file is not local).
    pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
        let (tx, rx) = futures::channel::oneshot::channel();
        let prev_version = self.text.version();
        self.reload_task = Some(cx.spawn(async move |this, cx| {
            // Only local files can be reloaded from disk.
            let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
                let file = this.file.as_ref()?.as_local()?;

                Some((file.disk_state().mtime(), file.load(cx)))
            })?
            else {
                return Ok(());
            };

            let new_text = new_text.await?;
            let diff = this
                .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
                .await;
            this.update(cx, |this, cx| {
                if this.version() == diff.base_version {
                    // No edits raced with the load: apply the diff as its own
                    // transaction and mark the buffer clean.
                    this.finalize_last_transaction();
                    this.apply_diff(diff, cx);
                    tx.send(this.finalize_last_transaction().cloned()).ok();
                    this.has_conflict = false;
                    this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
                } else {
                    // The buffer changed while loading; if either side has
                    // edits, the buffer is now in conflict with disk.
                    if !diff.edits.is_empty()
                        || this
                            .edits_since::<usize>(&diff.base_version)
                            .next()
                            .is_some()
                    {
                        this.has_conflict = true;
                    }

                    this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
                }

                this.reload_task.take();
            })
        }));
        rx
    }
1368
    /// This method is called to signal that the buffer has been reloaded.
    ///
    /// Records the reloaded version, line ending, and mtime, clears the
    /// unsaved-edit state, and emits [`BufferEvent::Reloaded`].
    pub fn did_reload(
        &mut self,
        version: clock::Global,
        line_ending: LineEnding,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version;
        self.has_unsaved_edits
            .set((self.saved_version.clone(), false));
        self.text.set_line_ending(line_ending);
        self.saved_mtime = mtime;
        cx.emit(BufferEvent::Reloaded);
        cx.notify();
    }
1385
    /// Updates the [`File`] backing this buffer. This should be called when
    /// the file has changed or has been deleted.
    ///
    /// Emits [`BufferEvent::ReloadNeeded`] when the file's on-disk state
    /// changed while the buffer is clean, and [`BufferEvent::FileHandleChanged`]
    /// (plus [`BufferEvent::DirtyChanged`] when applicable) for any change.
    pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
        // Capture the dirty state before swapping files, since dirtiness can
        // depend on the file's disk state.
        let was_dirty = self.is_dirty();
        let mut file_changed = false;

        if let Some(old_file) = self.file.as_ref() {
            if new_file.path() != old_file.path() {
                file_changed = true;
            }

            let old_state = old_file.disk_state();
            let new_state = new_file.disk_state();
            if old_state != new_state {
                file_changed = true;
                // Only suggest reloading when the buffer has no unsaved edits
                // and the file still exists on disk.
                if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
                    cx.emit(BufferEvent::ReloadNeeded)
                }
            }
        } else {
            // Gaining a file for the first time always counts as a change.
            file_changed = true;
        };

        self.file = Some(new_file);
        if file_changed {
            self.was_changed();
            self.non_text_state_update_count += 1;
            if was_dirty != self.is_dirty() {
                cx.emit(BufferEvent::DirtyChanged);
            }
            cx.emit(BufferEvent::FileHandleChanged);
            cx.notify();
        }
    }
1420
1421 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1422 Some(self.branch_state.as_ref()?.base_buffer.clone())
1423 }
1424
    /// Returns the primary [`Language`] assigned to this [`Buffer`].
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }
1429
    /// Returns the [`Language`] at the given location.
    ///
    /// Picks the innermost syntax layer whose included sub-ranges actually
    /// cover the position, falling back to the buffer's primary language when
    /// no layer matches.
    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
        let offset = position.to_offset(self);
        let mut is_first = true;
        let start_anchor = self.anchor_before(offset);
        let end_anchor = self.anchor_after(offset);
        self.syntax_map
            .lock()
            .layers_for_range(offset..offset, &self.text, false)
            .filter(|layer| {
                // The outermost (first) layer always applies.
                if is_first {
                    is_first = false;
                    return true;
                }

                // Deeper layers only apply when one of their sub-ranges
                // overlaps the position; layers with no sub-range info are
                // assumed to cover it.
                layer
                    .included_sub_ranges
                    .map(|sub_ranges| {
                        sub_ranges.iter().any(|sub_range| {
                            let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
                            let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
                            !is_before_start && !is_after_end
                        })
                    })
                    .unwrap_or(true)
            })
            // `last()` selects the innermost matching layer.
            .last()
            .map(|info| info.language.clone())
            .or_else(|| self.language.clone())
    }
1460
1461 /// Returns each [`Language`] for the active syntax layers at the given location.
1462 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1463 let offset = position.to_offset(self);
1464 let mut languages: Vec<Arc<Language>> = self
1465 .syntax_map
1466 .lock()
1467 .layers_for_range(offset..offset, &self.text, false)
1468 .map(|info| info.language.clone())
1469 .collect();
1470
1471 if languages.is_empty()
1472 && let Some(buffer_language) = self.language()
1473 {
1474 languages.push(buffer_language.clone());
1475 }
1476
1477 languages
1478 }
1479
    /// An integer version number that accounts for all updates besides
    /// the buffer's text itself (which is versioned via a version vector).
    pub fn non_text_state_update_count(&self) -> usize {
        self.non_text_state_update_count
    }
1485
    /// Whether the buffer is being parsed in the background.
    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.reparse.is_some()
    }
1491
    /// Indicates whether the buffer contains any regions that may be
    /// written in a language that hasn't been loaded yet.
    pub fn contains_unknown_injections(&self) -> bool {
        self.syntax_map.lock().contains_unknown_injections()
    }
1497
    /// Test-only override of how long `reparse` blocks the foreground thread
    /// waiting for a background parse before falling back to an async task.
    #[cfg(any(test, feature = "test-support"))]
    pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
        self.sync_parse_timeout = timeout;
    }
1502
    /// Called after an edit to synchronize the buffer's main parse tree with
    /// the buffer's new underlying state.
    ///
    /// Locks the syntax map and interpolates the edits since the last reparse
    /// into the foreground syntax tree.
    ///
    /// Then takes a stable snapshot of the syntax map before unlocking it.
    /// The snapshot with the interpolated edits is sent to a background thread,
    /// where we ask Tree-sitter to perform an incremental parse.
    ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting on the parse to complete. As soon as it completes, we proceed
    /// synchronously, unless a 1ms timeout elapses.
    ///
    /// If we time out waiting on the parse, we spawn a second task waiting
    /// until the parse does complete and return with the interpolated tree still
    /// in the foreground. When the background parse completes, call back into
    /// the main thread and assign the foreground parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
    pub fn reparse(&mut self, cx: &mut Context<Self>) {
        // A background parse is already in flight; it will re-trigger us if needed.
        if self.reparse.is_some() {
            return;
        }
        let language = if let Some(language) = self.language.clone() {
            language
        } else {
            return;
        };

        let text = self.text_snapshot();
        let parsed_version = self.version();

        // Interpolate pending edits into the syntax map, then snapshot it so
        // the background parse operates on a stable view.
        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let language_registry = syntax_map.language_registry();
        let mut syntax_snapshot = syntax_map.snapshot();
        drop(syntax_map);

        let parse_task = cx.background_spawn({
            let language = language.clone();
            let language_registry = language_registry.clone();
            async move {
                syntax_snapshot.reparse(&text, language_registry, language);
                syntax_snapshot
            }
        });

        self.parse_status.0.send(ParseStatus::Parsing).unwrap();
        match cx
            .background_executor()
            .block_with_timeout(self.sync_parse_timeout, parse_task)
        {
            // Fast path: the parse finished within the timeout.
            Ok(new_syntax_snapshot) => {
                self.did_finish_parsing(new_syntax_snapshot, cx);
                self.reparse = None;
            }
            // Slow path: adopt the result asynchronously when it arrives.
            Err(parse_task) => {
                self.reparse = Some(cx.spawn(async move |this, cx| {
                    let new_syntax_map = parse_task.await;
                    this.update(cx, move |this, cx| {
                        // Reparse again if the language, the registry, or the
                        // buffer contents changed while we were parsing.
                        let grammar_changed =
                            this.language.as_ref().is_none_or(|current_language| {
                                !Arc::ptr_eq(&language, current_language)
                            });
                        let language_registry_changed = new_syntax_map
                            .contains_unknown_injections()
                            && language_registry.is_some_and(|registry| {
                                registry.version() != new_syntax_map.language_registry_version()
                            });
                        let parse_again = language_registry_changed
                            || grammar_changed
                            || this.version.changed_since(&parsed_version);
                        this.did_finish_parsing(new_syntax_map, cx);
                        this.reparse = None;
                        if parse_again {
                            this.reparse(cx);
                        }
                    })
                    .ok();
                }));
            }
        }
    }
1590
    /// Installs a completed parse result: updates the syntax map, requests any
    /// pending autoindent, resets the parse status, and emits
    /// [`BufferEvent::Reparsed`].
    fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
        self.was_changed();
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().did_parse(syntax_snapshot);
        // Autoindent suggestions depend on the syntax tree, so compute them
        // now that a fresh parse is available.
        self.request_autoindent(cx);
        self.parse_status.0.send(ParseStatus::Idle).unwrap();
        cx.emit(BufferEvent::Reparsed);
        cx.notify();
    }
1600
    /// Returns a watch receiver that reports whether the buffer is currently
    /// parsing or idle.
    pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
        self.parse_status.1.clone()
    }
1604
    /// Assign to the buffer a set of diagnostics created by a given language server.
    ///
    /// Applies the update locally and broadcasts it as an operation so
    /// collaborators receive the same diagnostics.
    pub fn update_diagnostics(
        &mut self,
        server_id: LanguageServerId,
        diagnostics: DiagnosticSet,
        cx: &mut Context<Self>,
    ) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        let op = Operation::UpdateDiagnostics {
            server_id,
            diagnostics: diagnostics.iter().cloned().collect(),
            lamport_timestamp,
        };

        self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
        self.send_operation(op, true, cx);
    }
1622
1623 pub fn buffer_diagnostics(
1624 &self,
1625 for_server: Option<LanguageServerId>,
1626 ) -> Vec<&DiagnosticEntry<Anchor>> {
1627 match for_server {
1628 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1629 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1630 Err(_) => Vec::new(),
1631 },
1632 None => self
1633 .diagnostics
1634 .iter()
1635 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1636 .collect(),
1637 }
1638 }
1639
    /// Computes and applies any pending autoindent requests.
    ///
    /// Blocks the foreground briefly (500µs) hoping the computation finishes
    /// synchronously; otherwise defers the application to a spawned task.
    fn request_autoindent(&mut self, cx: &mut Context<Self>) {
        if let Some(indent_sizes) = self.compute_autoindents() {
            let indent_sizes = cx.background_spawn(indent_sizes);
            match cx
                .background_executor()
                .block_with_timeout(Duration::from_micros(500), indent_sizes)
            {
                Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
                Err(indent_sizes) => {
                    self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
                        let indent_sizes = indent_sizes.await;
                        this.update(cx, |this, cx| {
                            this.apply_autoindents(indent_sizes, cx);
                        })
                        .ok();
                    }));
                }
            }
        } else {
            // Nothing to compute: clear requests and release any waiters.
            self.autoindent_requests.clear();
            for tx in self.wait_for_autoindent_txs.drain(..) {
                tx.send(()).ok();
            }
        }
    }
1665
    /// Builds the future that computes the target indent size for each row
    /// affected by the pending autoindent requests.
    ///
    /// Returns `None` when there is nothing to do (no requests, or no syntax
    /// tree to base suggestions on). The returned future yields periodically
    /// so long computations don't starve the executor.
    fn compute_autoindents(
        &self,
    ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
        let max_rows_between_yields = 100;
        let snapshot = self.snapshot();
        if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
            return None;
        }

        let autoindent_requests = self.autoindent_requests.clone();
        Some(async move {
            // Row -> (indent, ignore_empty_lines); only rows whose suggestion
            // changed relative to the pre-edit buffer end up in the result.
            let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
            for request in autoindent_requests {
                // Resolve each edited range to its row in the current buffer and in the
                // buffer before this batch of edits.
                let mut row_ranges = Vec::new();
                let mut old_to_new_rows = BTreeMap::new();
                let mut language_indent_sizes_by_new_row = Vec::new();
                for entry in &request.entries {
                    let position = entry.range.start;
                    let new_row = position.to_point(&snapshot).row;
                    let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
                    language_indent_sizes_by_new_row.push((new_row, entry.indent_size));

                    if !entry.first_line_is_new {
                        let old_row = position.to_point(&request.before_edit).row;
                        old_to_new_rows.insert(old_row, new_row);
                    }
                    row_ranges.push((new_row..new_end_row, entry.original_indent_column));
                }

                // Build a map containing the suggested indentation for each of the edited lines
                // with respect to the state of the buffer before these edits. This map is keyed
                // by the rows for these lines in the current state of the buffer.
                let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
                let old_edited_ranges =
                    contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
                let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
                let mut language_indent_size = IndentSize::default();
                for old_edited_range in old_edited_ranges {
                    let suggestions = request
                        .before_edit
                        .suggest_autoindents(old_edited_range.clone())
                        .into_iter()
                        .flatten();
                    for (old_row, suggestion) in old_edited_range.zip(suggestions) {
                        if let Some(suggestion) = suggestion {
                            let new_row = *old_to_new_rows.get(&old_row).unwrap();

                            // Find the indent size based on the language for this row.
                            while let Some((row, size)) = language_indent_sizes.peek() {
                                if *row > new_row {
                                    break;
                                }
                                language_indent_size = *size;
                                language_indent_sizes.next();
                            }

                            // Base the suggestion on the basis row's indent,
                            // preferring an already-computed suggestion for it.
                            let suggested_indent = old_to_new_rows
                                .get(&suggestion.basis_row)
                                .and_then(|from_row| {
                                    Some(old_suggestions.get(from_row).copied()?.0)
                                })
                                .unwrap_or_else(|| {
                                    request
                                        .before_edit
                                        .indent_size_for_line(suggestion.basis_row)
                                })
                                .with_delta(suggestion.delta, language_indent_size);
                            old_suggestions
                                .insert(new_row, (suggested_indent, suggestion.within_error));
                        }
                    }
                    yield_now().await;
                }

                // Compute new suggestions for each line, but only include them in the result
                // if they differ from the old suggestion for that line.
                let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
                let mut language_indent_size = IndentSize::default();
                for (row_range, original_indent_column) in row_ranges {
                    // In block mode only the first row is analyzed; the rest
                    // are shifted by the same delta below.
                    let new_edited_row_range = if request.is_block_mode {
                        row_range.start..row_range.start + 1
                    } else {
                        row_range.clone()
                    };

                    let suggestions = snapshot
                        .suggest_autoindents(new_edited_row_range.clone())
                        .into_iter()
                        .flatten();
                    for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
                        if let Some(suggestion) = suggestion {
                            // Find the indent size based on the language for this row.
                            while let Some((row, size)) = language_indent_sizes.peek() {
                                if *row > new_row {
                                    break;
                                }
                                language_indent_size = *size;
                                language_indent_sizes.next();
                            }

                            let suggested_indent = indent_sizes
                                .get(&suggestion.basis_row)
                                .copied()
                                .map(|e| e.0)
                                .unwrap_or_else(|| {
                                    snapshot.indent_size_for_line(suggestion.basis_row)
                                })
                                .with_delta(suggestion.delta, language_indent_size);

                            // Skip rows whose suggestion already matches the
                            // pre-edit suggestion (unless error status improved).
                            if old_suggestions.get(&new_row).is_none_or(
                                |(old_indentation, was_within_error)| {
                                    suggested_indent != *old_indentation
                                        && (!suggestion.within_error || *was_within_error)
                                },
                            ) {
                                indent_sizes.insert(
                                    new_row,
                                    (suggested_indent, request.ignore_empty_lines),
                                );
                            }
                        }
                    }

                    // Block mode: shift the remaining rows by the delta applied
                    // to the first row, preserving their relative indentation.
                    if let (true, Some(original_indent_column)) =
                        (request.is_block_mode, original_indent_column)
                    {
                        let new_indent =
                            if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
                                *indent
                            } else {
                                snapshot.indent_size_for_line(row_range.start)
                            };
                        let delta = new_indent.len as i64 - original_indent_column as i64;
                        if delta != 0 {
                            for row in row_range.skip(1) {
                                indent_sizes.entry(row).or_insert_with(|| {
                                    let mut size = snapshot.indent_size_for_line(row);
                                    if size.kind == new_indent.kind {
                                        match delta.cmp(&0) {
                                            Ordering::Greater => size.len += delta as u32,
                                            Ordering::Less => {
                                                size.len = size.len.saturating_sub(-delta as u32)
                                            }
                                            Ordering::Equal => {}
                                        }
                                    }
                                    (size, request.ignore_empty_lines)
                                });
                            }
                        }
                    }

                    yield_now().await;
                }
            }

            // Optionally drop empty lines from the final result.
            indent_sizes
                .into_iter()
                .filter_map(|(row, (indent, ignore_empty_lines))| {
                    if ignore_empty_lines && snapshot.line_len(row) == 0 {
                        None
                    } else {
                        Some((row, indent))
                    }
                })
                .collect()
        })
    }
1836
    /// Applies computed indent sizes to the buffer as a single batch of edits.
    ///
    /// Clears pending requests, releases waiters, and preserves preview state
    /// across the edit when applicable.
    fn apply_autoindents(
        &mut self,
        indent_sizes: BTreeMap<u32, IndentSize>,
        cx: &mut Context<Self>,
    ) {
        self.autoindent_requests.clear();
        for tx in self.wait_for_autoindent_txs.drain(..) {
            tx.send(()).ok();
        }

        // Turn each target indent into a minimal edit against the current line.
        let edits: Vec<_> = indent_sizes
            .into_iter()
            .filter_map(|(row, indent_size)| {
                let current_size = indent_size_for_line(self, row);
                Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
            })
            .collect();

        let preserve_preview = self.preserve_preview();
        self.edit(edits, None, cx);
        if preserve_preview {
            self.refresh_preview();
        }
    }
1861
1862 /// Create a minimal edit that will cause the given row to be indented
1863 /// with the given size. After applying this edit, the length of the line
1864 /// will always be at least `new_size.len`.
1865 pub fn edit_for_indent_size_adjustment(
1866 row: u32,
1867 current_size: IndentSize,
1868 new_size: IndentSize,
1869 ) -> Option<(Range<Point>, String)> {
1870 if new_size.kind == current_size.kind {
1871 match new_size.len.cmp(¤t_size.len) {
1872 Ordering::Greater => {
1873 let point = Point::new(row, 0);
1874 Some((
1875 point..point,
1876 iter::repeat(new_size.char())
1877 .take((new_size.len - current_size.len) as usize)
1878 .collect::<String>(),
1879 ))
1880 }
1881
1882 Ordering::Less => Some((
1883 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1884 String::new(),
1885 )),
1886
1887 Ordering::Equal => None,
1888 }
1889 } else {
1890 Some((
1891 Point::new(row, 0)..Point::new(row, current_size.len),
1892 iter::repeat(new_size.char())
1893 .take(new_size.len as usize)
1894 .collect::<String>(),
1895 ))
1896 }
1897 }
1898
    /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
    /// and the given new text.
    ///
    /// The new text's line ending is detected and then normalized before
    /// diffing; the diff records the buffer version it was computed against.
    pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
        let old_text = self.as_rope().clone();
        let base_version = self.version();
        cx.background_executor()
            .spawn_labeled(*BUFFER_DIFF_TASK, async move {
                let old_text = old_text.to_string();
                let line_ending = LineEnding::detect(&new_text);
                LineEnding::normalize(&mut new_text);
                let edits = text_diff(&old_text, &new_text);
                Diff {
                    base_version,
                    line_ending,
                    edits,
                }
            })
    }
1917
1918 /// Spawns a background task that searches the buffer for any whitespace
1919 /// at the ends of a lines, and returns a `Diff` that removes that whitespace.
1920 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1921 let old_text = self.as_rope().clone();
1922 let line_ending = self.line_ending();
1923 let base_version = self.version();
1924 cx.background_spawn(async move {
1925 let ranges = trailing_whitespace_ranges(&old_text);
1926 let empty = Arc::<str>::from("");
1927 Diff {
1928 base_version,
1929 line_ending,
1930 edits: ranges
1931 .into_iter()
1932 .map(|range| (range, empty.clone()))
1933 .collect(),
1934 }
1935 })
1936 }
1937
    /// Ensures that the buffer ends with a single newline character, and
    /// no other whitespace. Skips if the buffer is empty.
    pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
        let len = self.len();
        if len == 0 {
            return;
        }
        // Scan backwards through the rope to find the end of the last
        // non-whitespace content; `offset` ends up just past it.
        let mut offset = len;
        for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
            let non_whitespace_len = chunk
                .trim_end_matches(|c: char| c.is_ascii_whitespace())
                .len();
            offset -= chunk.len();
            offset += non_whitespace_len;
            if non_whitespace_len != 0 {
                // Already ends with exactly one "\n" after the content: no edit needed.
                if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
                    return;
                }
                break;
            }
        }
        // Replace everything after the content with a single newline.
        self.edit([(offset..len, "\n")], None, cx);
    }
1961
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, then adjust the diff to account for those changes, and discard any
    /// parts of the diff that conflict with those changes.
    pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
        let snapshot = self.snapshot();
        let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
        // Running difference (in bytes) between old and new coordinates,
        // accumulated from edits that precede the current hunk.
        let mut delta = 0;
        let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
            while let Some(edit_since) = edits_since.peek() {
                // If the edit occurs after a diff hunk, then it does not
                // affect that hunk.
                if edit_since.old.start > range.end {
                    break;
                }
                // If the edit precedes the diff hunk, then adjust the hunk
                // to reflect the edit.
                else if edit_since.old.end < range.start {
                    delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
                    edits_since.next();
                }
                // If the edit intersects a diff hunk, then discard that hunk.
                // Note: the conflicting edit is deliberately not consumed, so
                // it can also invalidate subsequent overlapping hunks.
                else {
                    return None;
                }
            }

            // Translate the hunk into current-buffer coordinates.
            let start = (range.start as i64 + delta) as usize;
            let end = (range.end as i64 + delta) as usize;
            Some((start..end, new_text))
        });

        self.start_transaction();
        self.text.set_line_ending(diff.line_ending);
        self.edit(adjusted_edits, None, cx);
        self.end_transaction(cx)
    }
1998
    /// Returns whether the buffer has edits that are not reflected in
    /// `saved_version`, memoizing the answer per buffer version.
    fn has_unsaved_edits(&self) -> bool {
        // `has_unsaved_edits` is a `Cell`, so take the cached value out and
        // put it back if it is still valid for the current version.
        let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();

        if last_version == self.version {
            self.has_unsaved_edits
                .set((last_version, has_unsaved_edits));
            return has_unsaved_edits;
        }

        // Cache miss: recompute against the saved version and re-memoize.
        let has_edits = self.has_edits_since(&self.saved_version);
        self.has_unsaved_edits
            .set((self.version.clone(), has_edits));
        has_edits
    }
2013
2014 /// Checks if the buffer has unsaved changes.
2015 pub fn is_dirty(&self) -> bool {
2016 if self.capability == Capability::ReadOnly {
2017 return false;
2018 }
2019 if self.has_conflict {
2020 return true;
2021 }
2022 match self.file.as_ref().map(|f| f.disk_state()) {
2023 Some(DiskState::New) | Some(DiskState::Deleted) => {
2024 !self.is_empty() && self.has_unsaved_edits()
2025 }
2026 _ => self.has_unsaved_edits(),
2027 }
2028 }
2029
    /// Checks if the buffer and its file have both changed since the buffer
    /// was last saved or reloaded.
    pub fn has_conflict(&self) -> bool {
        if self.has_conflict {
            return true;
        }
        let Some(file) = self.file.as_ref() else {
            return false;
        };
        match file.disk_state() {
            // A file that never existed on disk cannot conflict.
            DiskState::New => false,
            DiskState::Present { mtime } => match self.saved_mtime {
                Some(saved_mtime) => {
                    // Conflict requires both: the file on disk is newer than the
                    // last save, AND the buffer itself has unsaved edits.
                    mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
                }
                // No recorded save time: treat as conflicting to be safe.
                None => true,
            },
            DiskState::Deleted => false,
        }
    }
2050
    /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
    pub fn subscribe(&mut self) -> Subscription {
        self.text.subscribe()
    }
2055
    /// Adds a bit to the list of bits that are set when the buffer's text changes.
    ///
    /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
    pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
        // Keep `change_bits` sorted by pointer so each weak bit is stored at
        // most once; only insert when binary search fails to find it.
        if let Err(ix) = self
            .change_bits
            .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
        {
            self.change_bits.insert(ix, bit);
        }
    }
2068
2069 fn was_changed(&mut self) {
2070 self.change_bits.retain(|change_bit| {
2071 change_bit.upgrade().is_some_and(|bit| {
2072 bit.replace(true);
2073 true
2074 })
2075 });
2076 }
2077
    /// Starts a transaction, if one is not already in-progress. When undoing or
    /// redoing edits, all of the edits performed within a transaction are undone
    /// or redone together.
    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }
2084
    /// Starts a transaction, providing the current time. Subsequent transactions
    /// that occur within a short period of time will be grouped together. This
    /// is controlled by the buffer's undo grouping duration.
    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.transaction_depth += 1;
        // Snapshot dirtiness only for the outermost transaction, so that
        // `end_transaction_at` can emit `DirtyChanged` when appropriate.
        if self.was_dirty_before_starting_transaction.is_none() {
            self.was_dirty_before_starting_transaction = Some(self.is_dirty());
        }
        self.text.start_transaction_at(now)
    }
2095
    /// Terminates the current transaction, if this is the outermost transaction.
    pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
        self.end_transaction_at(Instant::now(), cx)
    }
2100
2101 /// Terminates the current transaction, providing the current time. Subsequent transactions
2102 /// that occur within a short period of time will be grouped together. This
2103 /// is controlled by the buffer's undo grouping duration.
2104 pub fn end_transaction_at(
2105 &mut self,
2106 now: Instant,
2107 cx: &mut Context<Self>,
2108 ) -> Option<TransactionId> {
2109 assert!(self.transaction_depth > 0);
2110 self.transaction_depth -= 1;
2111 let was_dirty = if self.transaction_depth == 0 {
2112 self.was_dirty_before_starting_transaction.take().unwrap()
2113 } else {
2114 false
2115 };
2116 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2117 self.did_edit(&start_version, was_dirty, cx);
2118 Some(transaction_id)
2119 } else {
2120 None
2121 }
2122 }
2123
    /// Manually add a transaction to the buffer's undo history.
    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        self.text.push_transaction(transaction, now);
    }
2128
    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method
    pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
        self.text.push_empty_transaction(now)
    }
2143
    /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
    pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
        self.text.finalize_last_transaction()
    }
2149
    /// Manually group all changes since a given transaction.
    pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
        self.text.group_until_transaction(transaction_id);
    }
2154
    /// Manually remove a transaction from the buffer's undo history,
    /// returning it if it was present.
    pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
        self.text.forget_transaction(transaction_id)
    }
2159
    /// Retrieve a transaction from the buffer's undo history
    pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
        self.text.get_transaction(transaction_id)
    }
2164
    /// Manually merge two transactions in the buffer's undo history,
    /// folding `transaction` into `destination`.
    pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        self.text.merge_transactions(transaction, destination);
    }
2169
    /// Waits for the buffer to receive operations with the given timestamps.
    pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
        &mut self,
        edit_ids: It,
    ) -> impl Future<Output = Result<()>> + use<It> {
        self.text.wait_for_edits(edit_ids)
    }
2177
    /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
    pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
        &mut self,
        anchors: It,
    ) -> impl 'static + Future<Output = Result<()>> + use<It> {
        self.text.wait_for_anchors(anchors)
    }
2185
    /// Waits for the buffer to receive operations up to the given version.
    pub fn wait_for_version(
        &mut self,
        version: clock::Global,
    ) -> impl Future<Output = Result<()>> + use<> {
        self.text.wait_for_version(version)
    }
2193
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
    pub fn give_up_waiting(&mut self) {
        self.text.give_up_waiting();
    }
2199
2200 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2201 let mut rx = None;
2202 if !self.autoindent_requests.is_empty() {
2203 let channel = oneshot::channel();
2204 self.wait_for_autoindent_txs.push(channel.0);
2205 rx = Some(channel.1);
2206 }
2207 rx
2208 }
2209
    /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
    pub fn set_active_selections(
        &mut self,
        selections: Arc<[Selection<Anchor>]>,
        line_mode: bool,
        cursor_shape: CursorShape,
        cx: &mut Context<Self>,
    ) {
        // Tick the lamport clock so this update supersedes older selection sets.
        let lamport_timestamp = self.text.lamport_clock.tick();
        // Record the selections locally under this replica's id...
        self.remote_selections.insert(
            self.text.replica_id(),
            SelectionSet {
                selections: selections.clone(),
                lamport_timestamp,
                line_mode,
                cursor_shape,
            },
        );
        // ...and broadcast the same update to other replicas.
        self.send_operation(
            Operation::UpdateSelections {
                selections,
                line_mode,
                lamport_timestamp,
                cursor_shape,
            },
            true,
            cx,
        );
        self.non_text_state_update_count += 1;
        cx.notify();
    }
2241
2242 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2243 /// this replica.
2244 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2245 if self
2246 .remote_selections
2247 .get(&self.text.replica_id())
2248 .is_none_or(|set| !set.selections.is_empty())
2249 {
2250 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2251 }
2252 }
2253
    /// Stores a set of selections under the dedicated agent replica id.
    /// Unlike `set_active_selections`, this does not broadcast an operation
    /// to other replicas — the update is local-only.
    pub fn set_agent_selections(
        &mut self,
        selections: Arc<[Selection<Anchor>]>,
        line_mode: bool,
        cursor_shape: CursorShape,
        cx: &mut Context<Self>,
    ) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        self.remote_selections.insert(
            AGENT_REPLICA_ID,
            SelectionSet {
                selections,
                lamport_timestamp,
                line_mode,
                cursor_shape,
            },
        );
        self.non_text_state_update_count += 1;
        cx.notify();
    }
2274
    /// Clears the agent's selections by replacing them with an empty set.
    pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
        self.set_agent_selections(Arc::default(), false, Default::default(), cx);
    }
2278
2279 /// Replaces the buffer's entire text.
2280 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2281 where
2282 T: Into<Arc<str>>,
2283 {
2284 self.autoindent_requests.clear();
2285 self.edit([(0..self.len(), text)], None, cx)
2286 }
2287
2288 /// Appends the given text to the end of the buffer.
2289 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2290 where
2291 T: Into<Arc<str>>,
2292 {
2293 self.edit([(self.len()..self.len(), text)], None, cx)
2294 }
2295
    /// Applies the given edits to the buffer. Each edit is specified as a range of text to
    /// delete, and a string of text to insert at that location.
    ///
    /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
    /// request for the edited ranges, which will be processed when the buffer finishes
    /// parsing.
    ///
    /// Parsing takes place at the end of a transaction, and may compute synchronously
    /// or asynchronously, depending on the changes.
    pub fn edit<I, S, T>(
        &mut self,
        edits_iter: I,
        autoindent_mode: Option<AutoindentMode>,
        cx: &mut Context<Self>,
    ) -> Option<clock::Lamport>
    where
        I: IntoIterator<Item = (Range<S>, T)>,
        S: ToOffset,
        T: Into<Arc<str>>,
    {
        // Skip invalid edits and coalesce contiguous ones.
        let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();

        for (range, new_text) in edits_iter {
            let mut range = range.start.to_offset(self)..range.end.to_offset(self);

            // Normalize inverted ranges rather than rejecting them.
            if range.start > range.end {
                mem::swap(&mut range.start, &mut range.end);
            }
            let new_text = new_text.into();
            if !new_text.is_empty() || !range.is_empty() {
                // Merge with the previous edit when the ranges touch or overlap.
                if let Some((prev_range, prev_text)) = edits.last_mut()
                    && prev_range.end >= range.start
                {
                    prev_range.end = cmp::max(prev_range.end, range.end);
                    *prev_text = format!("{prev_text}{new_text}").into();
                } else {
                    edits.push((range, new_text));
                }
            }
        }
        if edits.is_empty() {
            return None;
        }

        self.start_transaction();
        self.pending_autoindent.take();
        // Autoindent is only requested when the buffer has a language; capture a
        // snapshot of the pre-edit state so indent suggestions can be computed
        // against the original text.
        let autoindent_request = autoindent_mode
            .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));

        let edit_operation = self.text.edit(edits.iter().cloned());
        let edit_id = edit_operation.timestamp();

        if let Some((before_edit, mode)) = autoindent_request {
            // Tracks the cumulative byte offset shift caused by earlier edits,
            // used to map pre-edit offsets into post-edit coordinates.
            let mut delta = 0isize;
            // Caches the `auto_indent` setting per language to avoid repeated
            // settings lookups for consecutive edits in the same language.
            let mut previous_setting = None;
            let entries: Vec<_> = edits
                .into_iter()
                .enumerate()
                .zip(&edit_operation.as_edit().unwrap().new_text)
                .filter(|((_, (range, _)), _)| {
                    let language = before_edit.language_at(range.start);
                    let language_id = language.map(|l| l.id());
                    if let Some((cached_language_id, auto_indent)) = previous_setting
                        && cached_language_id == language_id
                    {
                        auto_indent
                    } else {
                        // The auto-indent setting is not present in editorconfigs, hence
                        // we can avoid passing the file here.
                        let auto_indent =
                            language_settings(language.map(|l| l.name()), None, cx).auto_indent;
                        previous_setting = Some((language_id, auto_indent));
                        auto_indent
                    }
                })
                .map(|((ix, (range, _)), new_text)| {
                    let new_text_length = new_text.len();
                    let old_start = range.start.to_point(&before_edit);
                    let new_start = (delta + range.start as isize) as usize;
                    let range_len = range.end - range.start;
                    delta += new_text_length as isize - range_len as isize;

                    // Decide what range of the insertion to auto-indent, and whether
                    // the first line of the insertion should be considered a newly-inserted line
                    // or an edit to an existing line.
                    let mut range_of_insertion_to_indent = 0..new_text_length;
                    let mut first_line_is_new = true;

                    let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
                    let old_line_end = before_edit.line_len(old_start.row);

                    // An edit starting after the line's indentation modifies an
                    // existing line rather than creating a new one.
                    if old_start.column > old_line_start {
                        first_line_is_new = false;
                    }

                    if !new_text.contains('\n')
                        && (old_start.column + (range_len as u32) < old_line_end
                            || old_line_end == old_line_start)
                    {
                        first_line_is_new = false;
                    }

                    // When inserting text starting with a newline, avoid auto-indenting the
                    // previous line.
                    if new_text.starts_with('\n') {
                        range_of_insertion_to_indent.start += 1;
                        first_line_is_new = true;
                    }

                    let mut original_indent_column = None;
                    if let AutoindentMode::Block {
                        original_indent_columns,
                    } = &mode
                    {
                        original_indent_column = Some(if new_text.starts_with('\n') {
                            indent_size_for_text(
                                new_text[range_of_insertion_to_indent.clone()].chars(),
                            )
                            .len
                        } else {
                            // Fall back to measuring the inserted text when the
                            // caller did not supply an indent column for this edit.
                            original_indent_columns
                                .get(ix)
                                .copied()
                                .flatten()
                                .unwrap_or_else(|| {
                                    indent_size_for_text(
                                        new_text[range_of_insertion_to_indent.clone()].chars(),
                                    )
                                    .len
                                })
                        });

                        // Avoid auto-indenting the line after the edit.
                        if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
                            range_of_insertion_to_indent.end -= 1;
                        }
                    }

                    AutoindentRequestEntry {
                        first_line_is_new,
                        original_indent_column,
                        indent_size: before_edit.language_indent_size_at(range.start, cx),
                        range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
                            ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
                    }
                })
                .collect();

            if !entries.is_empty() {
                self.autoindent_requests.push(Arc::new(AutoindentRequest {
                    before_edit,
                    entries,
                    is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
                    ignore_empty_lines: false,
                }));
            }
        }

        self.end_transaction(cx);
        self.send_operation(Operation::Buffer(edit_operation), true, cx);
        Some(edit_id)
    }
2459
2460 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2461 self.was_changed();
2462
2463 if self.edits_since::<usize>(old_version).next().is_none() {
2464 return;
2465 }
2466
2467 self.reparse(cx);
2468 cx.emit(BufferEvent::Edited);
2469 if was_dirty != self.is_dirty() {
2470 cx.emit(BufferEvent::DirtyChanged);
2471 }
2472 cx.notify();
2473 }
2474
    /// Enqueues an autoindent request covering the given ranges, without
    /// performing any edit, and kicks off autoindent processing.
    pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
    where
        I: IntoIterator<Item = Range<T>>,
        T: ToOffset + Copy,
    {
        let before_edit = self.snapshot();
        let entries = ranges
            .into_iter()
            .map(|range| AutoindentRequestEntry {
                // Anchor the range so it survives concurrent edits.
                range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
                first_line_is_new: true,
                indent_size: before_edit.language_indent_size_at(range.start, cx),
                original_indent_column: None,
            })
            .collect();
        self.autoindent_requests.push(Arc::new(AutoindentRequest {
            before_edit,
            entries,
            is_block_mode: false,
            ignore_empty_lines: true,
        }));
        self.request_autoindent(cx);
    }
2498
    // Inserts newlines at the given position to create an empty line, returning the start of the new line.
    // You can also request the insertion of empty lines above and below the line starting at the returned point.
    pub fn insert_empty_line(
        &mut self,
        position: impl ToPoint,
        space_above: bool,
        space_below: bool,
        cx: &mut Context<Self>,
    ) -> Point {
        let mut position = position.to_point(self);

        // Group all the inserted newlines into a single undoable transaction.
        self.start_transaction();

        self.edit(
            [(position..position, "\n")],
            Some(AutoindentMode::EachLine),
            cx,
        );

        // If we split a line mid-way, the empty line begins on the next row.
        if position.column > 0 {
            position += Point::new(1, 0);
        }

        // Ensure the target row itself is blank, pushing its content down.
        if !self.is_line_blank(position.row) {
            self.edit(
                [(position..position, "\n")],
                Some(AutoindentMode::EachLine),
                cx,
            );
        }

        // Optionally guarantee a blank separator line above...
        if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
            self.edit(
                [(position..position, "\n")],
                Some(AutoindentMode::EachLine),
                cx,
            );
            position.row += 1;
        }

        // ...and below the returned line.
        if space_below
            && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
        {
            self.edit(
                [(position..position, "\n")],
                Some(AutoindentMode::EachLine),
                cx,
            );
        }

        self.end_transaction(cx);

        position
    }
2553
    /// Applies the given remote operations to the buffer.
    pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
        self.pending_autoindent.take();
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();
        let mut deferred_ops = Vec::new();
        // Split text operations (handled by the underlying text buffer) from
        // non-text operations, applying the latter immediately when possible
        // and deferring those whose dependencies have not yet arrived.
        let buffer_ops = ops
            .into_iter()
            .filter_map(|op| match op {
                Operation::Buffer(op) => Some(op),
                _ => {
                    if self.can_apply_op(&op) {
                        self.apply_op(op, cx);
                    } else {
                        deferred_ops.push(op);
                    }
                    None
                }
            })
            .collect::<Vec<_>>();
        // Forward remote text operations to any listeners (is_local = false).
        for operation in buffer_ops.iter() {
            self.send_operation(Operation::Buffer(operation.clone()), false, cx);
        }
        self.text.apply_ops(buffer_ops);
        self.deferred_ops.insert(deferred_ops);
        // Newly applied text ops may have unblocked previously deferred ops.
        self.flush_deferred_ops(cx);
        self.did_edit(&old_version, was_dirty, cx);
        // Notify independently of whether the buffer was edited as the operations could include a
        // selection update.
        cx.notify();
    }
2585
2586 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2587 let mut deferred_ops = Vec::new();
2588 for op in self.deferred_ops.drain().iter().cloned() {
2589 if self.can_apply_op(&op) {
2590 self.apply_op(op, cx);
2591 } else {
2592 deferred_ops.push(op);
2593 }
2594 }
2595 self.deferred_ops.insert(deferred_ops);
2596 }
2597
    /// Returns whether any operations (text or non-text) are still waiting
    /// on missing dependencies before they can be applied.
    pub fn has_deferred_ops(&self) -> bool {
        !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
    }
2601
    /// Returns whether a non-text operation's dependencies (the anchors it
    /// references) can currently be resolved against this buffer.
    fn can_apply_op(&self, operation: &Operation) -> bool {
        match operation {
            // Text operations are routed to `self.text` in `apply_ops`.
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics {
                diagnostics: diagnostic_set,
                ..
            } => diagnostic_set.iter().all(|diagnostic| {
                self.text.can_resolve(&diagnostic.range.start)
                    && self.text.can_resolve(&diagnostic.range.end)
            }),
            Operation::UpdateSelections { selections, .. } => selections
                .iter()
                .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
            // These carry no anchors, so they can always be applied.
            Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
        }
    }
2620
    /// Applies a single non-text operation whose dependencies are satisfied
    /// (see `can_apply_op`), observing its lamport timestamp.
    fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics {
                server_id,
                diagnostics: diagnostic_set,
                lamport_timestamp,
            } => {
                let snapshot = self.snapshot();
                self.apply_diagnostic_update(
                    server_id,
                    DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
                    lamport_timestamp,
                    cx,
                );
            }
            Operation::UpdateSelections {
                selections,
                lamport_timestamp,
                line_mode,
                cursor_shape,
            } => {
                // Ignore stale updates: keep the newest selection set per replica.
                if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
                    && set.lamport_timestamp > lamport_timestamp
                {
                    return;
                }

                self.remote_selections.insert(
                    lamport_timestamp.replica_id,
                    SelectionSet {
                        selections,
                        lamport_timestamp,
                        line_mode,
                        cursor_shape,
                    },
                );
                self.text.lamport_clock.observe(lamport_timestamp);
                self.non_text_state_update_count += 1;
            }
            Operation::UpdateCompletionTriggers {
                triggers,
                lamport_timestamp,
                server_id,
            } => {
                if triggers.is_empty() {
                    // Server cleared its triggers: rebuild the union from the
                    // remaining servers' trigger sets.
                    self.completion_triggers_per_language_server
                        .remove(&server_id);
                    self.completion_triggers = self
                        .completion_triggers_per_language_server
                        .values()
                        .flat_map(|triggers| triggers.iter().cloned())
                        .collect();
                } else {
                    self.completion_triggers_per_language_server
                        .insert(server_id, triggers.iter().cloned().collect());
                    self.completion_triggers.extend(triggers);
                }
                self.text.lamport_clock.observe(lamport_timestamp);
            }
            Operation::UpdateLineEnding {
                line_ending,
                lamport_timestamp,
            } => {
                self.text.set_line_ending(line_ending);
                self.text.lamport_clock.observe(lamport_timestamp);
            }
        }
    }
2692
    /// Replaces the diagnostics for one language server, keeping the
    /// per-server list sorted by server id. Stale updates (older lamport
    /// timestamp) are ignored.
    fn apply_diagnostic_update(
        &mut self,
        server_id: LanguageServerId,
        diagnostics: DiagnosticSet,
        lamport_timestamp: clock::Lamport,
        cx: &mut Context<Self>,
    ) {
        if lamport_timestamp > self.diagnostics_timestamp {
            let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
            if diagnostics.is_empty() {
                // An empty set removes the server's entry entirely.
                if let Ok(ix) = ix {
                    self.diagnostics.remove(ix);
                }
            } else {
                match ix {
                    Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
                    Ok(ix) => self.diagnostics[ix].1 = diagnostics,
                };
            }
            self.diagnostics_timestamp = lamport_timestamp;
            self.non_text_state_update_count += 1;
            self.text.lamport_clock.observe(lamport_timestamp);
            cx.notify();
            cx.emit(BufferEvent::DiagnosticsUpdated);
        }
    }
2719
    /// Emits an operation event so that listeners (e.g. collaboration
    /// plumbing) can forward it; also sets the registered change bits.
    fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
        self.was_changed();
        cx.emit(BufferEvent::Operation {
            operation,
            is_local,
        });
    }
2727
    /// Removes the selections for a given peer.
    pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
        self.remote_selections.remove(&replica_id);
        cx.notify();
    }
2733
2734 /// Undoes the most recent transaction.
2735 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2736 let was_dirty = self.is_dirty();
2737 let old_version = self.version.clone();
2738
2739 if let Some((transaction_id, operation)) = self.text.undo() {
2740 self.send_operation(Operation::Buffer(operation), true, cx);
2741 self.did_edit(&old_version, was_dirty, cx);
2742 Some(transaction_id)
2743 } else {
2744 None
2745 }
2746 }
2747
2748 /// Manually undoes a specific transaction in the buffer's undo history.
2749 pub fn undo_transaction(
2750 &mut self,
2751 transaction_id: TransactionId,
2752 cx: &mut Context<Self>,
2753 ) -> bool {
2754 let was_dirty = self.is_dirty();
2755 let old_version = self.version.clone();
2756 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2757 self.send_operation(Operation::Buffer(operation), true, cx);
2758 self.did_edit(&old_version, was_dirty, cx);
2759 true
2760 } else {
2761 false
2762 }
2763 }
2764
    /// Manually undoes all changes after a given transaction in the buffer's undo history.
    /// Returns whether anything was undone.
    pub fn undo_to_transaction(
        &mut self,
        transaction_id: TransactionId,
        cx: &mut Context<Self>,
    ) -> bool {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        let operations = self.text.undo_to_transaction(transaction_id);
        let undone = !operations.is_empty();
        // Broadcast every resulting undo operation before reacting to the edit.
        for operation in operations {
            self.send_operation(Operation::Buffer(operation), true, cx);
        }
        if undone {
            self.did_edit(&old_version, was_dirty, cx)
        }
        undone
    }
2784
    /// Undoes specific operations by id, each undone the given number of times.
    pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
        let was_dirty = self.is_dirty();
        let operation = self.text.undo_operations(counts);
        let old_version = self.version.clone();
        self.send_operation(Operation::Buffer(operation), true, cx);
        self.did_edit(&old_version, was_dirty, cx);
    }
2792
    /// Redoes the most recently undone transaction.
    pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        if let Some((transaction_id, operation)) = self.text.redo() {
            self.send_operation(Operation::Buffer(operation), true, cx);
            self.did_edit(&old_version, was_dirty, cx);
            Some(transaction_id)
        } else {
            None
        }
    }
2806
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
    /// Returns whether anything was redone.
    pub fn redo_to_transaction(
        &mut self,
        transaction_id: TransactionId,
        cx: &mut Context<Self>,
    ) -> bool {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        let operations = self.text.redo_to_transaction(transaction_id);
        let redone = !operations.is_empty();
        for operation in operations {
            self.send_operation(Operation::Buffer(operation), true, cx);
        }
        if redone {
            self.did_edit(&old_version, was_dirty, cx)
        }
        redone
    }
2826
    /// Override current completion triggers with the user-provided completion triggers.
    pub fn set_completion_triggers(
        &mut self,
        server_id: LanguageServerId,
        triggers: BTreeSet<String>,
        cx: &mut Context<Self>,
    ) {
        self.completion_triggers_timestamp = self.text.lamport_clock.tick();
        if triggers.is_empty() {
            // Clearing one server's triggers requires rebuilding the union
            // from all remaining servers.
            self.completion_triggers_per_language_server
                .remove(&server_id);
            self.completion_triggers = self
                .completion_triggers_per_language_server
                .values()
                .flat_map(|triggers| triggers.iter().cloned())
                .collect();
        } else {
            self.completion_triggers_per_language_server
                .insert(server_id, triggers.clone());
            self.completion_triggers.extend(triggers.iter().cloned());
        }
        // Broadcast the change so other replicas stay in sync.
        self.send_operation(
            Operation::UpdateCompletionTriggers {
                triggers: triggers.into_iter().collect(),
                lamport_timestamp: self.completion_triggers_timestamp,
                server_id,
            },
            true,
            cx,
        );
        cx.notify();
    }
2859
    /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by LSP server which returns a list of trigger characters for completions.
    pub fn completion_triggers(&self) -> &BTreeSet<String> {
        &self.completion_triggers
    }
2865
    /// Call this directly after performing edits to prevent the preview tab
    /// from being dismissed by those edits. It causes `should_dismiss_preview`
    /// to return false until there are additional edits.
    pub fn refresh_preview(&mut self) {
        self.preview_version = self.version.clone();
    }
2872
    /// Whether we should preserve the preview status of a tab containing this buffer.
    pub fn preserve_preview(&self) -> bool {
        !self.has_edits_since(&self.preview_version)
    }
2877}
2878
#[doc(hidden)]
#[cfg(any(test, feature = "test-support"))]
impl Buffer {
    /// Applies the edits encoded in a marked-text string (test helper).
    pub fn edit_via_marked_text(
        &mut self,
        marked_string: &str,
        autoindent_mode: Option<AutoindentMode>,
        cx: &mut Context<Self>,
    ) {
        let edits = self.edits_for_marked_text(marked_string);
        self.edit(edits, autoindent_mode, cx);
    }

    /// Overrides the undo grouping interval (test helper).
    pub fn set_group_interval(&mut self, group_interval: Duration) {
        self.text.set_group_interval(group_interval);
    }

    /// Performs up to `old_range_count` random, non-overlapping edits,
    /// for randomized/fuzz testing.
    pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
    where
        T: rand::Rng,
    {
        let mut edits: Vec<(Range<usize>, String)> = Vec::new();
        let mut last_end = None;
        for _ in 0..old_range_count {
            if last_end.is_some_and(|last_end| last_end >= self.len()) {
                break;
            }

            // Start each new range strictly after the previous one so the
            // generated edits never overlap.
            let new_start = last_end.map_or(0, |last_end| last_end + 1);
            let mut range = self.random_byte_range(new_start, rng);
            // Occasionally produce an inverted range to exercise `edit`'s
            // range normalization.
            if rng.random_bool(0.2) {
                mem::swap(&mut range.start, &mut range.end);
            }
            last_end = Some(range.end);

            let new_text_len = rng.random_range(0..10);
            let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
            new_text = new_text.to_uppercase();

            edits.push((range, new_text));
        }
        log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
        self.edit(edits, None, cx);
    }

    /// Performs a random sequence of undo/redo operations (test helper).
    pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        let ops = self.text.randomly_undo_redo(rng);
        if !ops.is_empty() {
            for op in ops {
                self.send_operation(Operation::Buffer(op), true, cx);
                self.did_edit(&old_version, was_dirty, cx);
            }
        }
    }
}
2937
// Marker registration: lets `Buffer` entities emit `BufferEvent`s through gpui.
impl EventEmitter<BufferEvent> for Buffer {}
2939
impl Deref for Buffer {
    type Target = TextBuffer;

    /// Delegates read-only text operations to the underlying `TextBuffer`.
    fn deref(&self) -> &Self::Target {
        &self.text
    }
}
2947
2948impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings and
    /// language preferences.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        // Delegates to the free function that works on any text snapshot.
        indent_size_for_line(self, row)
    }
2954
2955 /// Returns [`IndentSize`] for a given position that respects user settings
2956 /// and language preferences.
2957 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2958 let settings = language_settings(
2959 self.language_at(position).map(|l| l.name()),
2960 self.file(),
2961 cx,
2962 );
2963 if settings.hard_tabs {
2964 IndentSize::tab()
2965 } else {
2966 IndentSize::spaces(settings.tab_size.get())
2967 }
2968 }
2969
2970 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2971 /// is passed in as `single_indent_size`.
2972 pub fn suggested_indents(
2973 &self,
2974 rows: impl Iterator<Item = u32>,
2975 single_indent_size: IndentSize,
2976 ) -> BTreeMap<u32, IndentSize> {
2977 let mut result = BTreeMap::new();
2978
2979 for row_range in contiguous_ranges(rows, 10) {
2980 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2981 Some(suggestions) => suggestions,
2982 _ => break,
2983 };
2984
2985 for (row, suggestion) in row_range.zip(suggestions) {
2986 let indent_size = if let Some(suggestion) = suggestion {
2987 result
2988 .get(&suggestion.basis_row)
2989 .copied()
2990 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2991 .with_delta(suggestion.delta, single_indent_size)
2992 } else {
2993 self.indent_size_for_line(row)
2994 };
2995
2996 result.insert(row, indent_size);
2997 }
2998 }
2999
3000 result
3001 }
3002
    /// Produces one optional [`IndentSuggestion`] per row in `row_range`.
    ///
    /// Suggestions are derived from three sources: the language's tree-sitter
    /// indent query, its error query (used to flag suggestions inside syntax
    /// errors), and the regex-based increase/decrease indent patterns from the
    /// language config. Returns `None` when the buffer has no language.
    fn suggest_autoindents(
        &self,
        row_range: Range<u32>,
    ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
        let config = &self.language.as_ref()?.config;
        let prev_non_blank_row = self.prev_non_blank_row(row_range.start);

        // Position of a suffixed start capture; later consulted by
        // `decrease_indent_patterns` through their `valid_after` suffix lists.
        #[derive(Debug, Clone)]
        struct StartPosition {
            start: Point,
            suffix: SharedString,
        }

        // Find the suggested indentation ranges based on the syntax tree.
        let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
        let end = Point::new(row_range.end, 0);
        let range = (start..end).to_offset(&self.text);
        let mut matches = self.syntax.matches_with_options(
            range.clone(),
            &self.text,
            TreeSitterOptions {
                max_distance_from_inclusion_byte_range: Some(10 * 1024),
                max_start_depth: None,
            },
            |grammar| Some(&grammar.indents_config.as_ref()?.query),
        );
        let indent_configs = matches
            .grammars()
            .iter()
            .map(|grammar| grammar.indents_config.as_ref().unwrap())
            .collect::<Vec<_>>();

        let mut indent_ranges = Vec::<Range<Point>>::new();
        let mut start_positions = Vec::<StartPosition>::new();
        let mut outdent_positions = Vec::<Point>::new();
        while let Some(mat) = matches.peek() {
            let mut start: Option<Point> = None;
            let mut end: Option<Point> = None;

            let config = indent_configs[mat.grammar_index];
            for capture in mat.captures {
                if capture.index == config.indent_capture_ix {
                    start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
                    end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
                } else if Some(capture.index) == config.start_capture_ix {
                    start = Some(Point::from_ts_point(capture.node.end_position()));
                } else if Some(capture.index) == config.end_capture_ix {
                    end = Some(Point::from_ts_point(capture.node.start_position()));
                } else if Some(capture.index) == config.outdent_capture_ix {
                    outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
                } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
                    start_positions.push(StartPosition {
                        start: Point::from_ts_point(capture.node.start_position()),
                        suffix: suffix.clone(),
                    });
                }
            }

            matches.advance();
            if let Some((start, end)) = start.zip(end) {
                // Single-line matches don't affect indentation.
                if start.row == end.row {
                    continue;
                }
                let range = start..end;
                // Keep `indent_ranges` sorted by start, merging ranges that
                // begin at the same point.
                match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
                    Err(ix) => indent_ranges.insert(ix, range),
                    Ok(ix) => {
                        let prev_range = &mut indent_ranges[ix];
                        prev_range.end = prev_range.end.max(range.end);
                    }
                }
            }
        }

        // Collect the ranges matched by the error query, kept sorted and
        // coalesced; suggestions inside them are flagged via `within_error`.
        let mut error_ranges = Vec::<Range<Point>>::new();
        let mut matches = self
            .syntax
            .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
        while let Some(mat) = matches.peek() {
            let node = mat.captures[0].node;
            let start = Point::from_ts_point(node.start_position());
            let end = Point::from_ts_point(node.end_position());
            let range = start..end;
            let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
                Ok(ix) | Err(ix) => ix,
            };
            let mut end_ix = ix;
            while let Some(existing_range) = error_ranges.get(end_ix) {
                if existing_range.end < end {
                    end_ix += 1;
                } else {
                    break;
                }
            }
            error_ranges.splice(ix..end_ix, [range]);
            matches.advance();
        }

        outdent_positions.sort();
        for outdent_position in outdent_positions {
            // find the innermost indent range containing this outdent_position
            // set its end to the outdent position
            if let Some(range_to_truncate) = indent_ranges
                .iter_mut()
                .filter(|indent_range| indent_range.contains(&outdent_position))
                .next_back()
            {
                range_to_truncate.end = outdent_position;
            }
        }

        start_positions.sort_by_key(|b| b.start);

        // Find the suggested indentation increases and decreases based on regexes.
        let mut regex_outdent_map = HashMap::default();
        let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
        let mut start_positions_iter = start_positions.iter().peekable();

        let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
        self.for_each_line(
            Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
                ..Point::new(row_range.end, 0),
            |row, line| {
                if config
                    .decrease_indent_pattern
                    .as_ref()
                    .is_some_and(|regex| regex.is_match(line))
                {
                    indent_change_rows.push((row, Ordering::Less));
                }
                if config
                    .increase_indent_pattern
                    .as_ref()
                    .is_some_and(|regex| regex.is_match(line))
                {
                    // An increase matched on this line indents the *next* row.
                    indent_change_rows.push((row + 1, Ordering::Greater));
                }
                // Record start positions from earlier rows, keyed by their suffix.
                while let Some(pos) = start_positions_iter.peek() {
                    if pos.start.row < row {
                        let pos = start_positions_iter.next().unwrap();
                        last_seen_suffix
                            .entry(pos.suffix.to_string())
                            .or_default()
                            .push(pos.start);
                    } else {
                        break;
                    }
                }
                for rule in &config.decrease_indent_patterns {
                    if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
                        let row_start_column = self.indent_size_for_line(row).len;
                        // Outdent to the most recent qualifying start position
                        // that is not indented further than this row.
                        let basis_row = rule
                            .valid_after
                            .iter()
                            .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
                            .flatten()
                            .filter(|start_point| start_point.column <= row_start_column)
                            .max_by_key(|start_point| start_point.row);
                        if let Some(outdent_to_row) = basis_row {
                            regex_outdent_map.insert(row, outdent_to_row.row);
                        }
                        break;
                    }
                }
            },
        );

        let mut indent_changes = indent_change_rows.into_iter().peekable();
        let mut prev_row = if config.auto_indent_using_last_non_empty_line {
            prev_non_blank_row.unwrap_or(0)
        } else {
            row_range.start.saturating_sub(1)
        };

        let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
        // Emit one suggestion per row, carrying the previous row forward as the basis.
        Some(row_range.map(move |row| {
            let row_start = Point::new(row, self.indent_size_for_line(row).len);

            let mut indent_from_prev_row = false;
            let mut outdent_from_prev_row = false;
            let mut outdent_to_row = u32::MAX;
            let mut from_regex = false;

            // Consume all regex-driven indent changes at or before this row.
            while let Some((indent_row, delta)) = indent_changes.peek() {
                match indent_row.cmp(&row) {
                    Ordering::Equal => match delta {
                        Ordering::Less => {
                            from_regex = true;
                            outdent_from_prev_row = true
                        }
                        Ordering::Greater => {
                            indent_from_prev_row = true;
                            from_regex = true
                        }
                        _ => {}
                    },

                    Ordering::Greater => break,
                    Ordering::Less => {}
                }

                indent_changes.next();
            }

            // Apply tree-sitter indent ranges that open on the previous row
            // or close between the previous row's start and this row's start.
            for range in &indent_ranges {
                if range.start.row >= row {
                    break;
                }
                if range.start.row == prev_row && range.end > row_start {
                    indent_from_prev_row = true;
                }
                if range.end > prev_row_start && range.end <= row_start {
                    outdent_to_row = outdent_to_row.min(range.start.row);
                }
            }

            // A regex-based outdent overrides any tree-sitter-derived indent.
            if let Some(basis_row) = regex_outdent_map.get(&row) {
                indent_from_prev_row = false;
                outdent_to_row = *basis_row;
                from_regex = true;
            }

            let within_error = error_ranges
                .iter()
                .any(|e| e.start.row < row && e.end > row_start);

            let suggestion = if outdent_to_row == prev_row
                || (outdent_from_prev_row && indent_from_prev_row)
            {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Equal,
                    within_error: within_error && !from_regex,
                })
            } else if indent_from_prev_row {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Greater,
                    within_error: within_error && !from_regex,
                })
            } else if outdent_to_row < prev_row {
                Some(IndentSuggestion {
                    basis_row: outdent_to_row,
                    delta: Ordering::Equal,
                    within_error: within_error && !from_regex,
                })
            } else if outdent_from_prev_row {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Less,
                    within_error: within_error && !from_regex,
                })
            } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
            {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Equal,
                    within_error: within_error && !from_regex,
                })
            } else {
                None
            };

            prev_row = row;
            prev_row_start = row_start;
            suggestion
        }))
    }
3271
3272 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3273 while row > 0 {
3274 row -= 1;
3275 if !self.is_line_blank(row) {
3276 return Some(row);
3277 }
3278 }
3279 None
3280 }
3281
3282 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3283 let captures = self.syntax.captures(range, &self.text, |grammar| {
3284 grammar
3285 .highlights_config
3286 .as_ref()
3287 .map(|config| &config.query)
3288 });
3289 let highlight_maps = captures
3290 .grammars()
3291 .iter()
3292 .map(|grammar| grammar.highlight_map())
3293 .collect();
3294 (captures, highlight_maps)
3295 }
3296
3297 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3298 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3299 /// returned in chunks where each chunk has a single syntax highlighting style and
3300 /// diagnostic status.
3301 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3302 let range = range.start.to_offset(self)..range.end.to_offset(self);
3303
3304 let mut syntax = None;
3305 if language_aware {
3306 syntax = Some(self.get_highlights(range.clone()));
3307 }
3308 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3309 let diagnostics = language_aware;
3310 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3311 }
3312
    /// Produces [`HighlightedText`] for `range`, styled with `syntax_theme` and
    /// optionally overridden by `override_style`.
    pub fn highlighted_text_for_range<T: ToOffset>(
        &self,
        range: Range<T>,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) -> HighlightedText {
        HighlightedText::from_buffer_range(
            range,
            &self.text,
            &self.syntax,
            override_style,
            syntax_theme,
        )
    }
3327
3328 /// Invokes the given callback for each line of text in the given range of the buffer.
3329 /// Uses callback to avoid allocating a string for each line.
3330 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3331 let mut line = String::new();
3332 let mut row = range.start.row;
3333 for chunk in self
3334 .as_rope()
3335 .chunks_in_range(range.to_offset(self))
3336 .chain(["\n"])
3337 {
3338 for (newline_ix, text) in chunk.split('\n').enumerate() {
3339 if newline_ix > 0 {
3340 callback(row, &line);
3341 row += 1;
3342 line.clear();
3343 }
3344 line.push_str(text);
3345 }
3346 }
3347 }
3348
    /// Iterates over every [`SyntaxLayer`] in the buffer.
    pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
        // Whole-buffer range, including hidden layers.
        self.syntax_layers_for_range(0..self.len(), true)
    }
3353
    /// Returns the syntax layer at `position`: the last non-hidden layer yielded
    /// for the empty range at that offset whose node extends beyond the offset.
    pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
        let offset = position.to_offset(self);
        self.syntax_layers_for_range(offset..offset, false)
            .filter(|l| l.node().end_byte() > offset)
            .last()
    }
3360
    /// Iterates over the syntax layers intersecting `range`, optionally
    /// including hidden layers.
    pub fn syntax_layers_for_range<D: ToOffset>(
        &self,
        range: Range<D>,
        include_hidden: bool,
    ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
        self.syntax
            .layers_for_range(range, &self.text, include_hidden)
    }
3369
3370 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3371 &self,
3372 range: Range<D>,
3373 ) -> Option<SyntaxLayer<'_>> {
3374 let range = range.to_offset(self);
3375 self.syntax
3376 .layers_for_range(range, &self.text, false)
3377 .max_by(|a, b| {
3378 if a.depth != b.depth {
3379 a.depth.cmp(&b.depth)
3380 } else if a.offset.0 != b.offset.0 {
3381 a.offset.0.cmp(&b.offset.0)
3382 } else {
3383 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3384 }
3385 })
3386 }
3387
    /// Returns the buffer's main [`Language`], ignoring injected syntax layers.
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }
3392
    /// Returns the [`Language`] at the given location: the language of the
    /// syntax layer at that position, falling back to the buffer's main language.
    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
        self.syntax_layer_at(position)
            .map(|info| info.language)
            .or(self.language.as_ref())
    }
3399
    /// Returns the settings for the language at the given location.
    pub fn settings_at<'a, D: ToOffset>(
        &'a self,
        position: D,
        cx: &'a App,
    ) -> Cow<'a, LanguageSettings> {
        // Resolve per-language settings, scoped to this buffer's file when present.
        language_settings(
            self.language_at(position).map(|l| l.name()),
            self.file.as_ref(),
            cx,
        )
    }
3412
    /// Returns a [`CharClassifier`] configured with the language scope at `point`.
    pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
        CharClassifier::new(self.language_scope_at(point))
    }
3416
    /// Returns the [`LanguageScope`] at the given location, falling back to the
    /// buffer's main language (with no override) when no layer matches.
    pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
        let offset = position.to_offset(self);
        let mut scope = None;
        let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;

        // Use the layer that has the smallest node intersecting the given point.
        for layer in self
            .syntax
            .layers_for_range(offset..offset, &self.text, false)
        {
            let mut cursor = layer.node().walk();

            // Descend to the smallest node in this layer containing `offset`.
            let mut range = None;
            loop {
                let child_range = cursor.node().byte_range();
                if !child_range.contains(&offset) {
                    break;
                }

                range = Some(child_range);
                if cursor.goto_first_child_for_byte(offset).is_none() {
                    break;
                }
            }

            // Prefer deeper layers; at equal depth, prefer the smaller node.
            if let Some(range) = range
                && smallest_range_and_depth.as_ref().is_none_or(
                    |(smallest_range, smallest_range_depth)| {
                        if layer.depth > *smallest_range_depth {
                            true
                        } else if layer.depth == *smallest_range_depth {
                            range.len() < smallest_range.len()
                        } else {
                            false
                        }
                    },
                )
            {
                smallest_range_and_depth = Some((range, layer.depth));
                scope = Some(LanguageScope {
                    language: layer.language.clone(),
                    override_id: layer.override_id(offset, &self.text),
                });
            }
        }

        // No layer matched: fall back to the buffer's main language.
        scope.or_else(|| {
            self.language.clone().map(|language| LanguageScope {
                language,
                override_id: None,
            })
        })
    }
3471
3472 /// Returns a tuple of the range and character kind of the word
3473 /// surrounding the given position.
3474 pub fn surrounding_word<T: ToOffset>(
3475 &self,
3476 start: T,
3477 scope_context: Option<CharScopeContext>,
3478 ) -> (Range<usize>, Option<CharKind>) {
3479 let mut start = start.to_offset(self);
3480 let mut end = start;
3481 let mut next_chars = self.chars_at(start).take(128).peekable();
3482 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3483
3484 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3485 let word_kind = cmp::max(
3486 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3487 next_chars.peek().copied().map(|c| classifier.kind(c)),
3488 );
3489
3490 for ch in prev_chars {
3491 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3492 start -= ch.len_utf8();
3493 } else {
3494 break;
3495 }
3496 }
3497
3498 for ch in next_chars {
3499 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3500 end += ch.len_utf8();
3501 } else {
3502 break;
3503 }
3504 }
3505
3506 (start..end, word_kind)
3507 }
3508
    /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
    /// range. When `require_larger` is true, the node found must be larger than the query range.
    ///
    /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
    /// be moved to the root of the tree.
    fn goto_node_enclosing_range(
        cursor: &mut tree_sitter::TreeCursor,
        query_range: &Range<usize>,
        require_larger: bool,
    ) -> bool {
        // Once we start ascending, the first enclosing ancestor found is the answer.
        let mut ascending = false;
        loop {
            let mut range = cursor.node().byte_range();
            if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
                if range.start > query_range.start {
                    cursor.goto_previous_sibling();
                    range = cursor.node().byte_range();
                }
            } else {
                // When the query range is non-empty and the current node ends exactly at the start,
                // move to the next sibling to find a node that extends beyond the start.
                if range.end == query_range.start {
                    cursor.goto_next_sibling();
                    range = cursor.node().byte_range();
                }
            }

            // The node qualifies when it (inclusively) contains the query range
            // and, if required, is strictly larger than it.
            let encloses = range.contains_inclusive(query_range)
                && (!require_larger || range.len() > query_range.len());
            if !encloses {
                ascending = true;
                if !cursor.goto_parent() {
                    return false;
                }
                continue;
            } else if ascending {
                return true;
            }

            // Descend into the current node.
            if cursor
                .goto_first_child_for_byte(query_range.start)
                .is_none()
            {
                return true;
            }
        }
    }
3559
    /// Returns the smallest syntax node that encloses `range` and is strictly
    /// larger than it, considering every syntax layer (including hidden ones)
    /// and keeping the smallest candidate across layers.
    pub fn syntax_ancestor<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
    ) -> Option<tree_sitter::Node<'a>> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut result: Option<tree_sitter::Node<'a>> = None;
        for layer in self
            .syntax
            .layers_for_range(range.clone(), &self.text, true)
        {
            let mut cursor = layer.node().walk();

            // Find the node that both contains the range and is larger than it.
            if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
                continue;
            }

            let left_node = cursor.node();
            let mut layer_result = left_node;

            // For an empty range, try to find another node immediately to the right of the range.
            if left_node.end_byte() == range.start {
                let mut right_node = None;
                // Advance to the next node in the tree, ascending as needed.
                while !cursor.goto_next_sibling() {
                    if !cursor.goto_parent() {
                        break;
                    }
                }

                // Descend through nodes that begin exactly at the range start.
                while cursor.node().start_byte() == range.start {
                    right_node = Some(cursor.node());
                    if !cursor.goto_first_child() {
                        break;
                    }
                }

                // If there is a candidate node on both sides of the (empty) range, then
                // decide between the two by favoring a named node over an anonymous token.
                // If both nodes are the same in that regard, favor the right one.
                if let Some(right_node) = right_node
                    && (right_node.is_named() || !left_node.is_named())
                {
                    layer_result = right_node;
                }
            }

            // Across layers, keep the smaller node; ties go to the later layer.
            if let Some(previous_result) = &result
                && previous_result.byte_range().len() < layer_result.byte_range().len()
            {
                continue;
            }
            result = Some(layer_result);
        }

        result
    }
3616
    /// Find the previous sibling syntax node at the given range.
    ///
    /// This function locates the syntax node that precedes the node containing
    /// the given range. It searches hierarchically by:
    /// 1. Finding the node that contains the given range
    /// 2. Looking for the previous sibling at the same tree level
    /// 3. If no sibling is found, moving up to parent levels and searching for siblings
    ///
    /// Returns `None` if there is no previous sibling at any ancestor level.
    pub fn syntax_prev_sibling<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
    ) -> Option<tree_sitter::Node<'a>> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut result: Option<tree_sitter::Node<'a>> = None;

        for layer in self
            .syntax
            .layers_for_range(range.clone(), &self.text, true)
        {
            let mut cursor = layer.node().walk();

            // Find the node that contains the range
            if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
                continue;
            }

            // Look for the previous sibling, moving up ancestor levels if needed
            loop {
                if cursor.goto_previous_sibling() {
                    let layer_result = cursor.node();

                    // Across layers, prefer the sibling that ends earliest.
                    if let Some(previous_result) = &result {
                        if previous_result.byte_range().end < layer_result.byte_range().end {
                            continue;
                        }
                    }
                    result = Some(layer_result);
                    break;
                }

                // No sibling found at this level, try moving up to parent
                if !cursor.goto_parent() {
                    break;
                }
            }
        }

        result
    }
3667
    /// Find the next sibling syntax node at the given range.
    ///
    /// This function locates the syntax node that follows the node containing
    /// the given range. It searches hierarchically by:
    /// 1. Finding the node that contains the given range
    /// 2. Looking for the next sibling at the same tree level
    /// 3. If no sibling is found, moving up to parent levels and searching for siblings
    ///
    /// Returns `None` if there is no next sibling at any ancestor level.
    pub fn syntax_next_sibling<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
    ) -> Option<tree_sitter::Node<'a>> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut result: Option<tree_sitter::Node<'a>> = None;

        for layer in self
            .syntax
            .layers_for_range(range.clone(), &self.text, true)
        {
            let mut cursor = layer.node().walk();

            // Find the node that contains the range
            if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
                continue;
            }

            // Look for the next sibling, moving up ancestor levels if needed
            loop {
                if cursor.goto_next_sibling() {
                    let layer_result = cursor.node();

                    // Across layers, prefer the sibling that starts latest.
                    if let Some(previous_result) = &result {
                        if previous_result.byte_range().start > layer_result.byte_range().start {
                            continue;
                        }
                    }
                    result = Some(layer_result);
                    break;
                }

                // No sibling found at this level, try moving up to parent
                if !cursor.goto_parent() {
                    break;
                }
            }
        }

        result
    }
3718
    /// Returns the root syntax node within the given row
    pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
        let start_offset = position.to_offset(self);

        let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;

        // Only the first layer reported for this position is considered.
        let layer = self
            .syntax
            .layers_for_range(start_offset..start_offset, &self.text, true)
            .next()?;

        let mut cursor = layer.node().walk();

        // Descend to the first leaf that touches the start of the range.
        while cursor.goto_first_child_for_byte(start_offset).is_some() {
            if cursor.node().end_byte() == start_offset {
                cursor.goto_next_sibling();
            }
        }

        // Ascend to the root node within the same row.
        // NOTE(review): when the loop breaks, the cursor has already moved to the
        // first ancestor that starts on a *different* row, and that node is what
        // gets returned — confirm this is the intended result.
        while cursor.goto_parent() {
            if cursor.node().start_position().row != row {
                break;
            }
        }

        Some(cursor.node())
    }
3748
    /// Returns the outline for the buffer.
    ///
    /// This method allows passing an optional [`SyntaxTheme`] to
    /// syntax-highlight the returned symbols.
    pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
        // Cover the whole buffer and include extra-context captures.
        Outline::new(self.outline_items_containing(0..self.len(), true, theme))
    }
3756
3757 /// Returns all the symbols that contain the given position.
3758 ///
3759 /// This method allows passing an optional [`SyntaxTheme`] to
3760 /// syntax-highlight the returned symbols.
3761 pub fn symbols_containing<T: ToOffset>(
3762 &self,
3763 position: T,
3764 theme: Option<&SyntaxTheme>,
3765 ) -> Vec<OutlineItem<Anchor>> {
3766 let position = position.to_offset(self);
3767 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3768 let end = self.clip_offset(position + 1, Bias::Right);
3769 let mut items = self.outline_items_containing(start..end, false, theme);
3770 let mut prev_depth = None;
3771 items.retain(|item| {
3772 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3773 prev_depth = Some(item.depth);
3774 result
3775 });
3776 items
3777 }
3778
3779 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3780 let range = range.to_offset(self);
3781 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3782 grammar.outline_config.as_ref().map(|c| &c.query)
3783 });
3784 let configs = matches
3785 .grammars()
3786 .iter()
3787 .map(|g| g.outline_config.as_ref().unwrap())
3788 .collect::<Vec<_>>();
3789
3790 while let Some(mat) = matches.peek() {
3791 let config = &configs[mat.grammar_index];
3792 let containing_item_node = maybe!({
3793 let item_node = mat.captures.iter().find_map(|cap| {
3794 if cap.index == config.item_capture_ix {
3795 Some(cap.node)
3796 } else {
3797 None
3798 }
3799 })?;
3800
3801 let item_byte_range = item_node.byte_range();
3802 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3803 None
3804 } else {
3805 Some(item_node)
3806 }
3807 });
3808
3809 if let Some(item_node) = containing_item_node {
3810 return Some(
3811 Point::from_ts_point(item_node.start_position())
3812 ..Point::from_ts_point(item_node.end_position()),
3813 );
3814 }
3815
3816 matches.advance();
3817 }
3818 None
3819 }
3820
    /// Returns the outline items whose nodes intersect `range`, with depths
    /// assigned by containment and positions converted to anchors.
    ///
    /// `include_extra_context` controls whether extra-context captures are
    /// included in each item's text; `theme` optionally syntax-highlights it.
    pub fn outline_items_containing<T: ToOffset>(
        &self,
        range: Range<T>,
        include_extra_context: bool,
        theme: Option<&SyntaxTheme>,
    ) -> Vec<OutlineItem<Anchor>> {
        let range = range.to_offset(self);
        let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
            grammar.outline_config.as_ref().map(|c| &c.query)
        });

        let mut items = Vec::new();
        let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
        while let Some(mat) = matches.peek() {
            let config = matches.grammars()[mat.grammar_index]
                .outline_config
                .as_ref()
                .unwrap();
            if let Some(item) =
                self.next_outline_item(config, &mat, &range, include_extra_context, theme)
            {
                items.push(item);
            } else if let Some(capture) = mat
                .captures
                .iter()
                .find(|capture| Some(capture.index) == config.annotation_capture_ix)
            {
                // Not an outline item: record the annotation capture's row
                // range, merging it with the previous range when they touch.
                let capture_range = capture.node.start_position()..capture.node.end_position();
                let mut capture_row_range =
                    capture_range.start.row as u32..capture_range.end.row as u32;
                // A capture ending at column 0 does not occupy its final row.
                if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
                {
                    capture_row_range.end -= 1;
                }
                if let Some(last_row_range) = annotation_row_ranges.last_mut() {
                    if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
                        last_row_range.end = capture_row_range.end;
                    } else {
                        annotation_row_ranges.push(capture_row_range);
                    }
                } else {
                    annotation_row_ranges.push(capture_row_range);
                }
            }
            matches.advance();
        }

        // Sort so containing items come before the items they contain.
        items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));

        // Assign depths based on containment relationships and convert to anchors.
        let mut item_ends_stack = Vec::<Point>::new();
        let mut anchor_items = Vec::new();
        let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
        for item in items {
            // Pop stack entries that end before this item ends; the remaining
            // stack height is this item's nesting depth.
            while let Some(last_end) = item_ends_stack.last().copied() {
                if last_end < item.range.end {
                    item_ends_stack.pop();
                } else {
                    break;
                }
            }

            // Attach an annotation whose final row immediately precedes the item.
            let mut annotation_row_range = None;
            while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
                let row_preceding_item = item.range.start.row.saturating_sub(1);
                if next_annotation_row_range.end < row_preceding_item {
                    annotation_row_ranges.next();
                } else {
                    if next_annotation_row_range.end == row_preceding_item {
                        annotation_row_range = Some(next_annotation_row_range.clone());
                        annotation_row_ranges.next();
                    }
                    break;
                }
            }

            anchor_items.push(OutlineItem {
                depth: item_ends_stack.len(),
                range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
                text: item.text,
                highlight_ranges: item.highlight_ranges,
                name_ranges: item.name_ranges,
                body_range: item
                    .body_range
                    .map(|r| self.anchor_after(r.start)..self.anchor_before(r.end)),
                annotation_range: annotation_row_range.map(|annotation_range| {
                    self.anchor_after(Point::new(annotation_range.start, 0))
                        ..self.anchor_before(Point::new(
                            annotation_range.end,
                            self.line_len(annotation_range.end),
                        ))
                }),
            });
            item_ends_stack.push(item.range.end);
        }

        anchor_items
    }
3919
    /// Converts one match of a language's outline query into an
    /// [`OutlineItem`], or returns `None` when the matched item does not
    /// intersect `range` or captured no displayable text.
    fn next_outline_item(
        &self,
        config: &OutlineConfig,
        mat: &SyntaxMapMatch,
        range: &Range<usize>,
        include_extra_context: bool,
        theme: Option<&SyntaxTheme>,
    ) -> Option<OutlineItem<Point>> {
        // The `item` capture delimits the whole outline entry.
        let item_node = mat.captures.iter().find_map(|cap| {
            if cap.index == config.item_capture_ix {
                Some(cap.node)
            } else {
                None
            }
        })?;

        // Skip items that lie entirely outside the requested byte range.
        let item_byte_range = item_node.byte_range();
        if item_byte_range.end < range.start || item_byte_range.start > range.end {
            return None;
        }
        let item_point_range = Point::from_ts_point(item_node.start_position())
            ..Point::from_ts_point(item_node.end_position());

        // Body delimiters (open/close captures) used for `body_range` below.
        let mut open_point = None;
        let mut close_point = None;

        // Each entry: a byte range to render, plus whether it belongs to the
        // item's name (true) or its surrounding context (false).
        let mut buffer_ranges = Vec::new();
        let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
            let mut range = node.start_byte()..node.end_byte();
            let start = node.start_position();
            // Multi-line captures are truncated to their first line.
            if node.end_position().row > start.row {
                range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
            }

            if !range.is_empty() {
                buffer_ranges.push((range, node_is_name));
            }
        };

        for capture in mat.captures {
            if capture.index == config.name_capture_ix {
                add_to_buffer_ranges(capture.node, true);
            } else if Some(capture.index) == config.context_capture_ix
                || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
            {
                add_to_buffer_ranges(capture.node, false);
            } else {
                if Some(capture.index) == config.open_capture_ix {
                    open_point = Some(Point::from_ts_point(capture.node.end_position()));
                } else if Some(capture.index) == config.close_capture_ix {
                    close_point = Some(Point::from_ts_point(capture.node.start_position()));
                }
            }
        }

        if buffer_ranges.is_empty() {
            return None;
        }

        // Build the display text by concatenating the captured ranges,
        // recording syntax-highlight spans and which spans form the name.
        let mut text = String::new();
        let mut highlight_ranges = Vec::new();
        let mut name_ranges = Vec::new();
        let mut chunks = self.chunks(
            buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
            true,
        );
        let mut last_buffer_range_end = 0;
        for (buffer_range, is_name) in buffer_ranges {
            // Separate non-adjacent captures with a single space.
            let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
            if space_added {
                text.push(' ');
            }
            let before_append_len = text.len();
            let mut offset = buffer_range.start;
            chunks.seek(buffer_range.clone());
            for mut chunk in chunks.by_ref() {
                // Clip the final chunk to the end of this capture's range.
                if chunk.text.len() > buffer_range.end - offset {
                    chunk.text = &chunk.text[0..(buffer_range.end - offset)];
                    offset = buffer_range.end;
                } else {
                    offset += chunk.text.len();
                }
                let style = chunk
                    .syntax_highlight_id
                    .zip(theme)
                    .and_then(|(highlight, theme)| highlight.style(theme));
                if let Some(style) = style {
                    let start = text.len();
                    let end = start + chunk.text.len();
                    highlight_ranges.push((start..end, style));
                }
                text.push_str(chunk.text);
                if offset >= buffer_range.end {
                    break;
                }
            }
            if is_name {
                let after_append_len = text.len();
                // Extend the name range over the separating space when this
                // name segment continues an earlier one.
                let start = if space_added && !name_ranges.is_empty() {
                    before_append_len - 1
                } else {
                    before_append_len
                };
                name_ranges.push(start..after_append_len);
            }
            last_buffer_range_end = buffer_range.end;
        }

        Some(OutlineItem {
            depth: 0, // We'll calculate the depth later
            range: item_point_range,
            text,
            highlight_ranges,
            name_ranges,
            body_range: open_point.zip(close_point).map(|(start, end)| start..end),
            annotation_range: None,
        })
    }
4038
4039 pub fn function_body_fold_ranges<T: ToOffset>(
4040 &self,
4041 within: Range<T>,
4042 ) -> impl Iterator<Item = Range<usize>> + '_ {
4043 self.text_object_ranges(within, TreeSitterOptions::default())
4044 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4045 }
4046
    /// For each grammar in the language, runs the provided
    /// [`tree_sitter::Query`] against the given range.
    ///
    /// `query` selects, per grammar, which query to run (e.g. brackets,
    /// outline); grammars for which it returns `None` are skipped.
    pub fn matches(
        &self,
        range: Range<usize>,
        query: fn(&Grammar) -> Option<&tree_sitter::Query>,
    ) -> SyntaxMapMatches<'_> {
        self.syntax.matches(range, self, query)
    }
4056
4057 pub fn all_bracket_ranges(
4058 &self,
4059 range: Range<usize>,
4060 ) -> impl Iterator<Item = BracketMatch> + '_ {
4061 let mut matches = self.syntax.matches_with_options(
4062 range.clone(),
4063 &self.text,
4064 TreeSitterOptions {
4065 max_distance_from_inclusion_byte_range: Some(10 * 1024),
4066 max_start_depth: None,
4067 },
4068 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4069 );
4070 let configs = matches
4071 .grammars()
4072 .iter()
4073 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4074 .collect::<Vec<_>>();
4075
4076 iter::from_fn(move || {
4077 while let Some(mat) = matches.peek() {
4078 let mut open = None;
4079 let mut close = None;
4080 let config = &configs[mat.grammar_index];
4081 let pattern = &config.patterns[mat.pattern_index];
4082 for capture in mat.captures {
4083 if capture.index == config.open_capture_ix {
4084 open = Some(capture.node.byte_range());
4085 } else if capture.index == config.close_capture_ix {
4086 close = Some(capture.node.byte_range());
4087 }
4088 }
4089
4090 matches.advance();
4091
4092 let Some((open_range, close_range)) = open.zip(close) else {
4093 continue;
4094 };
4095
4096 let bracket_range = open_range.start..=close_range.end;
4097 if !bracket_range.overlaps(&range) {
4098 continue;
4099 }
4100
4101 return Some(BracketMatch {
4102 open_range,
4103 close_range,
4104 newline_only: pattern.newline_only,
4105 });
4106 }
4107 None
4108 })
4109 }
4110
4111 /// Returns bracket range pairs overlapping or adjacent to `range`
4112 pub fn bracket_ranges<T: ToOffset>(
4113 &self,
4114 range: Range<T>,
4115 ) -> impl Iterator<Item = BracketMatch> + '_ {
4116 // Find bracket pairs that *inclusively* contain the given range.
4117 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4118 self.all_bracket_ranges(range)
4119 .filter(|pair| !pair.newline_only)
4120 }
4121
4122 pub fn debug_variables_query<T: ToOffset>(
4123 &self,
4124 range: Range<T>,
4125 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4126 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4127
4128 let mut matches = self.syntax.matches_with_options(
4129 range.clone(),
4130 &self.text,
4131 TreeSitterOptions::default(),
4132 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4133 );
4134
4135 let configs = matches
4136 .grammars()
4137 .iter()
4138 .map(|grammar| grammar.debug_variables_config.as_ref())
4139 .collect::<Vec<_>>();
4140
4141 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4142
4143 iter::from_fn(move || {
4144 loop {
4145 while let Some(capture) = captures.pop() {
4146 if capture.0.overlaps(&range) {
4147 return Some(capture);
4148 }
4149 }
4150
4151 let mat = matches.peek()?;
4152
4153 let Some(config) = configs[mat.grammar_index].as_ref() else {
4154 matches.advance();
4155 continue;
4156 };
4157
4158 for capture in mat.captures {
4159 let Some(ix) = config
4160 .objects_by_capture_ix
4161 .binary_search_by_key(&capture.index, |e| e.0)
4162 .ok()
4163 else {
4164 continue;
4165 };
4166 let text_object = config.objects_by_capture_ix[ix].1;
4167 let byte_range = capture.node.byte_range();
4168
4169 let mut found = false;
4170 for (range, existing) in captures.iter_mut() {
4171 if existing == &text_object {
4172 range.start = range.start.min(byte_range.start);
4173 range.end = range.end.max(byte_range.end);
4174 found = true;
4175 break;
4176 }
4177 }
4178
4179 if !found {
4180 captures.push((byte_range, text_object));
4181 }
4182 }
4183
4184 matches.advance();
4185 }
4186 })
4187 }
4188
4189 pub fn text_object_ranges<T: ToOffset>(
4190 &self,
4191 range: Range<T>,
4192 options: TreeSitterOptions,
4193 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4194 let range =
4195 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4196
4197 let mut matches =
4198 self.syntax
4199 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4200 grammar.text_object_config.as_ref().map(|c| &c.query)
4201 });
4202
4203 let configs = matches
4204 .grammars()
4205 .iter()
4206 .map(|grammar| grammar.text_object_config.as_ref())
4207 .collect::<Vec<_>>();
4208
4209 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4210
4211 iter::from_fn(move || {
4212 loop {
4213 while let Some(capture) = captures.pop() {
4214 if capture.0.overlaps(&range) {
4215 return Some(capture);
4216 }
4217 }
4218
4219 let mat = matches.peek()?;
4220
4221 let Some(config) = configs[mat.grammar_index].as_ref() else {
4222 matches.advance();
4223 continue;
4224 };
4225
4226 for capture in mat.captures {
4227 let Some(ix) = config
4228 .text_objects_by_capture_ix
4229 .binary_search_by_key(&capture.index, |e| e.0)
4230 .ok()
4231 else {
4232 continue;
4233 };
4234 let text_object = config.text_objects_by_capture_ix[ix].1;
4235 let byte_range = capture.node.byte_range();
4236
4237 let mut found = false;
4238 for (range, existing) in captures.iter_mut() {
4239 if existing == &text_object {
4240 range.start = range.start.min(byte_range.start);
4241 range.end = range.end.max(byte_range.end);
4242 found = true;
4243 break;
4244 }
4245 }
4246
4247 if !found {
4248 captures.push((byte_range, text_object));
4249 }
4250 }
4251
4252 matches.advance();
4253 }
4254 })
4255 }
4256
4257 /// Returns enclosing bracket ranges containing the given range
4258 pub fn enclosing_bracket_ranges<T: ToOffset>(
4259 &self,
4260 range: Range<T>,
4261 ) -> impl Iterator<Item = BracketMatch> + '_ {
4262 let range = range.start.to_offset(self)..range.end.to_offset(self);
4263
4264 self.bracket_ranges(range.clone()).filter(move |pair| {
4265 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4266 })
4267 }
4268
4269 /// Returns the smallest enclosing bracket ranges containing the given range or None if no brackets contain range
4270 ///
4271 /// Can optionally pass a range_filter to filter the ranges of brackets to consider
4272 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4273 &self,
4274 range: Range<T>,
4275 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4276 ) -> Option<(Range<usize>, Range<usize>)> {
4277 let range = range.start.to_offset(self)..range.end.to_offset(self);
4278
4279 // Get the ranges of the innermost pair of brackets.
4280 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4281
4282 for pair in self.enclosing_bracket_ranges(range) {
4283 if let Some(range_filter) = range_filter
4284 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4285 {
4286 continue;
4287 }
4288
4289 let len = pair.close_range.end - pair.open_range.start;
4290
4291 if let Some((existing_open, existing_close)) = &result {
4292 let existing_len = existing_close.end - existing_open.start;
4293 if len > existing_len {
4294 continue;
4295 }
4296 }
4297
4298 result = Some((pair.open_range, pair.close_range));
4299 }
4300
4301 result
4302 }
4303
4304 /// Returns anchor ranges for any matches of the redaction query.
4305 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4306 /// will be run on the relevant section of the buffer.
4307 pub fn redacted_ranges<T: ToOffset>(
4308 &self,
4309 range: Range<T>,
4310 ) -> impl Iterator<Item = Range<usize>> + '_ {
4311 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4312 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4313 grammar
4314 .redactions_config
4315 .as_ref()
4316 .map(|config| &config.query)
4317 });
4318
4319 let configs = syntax_matches
4320 .grammars()
4321 .iter()
4322 .map(|grammar| grammar.redactions_config.as_ref())
4323 .collect::<Vec<_>>();
4324
4325 iter::from_fn(move || {
4326 let redacted_range = syntax_matches
4327 .peek()
4328 .and_then(|mat| {
4329 configs[mat.grammar_index].and_then(|config| {
4330 mat.captures
4331 .iter()
4332 .find(|capture| capture.index == config.redaction_capture_ix)
4333 })
4334 })
4335 .map(|mat| mat.node.byte_range());
4336 syntax_matches.advance();
4337 redacted_range
4338 })
4339 }
4340
4341 pub fn injections_intersecting_range<T: ToOffset>(
4342 &self,
4343 range: Range<T>,
4344 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4345 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4346
4347 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4348 grammar
4349 .injection_config
4350 .as_ref()
4351 .map(|config| &config.query)
4352 });
4353
4354 let configs = syntax_matches
4355 .grammars()
4356 .iter()
4357 .map(|grammar| grammar.injection_config.as_ref())
4358 .collect::<Vec<_>>();
4359
4360 iter::from_fn(move || {
4361 let ranges = syntax_matches.peek().and_then(|mat| {
4362 let config = &configs[mat.grammar_index]?;
4363 let content_capture_range = mat.captures.iter().find_map(|capture| {
4364 if capture.index == config.content_capture_ix {
4365 Some(capture.node.byte_range())
4366 } else {
4367 None
4368 }
4369 })?;
4370 let language = self.language_at(content_capture_range.start)?;
4371 Some((content_capture_range, language))
4372 });
4373 syntax_matches.advance();
4374 ranges
4375 })
4376 }
4377
    /// Yields a [`RunnableRange`] for every runnable-query match intersecting
    /// `offset_range` that contains a run marker capture.
    pub fn runnable_ranges(
        &self,
        offset_range: Range<usize>,
    ) -> impl Iterator<Item = RunnableRange> + '_ {
        let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
            grammar.runnable_config.as_ref().map(|config| &config.query)
        });

        let test_configs = syntax_matches
            .grammars()
            .iter()
            .map(|grammar| grammar.runnable_config.as_ref())
            .collect::<Vec<_>>();

        iter::from_fn(move || {
            loop {
                let mat = syntax_matches.peek()?;

                let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
                    let mut run_range = None;
                    // Union of all capture ranges, folded from an inverted
                    // sentinel (MAX..0) so any capture narrows it.
                    let full_range = mat.captures.iter().fold(
                        Range {
                            start: usize::MAX,
                            end: 0,
                        },
                        |mut acc, next| {
                            let byte_range = next.node.byte_range();
                            if acc.start > byte_range.start {
                                acc.start = byte_range.start;
                            }
                            if acc.end < byte_range.end {
                                acc.end = byte_range.end;
                            }
                            acc
                        },
                    );
                    if full_range.start > full_range.end {
                        // We did not find a full spanning range of this match.
                        return None;
                    }
                    // Collect named captures; the `Run` capture's range is
                    // recorded as a side effect of the filter.
                    let extra_captures: SmallVec<[_; 1]> =
                        SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
                            test_configs
                                .extra_captures
                                .get(capture.index as usize)
                                .cloned()
                                .and_then(|tag_name| match tag_name {
                                    RunnableCapture::Named(name) => {
                                        Some((capture.node.byte_range(), name))
                                    }
                                    RunnableCapture::Run => {
                                        let _ = run_range.insert(capture.node.byte_range());
                                        None
                                    }
                                })
                        }));
                    // A match without a run marker produces no runnable.
                    let run_range = run_range?;
                    // Tags come from `tag` property settings on the pattern.
                    let tags = test_configs
                        .query
                        .property_settings(mat.pattern_index)
                        .iter()
                        .filter_map(|property| {
                            if *property.key == *"tag" {
                                property
                                    .value
                                    .as_ref()
                                    .map(|value| RunnableTag(value.to_string().into()))
                            } else {
                                None
                            }
                        })
                        .collect();
                    // Resolve each named capture to its buffer text.
                    let extra_captures = extra_captures
                        .into_iter()
                        .map(|(range, name)| {
                            (
                                name.to_string(),
                                self.text_for_range(range).collect::<String>(),
                            )
                        })
                        .collect();
                    // All tags should have the same range.
                    Some(RunnableRange {
                        run_range,
                        full_range,
                        runnable: Runnable {
                            tags,
                            language: mat.language,
                            buffer: self.remote_id(),
                        },
                        extra_captures,
                        buffer_id: self.remote_id(),
                    })
                });

                syntax_matches.advance();
                if test_range.is_some() {
                    // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
                    // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
                    return test_range;
                }
            }
        })
    }
4482
    /// Returns selections for remote peers intersecting the given range.
    ///
    /// For each replica with non-empty selections, yields the replica id, its
    /// line-mode flag, its cursor shape, and the slice of its selections that
    /// intersect `range`. Local selections are skipped unless `include_local`.
    #[allow(clippy::type_complexity)]
    pub fn selections_in_range(
        &self,
        range: Range<Anchor>,
        include_local: bool,
    ) -> impl Iterator<
        Item = (
            ReplicaId,
            bool,
            CursorShape,
            impl Iterator<Item = &Selection<Anchor>> + '_,
        ),
    > + '_ {
        self.remote_selections
            .iter()
            .filter(move |(replica_id, set)| {
                (include_local || **replica_id != self.text.replica_id())
                    && !set.selections.is_empty()
            })
            .map(move |(replica_id, set)| {
                // `.then(Ordering::Greater)` makes the comparator never return
                // Equal, so `binary_search_by` always yields Err at the
                // partition point: the first selection whose end is >= range.start.
                let start_ix = match set.selections.binary_search_by(|probe| {
                    probe.end.cmp(&range.start, self).then(Ordering::Greater)
                }) {
                    Ok(ix) | Err(ix) => ix,
                };
                // Symmetrically, `.then(Ordering::Less)` finds the first
                // selection whose start is > range.end.
                let end_ix = match set.selections.binary_search_by(|probe| {
                    probe.start.cmp(&range.end, self).then(Ordering::Less)
                }) {
                    Ok(ix) | Err(ix) => ix,
                };

                (
                    *replica_id,
                    set.line_mode,
                    set.cursor_shape,
                    set.selections[start_ix..end_ix].iter(),
                )
            })
    }
4523
    /// Returns whether the buffer currently holds any diagnostics,
    /// from any language server.
    pub fn has_diagnostics(&self) -> bool {
        !self.diagnostics.is_empty()
    }
4528
    /// Returns all the diagnostics intersecting the given range.
    ///
    /// Entries from all language servers are merged into a single stream,
    /// ordered by start position (reversed when `reversed` is set), breaking
    /// ties by severity and then by group id for a stable order.
    pub fn diagnostics_in_range<'a, T, O>(
        &'a self,
        search_range: Range<T>,
        reversed: bool,
    ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
    where
        T: 'a + Clone + ToOffset,
        O: 'a + FromAnchor,
    {
        // One peekable cursor per language server's diagnostic set.
        let mut iterators: Vec<_> = self
            .diagnostics
            .iter()
            .map(|(_, collection)| {
                collection
                    .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
                    .peekable()
            })
            .collect();

        // K-way merge: repeatedly take from the iterator whose next entry
        // sorts first under the combined ordering.
        std::iter::from_fn(move || {
            let (next_ix, _) = iterators
                .iter_mut()
                .enumerate()
                .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
                .min_by(|(_, a), (_, b)| {
                    let cmp = a
                        .range
                        .start
                        .cmp(&b.range.start, self)
                        // when range is equal, sort by diagnostic severity
                        .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
                        // and stabilize order with group_id
                        .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
                    if reversed { cmp.reverse() } else { cmp }
                })?;
            iterators[next_ix]
                .next()
                .map(
                    |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
                        diagnostic,
                        // Convert anchors into the caller's coordinate type.
                        range: FromAnchor::from_anchor(&range.start, self)
                            ..FromAnchor::from_anchor(&range.end, self),
                    },
                )
        })
    }
4576
    /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
    /// should be used instead.
    ///
    /// Entries are kept sorted by [`LanguageServerId`]; `diagnostic_groups`
    /// relies on this for its binary search.
    pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
        &self.diagnostics
    }
4582
4583 /// Returns all the diagnostic groups associated with the given
4584 /// language server ID. If no language server ID is provided,
4585 /// all diagnostics groups are returned.
4586 pub fn diagnostic_groups(
4587 &self,
4588 language_server_id: Option<LanguageServerId>,
4589 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4590 let mut groups = Vec::new();
4591
4592 if let Some(language_server_id) = language_server_id {
4593 if let Ok(ix) = self
4594 .diagnostics
4595 .binary_search_by_key(&language_server_id, |e| e.0)
4596 {
4597 self.diagnostics[ix]
4598 .1
4599 .groups(language_server_id, &mut groups, self);
4600 }
4601 } else {
4602 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4603 diagnostics.groups(*language_server_id, &mut groups, self);
4604 }
4605 }
4606
4607 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4608 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4609 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4610 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4611 });
4612
4613 groups
4614 }
4615
    /// Returns an iterator over the diagnostics for the given group.
    ///
    /// All language servers' sets are consulted, since the group id alone
    /// identifies the group within each set.
    pub fn diagnostic_group<O>(
        &self,
        group_id: usize,
    ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
    where
        O: FromAnchor + 'static,
    {
        self.diagnostics
            .iter()
            .flat_map(move |(_, set)| set.group(group_id, self))
    }
4628
    /// An integer version number that accounts for all updates besides
    /// the buffer's text itself (which is versioned via a version vector).
    /// Compare values from two snapshots to cheaply detect non-text changes.
    pub fn non_text_state_update_count(&self) -> usize {
        self.non_text_state_update_count
    }
4634
    /// An integer version that changes when the buffer's syntax changes.
    /// Delegates to the syntax map's own update counter.
    pub fn syntax_update_count(&self) -> usize {
        self.syntax.update_count()
    }
4639
    /// Returns a snapshot of the underlying file, or `None` if the buffer
    /// is not backed by a file.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }
4644
4645 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4646 if let Some(file) = self.file() {
4647 if file.path().file_name().is_none() || include_root {
4648 Some(file.full_path(cx).to_string_lossy().into_owned())
4649 } else {
4650 Some(file.path().display(file.path_style(cx)).to_string())
4651 }
4652 } else {
4653 None
4654 }
4655 }
4656
    /// Collects the distinct words in `query.range`, mapped to their anchor
    /// ranges. When `query.fuzzy_contents` is set, only words containing all
    /// of its characters (case-insensitively, in order) are returned.
    pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
        let query_str = query.fuzzy_contents;
        // An empty fuzzy query matches nothing by convention.
        if query_str.is_some_and(|query| query.is_empty()) {
            return BTreeMap::default();
        }

        // Word-character classification follows the buffer's language scope.
        let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
            language,
            override_id: None,
        }));

        // `query_ix` tracks how many fuzzy query chars have been matched in
        // the current word; a word qualifies when query_ix reaches query_len.
        let mut query_ix = 0;
        let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
        let query_len = query_chars.as_ref().map_or(0, |query| query.len());

        let mut words = BTreeMap::default();
        let mut current_word_start_ix = None;
        // Byte offset of the current chunk's start within the buffer.
        let mut chunk_ix = query.range.start;
        for chunk in self.chunks(query.range, false) {
            for (i, c) in chunk.text.char_indices() {
                let ix = chunk_ix + i;
                if classifier.is_word(c) {
                    if current_word_start_ix.is_none() {
                        current_word_start_ix = Some(ix);
                    }

                    // Advance the fuzzy match against this word's chars.
                    if let Some(query_chars) = &query_chars
                        && query_ix < query_len
                        && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
                    {
                        query_ix += 1;
                    }
                    continue;
                } else if let Some(word_start) = current_word_start_ix.take()
                    && query_ix == query_len
                {
                    // A word just ended and the fuzzy query fully matched.
                    // NOTE(review): a word that runs up to the very end of
                    // `query.range` is never flushed — confirm intended.
                    let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
                    let mut word_text = self.text_for_range(word_start..ix).peekable();
                    let first_char = word_text
                        .peek()
                        .and_then(|first_chunk| first_chunk.chars().next());
                    // Skip empty and "words" starting with digits as a heuristic to reduce useless completions
                    if !query.skip_digits
                        || first_char.is_none_or(|first_char| !first_char.is_digit(10))
                    {
                        words.insert(word_text.collect(), word_range);
                    }
                }
                // Reset fuzzy progress at every word boundary.
                query_ix = 0;
            }
            chunk_ix += chunk.text.len();
        }

        words
    }
4712}
4713
/// Parameters for [`BufferSnapshot::words_in_range`].
pub struct WordsQuery<'a> {
    /// Only returns words with all chars from the fuzzy string in them,
    /// matched case-insensitively and in order.
    pub fuzzy_contents: Option<&'a str>,
    /// Skips words that start with a digit.
    pub skip_digits: bool,
    /// Buffer offset range, to look for words.
    pub range: Range<usize>,
}
4722
/// Returns the indentation (kind and length) of the line at `row`.
fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
    indent_size_for_text(text.chars_at(Point::new(row, 0)))
}
4726
4727fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4728 let mut result = IndentSize::spaces(0);
4729 for c in text {
4730 let kind = match c {
4731 ' ' => IndentKind::Space,
4732 '\t' => IndentKind::Tab,
4733 _ => break,
4734 };
4735 if result.len == 0 {
4736 result.kind = kind;
4737 }
4738 result.len += 1;
4739 }
4740 result
4741}
4742
// Explicit field-by-field clone of the snapshot.
// NOTE(review): every field appears to be `Clone`, so a derive looks
// possible — confirm whether the manual impl exists for a reason (e.g.
// trait-object fields) before changing it.
impl Clone for BufferSnapshot {
    fn clone(&self) -> Self {
        Self {
            text: self.text.clone(),
            syntax: self.syntax.clone(),
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
        }
    }
}
4756
// `BufferSnapshot` transparently exposes the underlying `text::BufferSnapshot`
// API (offsets, points, rope access) through deref coercion.
impl Deref for BufferSnapshot {
    type Target = text::BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}
4764
// SAFETY(review): no safety comment in the original. Presumably `BufferChunks`
// is only non-`Send` because of raw pointers inside the tree-sitter capture
// types it holds — confirm that invariant before relying on this from new
// threads.
unsafe impl Send for BufferChunks<'_> {}
4766
impl<'a> BufferChunks<'a> {
    /// Creates a chunk iterator over `range` of `text`, optionally carrying
    /// syntax-highlight captures and diagnostic endpoints.
    ///
    /// `buffer_snapshot` is required for `seek` to ranges outside the current
    /// captures and for populating diagnostic endpoints.
    pub(crate) fn new(
        text: &'a Rope,
        range: Range<usize>,
        syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
        diagnostics: bool,
        buffer_snapshot: Option<&'a BufferSnapshot>,
    ) -> Self {
        let mut highlights = None;
        if let Some((captures, highlight_maps)) = syntax {
            highlights = Some(BufferChunkHighlights {
                captures,
                next_capture: None,
                stack: Default::default(),
                highlight_maps,
            })
        }

        // Placeholder; filled in by `initialize_diagnostic_endpoints` below.
        let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
        let chunks = text.chunks_in_range(range.clone());

        let mut this = BufferChunks {
            range,
            buffer_snapshot,
            chunks,
            diagnostic_endpoints,
            error_depth: 0,
            warning_depth: 0,
            information_depth: 0,
            hint_depth: 0,
            unnecessary_depth: 0,
            underline: true,
            highlights,
        };
        this.initialize_diagnostic_endpoints();
        this
    }

    /// Seeks to the given byte offset in the buffer.
    pub fn seek(&mut self, range: Range<usize>) {
        let old_range = std::mem::replace(&mut self.range, range.clone());
        self.chunks.set_range(self.range.clone());
        if let Some(highlights) = self.highlights.as_mut() {
            if old_range.start <= self.range.start && old_range.end >= self.range.end {
                // Reuse existing highlights stack, as the new range is a subrange of the old one.
                highlights
                    .stack
                    .retain(|(end_offset, _)| *end_offset > range.start);
                if let Some(capture) = &highlights.next_capture
                    && range.start >= capture.node.start_byte()
                {
                    // The pending capture already covers the new start; push
                    // it onto the stack if it extends past the new start.
                    let next_capture_end = capture.node.end_byte();
                    if range.start < next_capture_end {
                        highlights.stack.push((
                            next_capture_end,
                            highlights.highlight_maps[capture.grammar_index].get(capture.index),
                        ));
                    }
                    highlights.next_capture.take();
                }
            } else if let Some(snapshot) = self.buffer_snapshot {
                // Not a subrange: request fresh captures from the snapshot.
                let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
                *highlights = BufferChunkHighlights {
                    captures,
                    next_capture: None,
                    stack: Default::default(),
                    highlight_maps,
                };
            } else {
                // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
                // Seeking such BufferChunks is not supported.
                debug_assert!(
                    false,
                    "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
                );
            }

            highlights.captures.set_byte_range(self.range.clone());
            self.initialize_diagnostic_endpoints();
        }
    }

    /// Rebuilds the sorted list of diagnostic start/end offsets covering the
    /// current range, and resets the per-severity nesting depths.
    fn initialize_diagnostic_endpoints(&mut self) {
        if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
            && let Some(buffer) = self.buffer_snapshot
        {
            let mut diagnostic_endpoints = Vec::new();
            for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start,
                    is_start: true,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end,
                    is_start: false,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
            }
            // Sort ends before starts at equal offsets (`!is_start` sorts
            // false-before-true, i.e. starts first is inverted here).
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
            *diagnostics = diagnostic_endpoints.into_iter().peekable();
            self.hint_depth = 0;
            self.error_depth = 0;
            self.warning_depth = 0;
            self.information_depth = 0;
        }
    }

    /// The current byte offset in the buffer.
    pub fn offset(&self) -> usize {
        self.range.start
    }

    /// The remaining byte range this iterator will produce.
    pub fn range(&self) -> Range<usize> {
        self.range.clone()
    }

    /// Adjusts the nesting depth for the severity of `endpoint` as the
    /// iterator crosses a diagnostic boundary.
    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }

        if endpoint.is_unnecessary {
            if endpoint.is_start {
                self.unnecessary_depth += 1;
            } else {
                self.unnecessary_depth -= 1;
            }
        }
    }

    /// The most severe diagnostic level currently covering the iterator's
    /// position, if any (error > warning > information > hint).
    fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }

    /// Whether the current position is inside an "unnecessary code" diagnostic.
    fn current_code_is_unnecessary(&self) -> bool {
        self.unnecessary_depth > 0
    }
}
4930
impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    // Yields the next chunk of text, split at every syntax-highlight capture
    // boundary and diagnostic endpoint, annotated with the innermost
    // highlight and the current diagnostic state.
    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            // Pop captures that end at or before the current position.
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            // Push every capture that has already begun onto the stack; stop
            // at the first capture that starts ahead of the current position.
            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        // Temporarily take the endpoints to avoid borrowing `self` twice.
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            // Apply every endpoint at or before the current position; the
            // first one beyond it bounds this chunk.
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_tabs()
        {
            // Clip the chunk at the nearest capture start, diagnostic
            // endpoint, or enclosing capture end.
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }

            let slice =
                &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
            let bit_end = chunk_end - self.chunks.offset();

            // Mask selecting the low `bit_end` bits of the 128-bit per-chunk
            // tab/char bitmaps; shifting by 128 would overflow, hence the cap.
            let mask = if bit_end >= 128 {
                u128::MAX
            } else {
                (1u128 << bit_end) - 1
            };
            let tabs = (tabs >> (chunk_start - self.chunks.offset())) & mask;
            let chars_map = (chars_map >> (chunk_start - self.chunks.offset())) & mask;

            self.range.start = chunk_end;
            // Only consume the underlying chunk once fully emitted.
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars: chars_map,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}
5031
5032impl operation_queue::Operation for Operation {
5033 fn lamport_timestamp(&self) -> clock::Lamport {
5034 match self {
5035 Operation::Buffer(_) => {
5036 unreachable!("buffer operations should never be deferred at this layer")
5037 }
5038 Operation::UpdateDiagnostics {
5039 lamport_timestamp, ..
5040 }
5041 | Operation::UpdateSelections {
5042 lamport_timestamp, ..
5043 }
5044 | Operation::UpdateCompletionTriggers {
5045 lamport_timestamp, ..
5046 }
5047 | Operation::UpdateLineEnding {
5048 lamport_timestamp, ..
5049 } => *lamport_timestamp,
5050 }
5051 }
5052}
5053
5054impl Default for Diagnostic {
5055 fn default() -> Self {
5056 Self {
5057 source: Default::default(),
5058 source_kind: DiagnosticSourceKind::Other,
5059 code: None,
5060 code_description: None,
5061 severity: DiagnosticSeverity::ERROR,
5062 message: Default::default(),
5063 markdown: None,
5064 group_id: 0,
5065 is_primary: false,
5066 is_disk_based: false,
5067 is_unnecessary: false,
5068 underline: true,
5069 data: None,
5070 }
5071 }
5072}
5073
5074impl IndentSize {
5075 /// Returns an [`IndentSize`] representing the given spaces.
5076 pub fn spaces(len: u32) -> Self {
5077 Self {
5078 len,
5079 kind: IndentKind::Space,
5080 }
5081 }
5082
5083 /// Returns an [`IndentSize`] representing a tab.
5084 pub fn tab() -> Self {
5085 Self {
5086 len: 1,
5087 kind: IndentKind::Tab,
5088 }
5089 }
5090
5091 /// An iterator over the characters represented by this [`IndentSize`].
5092 pub fn chars(&self) -> impl Iterator<Item = char> {
5093 iter::repeat(self.char()).take(self.len as usize)
5094 }
5095
5096 /// The character representation of this [`IndentSize`].
5097 pub fn char(&self) -> char {
5098 match self.kind {
5099 IndentKind::Space => ' ',
5100 IndentKind::Tab => '\t',
5101 }
5102 }
5103
5104 /// Consumes the current [`IndentSize`] and returns a new one that has
5105 /// been shrunk or enlarged by the given size along the given direction.
5106 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5107 match direction {
5108 Ordering::Less => {
5109 if self.kind == size.kind && self.len >= size.len {
5110 self.len -= size.len;
5111 }
5112 }
5113 Ordering::Equal => {}
5114 Ordering::Greater => {
5115 if self.len == 0 {
5116 self = size;
5117 } else if self.kind == size.kind {
5118 self.len += size.len;
5119 }
5120 }
5121 }
5122 self
5123 }
5124
5125 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5126 match self.kind {
5127 IndentKind::Space => self.len as usize,
5128 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5129 }
5130 }
5131}
5132
#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    /// Worktree-relative path of the file.
    pub path: Arc<RelPath>,
    /// Name of the worktree root this file belongs to.
    pub root_name: String,
    /// Absolute directory containing the worktree root; when `Some`, this
    /// file also behaves as a `LocalFile` (see `as_local`).
    pub local_root: Option<PathBuf>,
}
5139
#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    /// Root name joined with the worktree-relative path.
    fn full_path(&self, _: &gpui::App) -> PathBuf {
        let mut full_path = PathBuf::from(self.root_name.clone());
        full_path.push(self.path.as_std_path());
        full_path
    }

    /// Acts as a local file only when a local root directory was supplied.
    fn as_local(&self) -> Option<&dyn LocalFile> {
        match self.local_root {
            Some(_) => Some(self),
            None => None,
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    /// Falls back to the root name for paths without a file name component.
    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        match self.path().file_name() {
            Some(file_name) => file_name,
            None => self.root_name.as_ref(),
        }
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}
5182
#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    /// Absolute path: local root, then root name, then the relative path.
    /// Panics if `local_root` is `None` (callers must check `as_local` first).
    fn abs_path(&self, _cx: &App) -> PathBuf {
        let local_root = self.local_root.as_ref().unwrap();
        local_root
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}
5199
/// Coalesces an ascending sequence of `u32` values into contiguous ranges,
/// capping each emitted range at `max_len` elements.
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    // Range currently being grown; emitted once a gap appears, the length
    // cap is hit, or the input runs out.
    let mut pending: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        for value in values.by_ref() {
            match &mut pending {
                // Extend the open range while values stay contiguous and
                // the cap has not been reached.
                Some(range) if value == range.end && range.len() < max_len => {
                    range.end += 1;
                }
                // Start a fresh range; yield the finished one, if any.
                _ => {
                    let finished = pending.replace(value..value + 1);
                    if finished.is_some() {
                        return finished;
                    }
                }
            }
        }
        // Input exhausted: flush whatever range is still open.
        pending.take()
    })
}
5228
/// Classifies characters as word, whitespace, or punctuation, honoring the
/// extra word characters declared by an optional language scope.
#[derive(Default, Debug)]
pub struct CharClassifier {
    // Language scope whose character sets can promote additional characters
    // to `CharKind::Word`; `None` uses purely Unicode-based classification.
    scope: Option<LanguageScope>,
    // Selects which scope character set to consult (completion queries,
    // linked edits, or — when `None` — plain word characters).
    scope_context: Option<CharScopeContext>,
    // When true, `kind` reports punctuation as `CharKind::Word`.
    ignore_punctuation: bool,
}
5235
5236impl CharClassifier {
5237 pub fn new(scope: Option<LanguageScope>) -> Self {
5238 Self {
5239 scope,
5240 scope_context: None,
5241 ignore_punctuation: false,
5242 }
5243 }
5244
5245 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5246 Self {
5247 scope_context,
5248 ..self
5249 }
5250 }
5251
5252 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5253 Self {
5254 ignore_punctuation,
5255 ..self
5256 }
5257 }
5258
5259 pub fn is_whitespace(&self, c: char) -> bool {
5260 self.kind(c) == CharKind::Whitespace
5261 }
5262
5263 pub fn is_word(&self, c: char) -> bool {
5264 self.kind(c) == CharKind::Word
5265 }
5266
5267 pub fn is_punctuation(&self, c: char) -> bool {
5268 self.kind(c) == CharKind::Punctuation
5269 }
5270
5271 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5272 if c.is_alphanumeric() || c == '_' {
5273 return CharKind::Word;
5274 }
5275
5276 if let Some(scope) = &self.scope {
5277 let characters = match self.scope_context {
5278 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5279 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5280 None => scope.word_characters(),
5281 };
5282 if let Some(characters) = characters
5283 && characters.contains(&c)
5284 {
5285 return CharKind::Word;
5286 }
5287 }
5288
5289 if c.is_whitespace() {
5290 return CharKind::Whitespace;
5291 }
5292
5293 if ignore_punctuation {
5294 CharKind::Word
5295 } else {
5296 CharKind::Punctuation
5297 }
5298 }
5299
5300 pub fn kind(&self, c: char) -> CharKind {
5301 self.kind_with(c, self.ignore_punctuation)
5302 }
5303}
5304
/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    // Whitespace run still open at the end of the previous chunk; it may
    // continue into the first segment of the next chunk, since chunk
    // boundaries can fall mid-line.
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            // Only spaces and tabs count as trailing whitespace here.
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            // The first segment of a chunk continues the previous chunk's
            // last line; if it is entirely whitespace, extend the run that
            // was carried over across the chunk boundary.
            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            // Reaching this segment means a '\n' terminated the previous
            // one, so its trailing-whitespace run is final; record it.
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            // Advance past this segment plus the '\n' separator.
            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        // The final segment of a chunk has no '\n' after it; undo the
        // separator adjustment made by the last loop iteration.
        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    // Flush the run left open by the rope's final line.
    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}