1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 future::Future,
45 iter::{self, Iterator, Peekable},
46 mem,
47 num::NonZeroU32,
48 ops::{Deref, Range},
49 path::PathBuf,
50 rc,
51 sync::{Arc, LazyLock},
52 time::{Duration, Instant},
53 vec,
54};
55use sum_tree::TreeMap;
56use text::operation_queue::OperationQueue;
57use text::*;
58pub use text::{
59 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
60 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
61 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
62 ToPointUtf16, Transaction, TransactionId, Unclipped,
63};
64use theme::{ActiveTheme as _, SyntaxTheme};
65#[cfg(any(test, feature = "test-support"))]
66use util::RandomCharIter;
67use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
68
69#[cfg(any(test, feature = "test-support"))]
70pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
71
72pub use lsp::DiagnosticSeverity;
73
74/// A label for the background task spawned by the buffer to compute
75/// a diff against the contents of its file.
76pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
77
78/// Indicate whether a [`Buffer`] has permissions to edit.
79#[derive(PartialEq, Clone, Copy, Debug)]
80pub enum Capability {
81 /// The buffer is a mutable replica.
82 ReadWrite,
83 /// The buffer is a read-only replica.
84 ReadOnly,
85}
86
87pub type BufferRow = u32;
88
89/// An in-memory representation of a source code file, including its text,
90/// syntax trees, git status, and diagnostics.
91pub struct Buffer {
92 text: TextBuffer,
93 branch_state: Option<BufferBranchState>,
94 /// Filesystem state, `None` when there is no path.
95 file: Option<Arc<dyn File>>,
96 /// The mtime of the file when this buffer was last loaded from
97 /// or saved to disk.
98 saved_mtime: Option<MTime>,
99 /// The version vector when this buffer was last loaded from
100 /// or saved to disk.
101 saved_version: clock::Global,
102 preview_version: clock::Global,
103 transaction_depth: usize,
104 was_dirty_before_starting_transaction: Option<bool>,
105 reload_task: Option<Task<Result<()>>>,
106 language: Option<Arc<Language>>,
107 autoindent_requests: Vec<Arc<AutoindentRequest>>,
108 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
109 pending_autoindent: Option<Task<()>>,
110 sync_parse_timeout: Duration,
111 syntax_map: Mutex<SyntaxMap>,
112 reparse: Option<Task<()>>,
113 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
114 non_text_state_update_count: usize,
115 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
116 remote_selections: TreeMap<ReplicaId, SelectionSet>,
117 diagnostics_timestamp: clock::Lamport,
118 completion_triggers: BTreeSet<String>,
119 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
120 completion_triggers_timestamp: clock::Lamport,
121 deferred_ops: OperationQueue<Operation>,
122 capability: Capability,
123 has_conflict: bool,
124 /// Memoize calls to has_changes_since(saved_version).
125 /// The contents of a cell are (self.version, has_changes) at the time of a last call.
126 has_unsaved_edits: Cell<(clock::Global, bool)>,
127 change_bits: Vec<rc::Weak<Cell<bool>>>,
128 _subscriptions: Vec<gpui::Subscription>,
129}
130
131#[derive(Copy, Clone, Debug, PartialEq, Eq)]
132pub enum ParseStatus {
133 Idle,
134 Parsing,
135}
136
137struct BufferBranchState {
138 base_buffer: Entity<Buffer>,
139 merged_operations: Vec<Lamport>,
140}
141
142/// An immutable, cheaply cloneable representation of a fixed
143/// state of a buffer.
144pub struct BufferSnapshot {
145 pub text: text::BufferSnapshot,
146 pub syntax: SyntaxSnapshot,
147 file: Option<Arc<dyn File>>,
148 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
149 remote_selections: TreeMap<ReplicaId, SelectionSet>,
150 language: Option<Arc<Language>>,
151 non_text_state_update_count: usize,
152}
153
154/// The kind and amount of indentation in a particular line. For now,
155/// assumes that indentation is all the same character.
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
157pub struct IndentSize {
158 /// The number of bytes that comprise the indentation.
159 pub len: u32,
160 /// The kind of whitespace used for indentation.
161 pub kind: IndentKind,
162}
163
164/// A whitespace character that's used for indentation.
165#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
166pub enum IndentKind {
167 /// An ASCII space character.
168 #[default]
169 Space,
170 /// An ASCII tab character.
171 Tab,
172}
173
174/// The shape of a selection cursor.
175#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
176pub enum CursorShape {
177 /// A vertical bar
178 #[default]
179 Bar,
180 /// A block that surrounds the following character
181 Block,
182 /// An underline that runs along the following character
183 Underline,
184 /// A box drawn around the following character
185 Hollow,
186}
187
188impl From<settings::CursorShape> for CursorShape {
189 fn from(shape: settings::CursorShape) -> Self {
190 match shape {
191 settings::CursorShape::Bar => CursorShape::Bar,
192 settings::CursorShape::Block => CursorShape::Block,
193 settings::CursorShape::Underline => CursorShape::Underline,
194 settings::CursorShape::Hollow => CursorShape::Hollow,
195 }
196 }
197}
198
199#[derive(Clone, Debug)]
200struct SelectionSet {
201 line_mode: bool,
202 cursor_shape: CursorShape,
203 selections: Arc<[Selection<Anchor>]>,
204 lamport_timestamp: clock::Lamport,
205}
206
207/// A diagnostic associated with a certain range of a buffer.
208#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
209pub struct Diagnostic {
210 /// The name of the service that produced this diagnostic.
211 pub source: Option<String>,
212 /// A machine-readable code that identifies this diagnostic.
213 pub code: Option<NumberOrString>,
214 pub code_description: Option<lsp::Uri>,
215 /// Whether this diagnostic is a hint, warning, or error.
216 pub severity: DiagnosticSeverity,
217 /// The human-readable message associated with this diagnostic.
218 pub message: String,
219 /// The human-readable message (in markdown format)
220 pub markdown: Option<String>,
221 /// An id that identifies the group to which this diagnostic belongs.
222 ///
223 /// When a language server produces a diagnostic with
224 /// one or more associated diagnostics, those diagnostics are all
225 /// assigned a single group ID.
226 pub group_id: usize,
227 /// Whether this diagnostic is the primary diagnostic for its group.
228 ///
229 /// In a given group, the primary diagnostic is the top-level diagnostic
230 /// returned by the language server. The non-primary diagnostics are the
231 /// associated diagnostics.
232 pub is_primary: bool,
233 /// Whether this diagnostic is considered to originate from an analysis of
234 /// files on disk, as opposed to any unsaved buffer contents. This is a
235 /// property of a given diagnostic source, and is configured for a given
236 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
237 /// for the language server.
238 pub is_disk_based: bool,
239 /// Whether this diagnostic marks unnecessary code.
240 pub is_unnecessary: bool,
241 /// Quick separation of diagnostics groups based by their source.
242 pub source_kind: DiagnosticSourceKind,
243 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
244 pub data: Option<Value>,
245 /// Whether to underline the corresponding text range in the editor.
246 pub underline: bool,
247}
248
249#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
250pub enum DiagnosticSourceKind {
251 Pulled,
252 Pushed,
253 Other,
254}
255
256/// An operation used to synchronize this buffer with its other replicas.
257#[derive(Clone, Debug, PartialEq)]
258pub enum Operation {
259 /// A text operation.
260 Buffer(text::Operation),
261
262 /// An update to the buffer's diagnostics.
263 UpdateDiagnostics {
264 /// The id of the language server that produced the new diagnostics.
265 server_id: LanguageServerId,
266 /// The diagnostics.
267 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
268 /// The buffer's lamport timestamp.
269 lamport_timestamp: clock::Lamport,
270 },
271
272 /// An update to the most recent selections in this buffer.
273 UpdateSelections {
274 /// The selections.
275 selections: Arc<[Selection<Anchor>]>,
276 /// The buffer's lamport timestamp.
277 lamport_timestamp: clock::Lamport,
278 /// Whether the selections are in 'line mode'.
279 line_mode: bool,
280 /// The [`CursorShape`] associated with these selections.
281 cursor_shape: CursorShape,
282 },
283
284 /// An update to the characters that should trigger autocompletion
285 /// for this buffer.
286 UpdateCompletionTriggers {
287 /// The characters that trigger autocompletion.
288 triggers: Vec<String>,
289 /// The buffer's lamport timestamp.
290 lamport_timestamp: clock::Lamport,
291 /// The language server ID.
292 server_id: LanguageServerId,
293 },
294
295 /// An update to the line ending type of this buffer.
296 UpdateLineEnding {
297 /// The line ending type.
298 line_ending: LineEnding,
299 /// The buffer's lamport timestamp.
300 lamport_timestamp: clock::Lamport,
301 },
302}
303
304/// An event that occurs in a buffer.
305#[derive(Clone, Debug, PartialEq)]
306pub enum BufferEvent {
307 /// The buffer was changed in a way that must be
308 /// propagated to its other replicas.
309 Operation {
310 operation: Operation,
311 is_local: bool,
312 },
313 /// The buffer was edited.
314 Edited,
315 /// The buffer's `dirty` bit changed.
316 DirtyChanged,
317 /// The buffer was saved.
318 Saved,
319 /// The buffer's file was changed on disk.
320 FileHandleChanged,
321 /// The buffer was reloaded.
322 Reloaded,
323 /// The buffer is in need of a reload
324 ReloadNeeded,
325 /// The buffer's language was changed.
326 LanguageChanged,
327 /// The buffer's syntax trees were updated.
328 Reparsed,
329 /// The buffer's diagnostics were updated.
330 DiagnosticsUpdated,
331 /// The buffer gained or lost editing capabilities.
332 CapabilityChanged,
333}
334
335/// The file associated with a buffer.
336pub trait File: Send + Sync + Any {
337 /// Returns the [`LocalFile`] associated with this file, if the
338 /// file is local.
339 fn as_local(&self) -> Option<&dyn LocalFile>;
340
341 /// Returns whether this file is local.
342 fn is_local(&self) -> bool {
343 self.as_local().is_some()
344 }
345
346 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
347 /// only available in some states, such as modification time.
348 fn disk_state(&self) -> DiskState;
349
350 /// Returns the path of this file relative to the worktree's root directory.
351 fn path(&self) -> &Arc<RelPath>;
352
353 /// Returns the path of this file relative to the worktree's parent directory (this means it
354 /// includes the name of the worktree's root folder).
355 fn full_path(&self, cx: &App) -> PathBuf;
356
357 /// Returns the path style of this file.
358 fn path_style(&self, cx: &App) -> PathStyle;
359
360 /// Returns the last component of this handle's absolute path. If this handle refers to the root
361 /// of its worktree, then this method will return the name of the worktree itself.
362 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
363
364 /// Returns the id of the worktree to which this file belongs.
365 ///
366 /// This is needed for looking up project-specific settings.
367 fn worktree_id(&self, cx: &App) -> WorktreeId;
368
369 /// Converts this file into a protobuf message.
370 fn to_proto(&self, cx: &App) -> rpc::proto::File;
371
372 /// Return whether Zed considers this to be a private file.
373 fn is_private(&self) -> bool;
374}
375
376/// The file's storage status - whether it's stored (`Present`), and if so when it was last
377/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
378/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
379/// indicator for new files.
380#[derive(Copy, Clone, Debug, PartialEq)]
381pub enum DiskState {
382 /// File created in Zed that has not been saved.
383 New,
384 /// File present on the filesystem.
385 Present { mtime: MTime },
386 /// Deleted file that was previously present.
387 Deleted,
388}
389
390impl DiskState {
391 /// Returns the file's last known modification time on disk.
392 pub fn mtime(self) -> Option<MTime> {
393 match self {
394 DiskState::New => None,
395 DiskState::Present { mtime } => Some(mtime),
396 DiskState::Deleted => None,
397 }
398 }
399
400 pub fn exists(&self) -> bool {
401 match self {
402 DiskState::New => false,
403 DiskState::Present { .. } => true,
404 DiskState::Deleted => false,
405 }
406 }
407}
408
409/// The file associated with a buffer, in the case where the file is on the local disk.
410pub trait LocalFile: File {
411 /// Returns the absolute path of this file
412 fn abs_path(&self, cx: &App) -> PathBuf;
413
414 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
415 fn load(&self, cx: &App) -> Task<Result<String>>;
416
417 /// Loads the file's contents from disk.
418 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
419}
420
421/// The auto-indent behavior associated with an editing operation.
422/// For some editing operations, each affected line of text has its
423/// indentation recomputed. For other operations, the entire block
424/// of edited text is adjusted uniformly.
425#[derive(Clone, Debug)]
426pub enum AutoindentMode {
427 /// Indent each line of inserted text.
428 EachLine,
429 /// Apply the same indentation adjustment to all of the lines
430 /// in a given insertion.
431 Block {
432 /// The original indentation column of the first line of each
433 /// insertion, if it has been copied.
434 ///
435 /// Knowing this makes it possible to preserve the relative indentation
436 /// of every line in the insertion from when it was copied.
437 ///
438 /// If the original indent column is `a`, and the first line of insertion
439 /// is then auto-indented to column `b`, then every other line of
440 /// the insertion will be auto-indented to column `b - a`
441 original_indent_columns: Vec<Option<u32>>,
442 },
443}
444
445#[derive(Clone)]
446struct AutoindentRequest {
447 before_edit: BufferSnapshot,
448 entries: Vec<AutoindentRequestEntry>,
449 is_block_mode: bool,
450 ignore_empty_lines: bool,
451}
452
453#[derive(Debug, Clone)]
454struct AutoindentRequestEntry {
455 /// A range of the buffer whose indentation should be adjusted.
456 range: Range<Anchor>,
457 /// Whether or not these lines should be considered brand new, for the
458 /// purpose of auto-indent. When text is not new, its indentation will
459 /// only be adjusted if the suggested indentation level has *changed*
460 /// since the edit was made.
461 first_line_is_new: bool,
462 indent_size: IndentSize,
463 original_indent_column: Option<u32>,
464}
465
466#[derive(Debug)]
467struct IndentSuggestion {
468 basis_row: u32,
469 delta: Ordering,
470 within_error: bool,
471}
472
473struct BufferChunkHighlights<'a> {
474 captures: SyntaxMapCaptures<'a>,
475 next_capture: Option<SyntaxMapCapture<'a>>,
476 stack: Vec<(usize, HighlightId)>,
477 highlight_maps: Vec<HighlightMap>,
478}
479
480/// An iterator that yields chunks of a buffer's text, along with their
481/// syntax highlights and diagnostic status.
482pub struct BufferChunks<'a> {
483 buffer_snapshot: Option<&'a BufferSnapshot>,
484 range: Range<usize>,
485 chunks: text::Chunks<'a>,
486 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
487 error_depth: usize,
488 warning_depth: usize,
489 information_depth: usize,
490 hint_depth: usize,
491 unnecessary_depth: usize,
492 underline: bool,
493 highlights: Option<BufferChunkHighlights<'a>>,
494}
495
496/// A chunk of a buffer's text, along with its syntax highlight and
497/// diagnostic status.
498#[derive(Clone, Debug, Default)]
499pub struct Chunk<'a> {
500 /// The text of the chunk.
501 pub text: &'a str,
502 /// The syntax highlighting style of the chunk.
503 pub syntax_highlight_id: Option<HighlightId>,
504 /// The highlight style that has been applied to this chunk in
505 /// the editor.
506 pub highlight_style: Option<HighlightStyle>,
507 /// The severity of diagnostic associated with this chunk, if any.
508 pub diagnostic_severity: Option<DiagnosticSeverity>,
509 /// A bitset of which characters are tabs in this string.
510 pub tabs: u128,
511 /// Bitmap of character indices in this chunk
512 pub chars: u128,
513 /// Whether this chunk of text is marked as unnecessary.
514 pub is_unnecessary: bool,
515 /// Whether this chunk of text was originally a tab character.
516 pub is_tab: bool,
517 /// Whether this chunk of text was originally an inlay.
518 pub is_inlay: bool,
519 /// Whether to underline the corresponding text range in the editor.
520 pub underline: bool,
521}
522
523/// A set of edits to a given version of a buffer, computed asynchronously.
524#[derive(Debug)]
525pub struct Diff {
526 pub base_version: clock::Global,
527 pub line_ending: LineEnding,
528 pub edits: Vec<(Range<usize>, Arc<str>)>,
529}
530
531#[derive(Debug, Clone, Copy)]
532pub(crate) struct DiagnosticEndpoint {
533 offset: usize,
534 is_start: bool,
535 underline: bool,
536 severity: DiagnosticSeverity,
537 is_unnecessary: bool,
538}
539
540/// A class of characters, used for characterizing a run of text.
541#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
542pub enum CharKind {
543 /// Whitespace.
544 Whitespace,
545 /// Punctuation.
546 Punctuation,
547 /// Word.
548 Word,
549}
550
551/// Context for character classification within a specific scope.
552#[derive(Copy, Clone, Eq, PartialEq, Debug)]
553pub enum CharScopeContext {
554 /// Character classification for completion queries.
555 ///
556 /// This context treats certain characters as word constituents that would
557 /// normally be considered punctuation, such as '-' in Tailwind classes
558 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
559 Completion,
560 /// Character classification for linked edits.
561 ///
562 /// This context handles characters that should be treated as part of
563 /// identifiers during linked editing operations, such as '.' in JSX
564 /// component names like `<Animated.View>`.
565 LinkedEdit,
566}
567
568/// A runnable is a set of data about a region that could be resolved into a task
569pub struct Runnable {
570 pub tags: SmallVec<[RunnableTag; 1]>,
571 pub language: Arc<Language>,
572 pub buffer: BufferId,
573}
574
575#[derive(Default, Clone, Debug)]
576pub struct HighlightedText {
577 pub text: SharedString,
578 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
579}
580
581#[derive(Default, Debug)]
582struct HighlightedTextBuilder {
583 pub text: String,
584 highlights: Vec<(Range<usize>, HighlightStyle)>,
585}
586
587impl HighlightedText {
588 pub fn from_buffer_range<T: ToOffset>(
589 range: Range<T>,
590 snapshot: &text::BufferSnapshot,
591 syntax_snapshot: &SyntaxSnapshot,
592 override_style: Option<HighlightStyle>,
593 syntax_theme: &SyntaxTheme,
594 ) -> Self {
595 let mut highlighted_text = HighlightedTextBuilder::default();
596 highlighted_text.add_text_from_buffer_range(
597 range,
598 snapshot,
599 syntax_snapshot,
600 override_style,
601 syntax_theme,
602 );
603 highlighted_text.build()
604 }
605
606 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
607 gpui::StyledText::new(self.text.clone())
608 .with_default_highlights(default_style, self.highlights.iter().cloned())
609 }
610
611 /// Returns the first line without leading whitespace unless highlighted
612 /// and a boolean indicating if there are more lines after
613 pub fn first_line_preview(self) -> (Self, bool) {
614 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
615 let first_line = &self.text[..newline_ix];
616
617 // Trim leading whitespace, unless an edit starts prior to it.
618 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
619 if let Some((first_highlight_range, _)) = self.highlights.first() {
620 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
621 }
622
623 let preview_text = &first_line[preview_start_ix..];
624 let preview_highlights = self
625 .highlights
626 .into_iter()
627 .skip_while(|(range, _)| range.end <= preview_start_ix)
628 .take_while(|(range, _)| range.start < newline_ix)
629 .filter_map(|(mut range, highlight)| {
630 range.start = range.start.saturating_sub(preview_start_ix);
631 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
632 if range.is_empty() {
633 None
634 } else {
635 Some((range, highlight))
636 }
637 });
638
639 let preview = Self {
640 text: SharedString::new(preview_text),
641 highlights: preview_highlights.collect(),
642 };
643
644 (preview, self.text.len() > newline_ix)
645 }
646}
647
648impl HighlightedTextBuilder {
649 pub fn build(self) -> HighlightedText {
650 HighlightedText {
651 text: self.text.into(),
652 highlights: self.highlights,
653 }
654 }
655
656 pub fn add_text_from_buffer_range<T: ToOffset>(
657 &mut self,
658 range: Range<T>,
659 snapshot: &text::BufferSnapshot,
660 syntax_snapshot: &SyntaxSnapshot,
661 override_style: Option<HighlightStyle>,
662 syntax_theme: &SyntaxTheme,
663 ) {
664 let range = range.to_offset(snapshot);
665 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
666 let start = self.text.len();
667 self.text.push_str(chunk.text);
668 let end = self.text.len();
669
670 if let Some(highlight_style) = chunk
671 .syntax_highlight_id
672 .and_then(|id| id.style(syntax_theme))
673 {
674 let highlight_style = override_style.map_or(highlight_style, |override_style| {
675 highlight_style.highlight(override_style)
676 });
677 self.highlights.push((start..end, highlight_style));
678 } else if let Some(override_style) = override_style {
679 self.highlights.push((start..end, override_style));
680 }
681 }
682 }
683
684 fn highlighted_chunks<'a>(
685 range: Range<usize>,
686 snapshot: &'a text::BufferSnapshot,
687 syntax_snapshot: &'a SyntaxSnapshot,
688 ) -> BufferChunks<'a> {
689 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
690 grammar
691 .highlights_config
692 .as_ref()
693 .map(|config| &config.query)
694 });
695
696 let highlight_maps = captures
697 .grammars()
698 .iter()
699 .map(|grammar| grammar.highlight_map())
700 .collect();
701
702 BufferChunks::new(
703 snapshot.as_rope(),
704 range,
705 Some((captures, highlight_maps)),
706 false,
707 None,
708 )
709 }
710}
711
712#[derive(Clone)]
713pub struct EditPreview {
714 old_snapshot: text::BufferSnapshot,
715 applied_edits_snapshot: text::BufferSnapshot,
716 syntax_snapshot: SyntaxSnapshot,
717}
718
719impl EditPreview {
720 pub fn highlight_edits(
721 &self,
722 current_snapshot: &BufferSnapshot,
723 edits: &[(Range<Anchor>, impl AsRef<str>)],
724 include_deletions: bool,
725 cx: &App,
726 ) -> HighlightedText {
727 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
728 return HighlightedText::default();
729 };
730
731 let mut highlighted_text = HighlightedTextBuilder::default();
732
733 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
734
735 let insertion_highlight_style = HighlightStyle {
736 background_color: Some(cx.theme().status().created_background),
737 ..Default::default()
738 };
739 let deletion_highlight_style = HighlightStyle {
740 background_color: Some(cx.theme().status().deleted_background),
741 ..Default::default()
742 };
743 let syntax_theme = cx.theme().syntax();
744
745 for (range, edit_text) in edits {
746 let edit_new_end_in_preview_snapshot = range
747 .end
748 .bias_right(&self.old_snapshot)
749 .to_offset(&self.applied_edits_snapshot);
750 let edit_start_in_preview_snapshot =
751 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
752
753 let unchanged_range_in_preview_snapshot =
754 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
755 if !unchanged_range_in_preview_snapshot.is_empty() {
756 highlighted_text.add_text_from_buffer_range(
757 unchanged_range_in_preview_snapshot,
758 &self.applied_edits_snapshot,
759 &self.syntax_snapshot,
760 None,
761 syntax_theme,
762 );
763 }
764
765 let range_in_current_snapshot = range.to_offset(current_snapshot);
766 if include_deletions && !range_in_current_snapshot.is_empty() {
767 highlighted_text.add_text_from_buffer_range(
768 range_in_current_snapshot,
769 ¤t_snapshot.text,
770 ¤t_snapshot.syntax,
771 Some(deletion_highlight_style),
772 syntax_theme,
773 );
774 }
775
776 if !edit_text.as_ref().is_empty() {
777 highlighted_text.add_text_from_buffer_range(
778 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
779 &self.applied_edits_snapshot,
780 &self.syntax_snapshot,
781 Some(insertion_highlight_style),
782 syntax_theme,
783 );
784 }
785
786 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
787 }
788
789 highlighted_text.add_text_from_buffer_range(
790 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
791 &self.applied_edits_snapshot,
792 &self.syntax_snapshot,
793 None,
794 syntax_theme,
795 );
796
797 highlighted_text.build()
798 }
799
800 fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<usize>> {
801 let (first, _) = edits.first()?;
802 let (last, _) = edits.last()?;
803
804 let start = first
805 .start
806 .bias_left(&self.old_snapshot)
807 .to_point(&self.applied_edits_snapshot);
808 let end = last
809 .end
810 .bias_right(&self.old_snapshot)
811 .to_point(&self.applied_edits_snapshot);
812
813 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
814 let range = Point::new(start.row, 0)
815 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
816
817 Some(range.to_offset(&self.applied_edits_snapshot))
818 }
819}
820
821#[derive(Clone, Debug, PartialEq, Eq)]
822pub struct BracketMatch {
823 pub open_range: Range<usize>,
824 pub close_range: Range<usize>,
825 pub newline_only: bool,
826}
827
828impl Buffer {
829 /// Create a new buffer with the given base text.
830 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
831 Self::build(
832 TextBuffer::new(
833 ReplicaId::LOCAL,
834 cx.entity_id().as_non_zero_u64().into(),
835 base_text.into(),
836 ),
837 None,
838 Capability::ReadWrite,
839 )
840 }
841
842 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
843 pub fn local_normalized(
844 base_text_normalized: Rope,
845 line_ending: LineEnding,
846 cx: &Context<Self>,
847 ) -> Self {
848 Self::build(
849 TextBuffer::new_normalized(
850 ReplicaId::LOCAL,
851 cx.entity_id().as_non_zero_u64().into(),
852 line_ending,
853 base_text_normalized,
854 ),
855 None,
856 Capability::ReadWrite,
857 )
858 }
859
860 /// Create a new buffer that is a replica of a remote buffer.
861 pub fn remote(
862 remote_id: BufferId,
863 replica_id: ReplicaId,
864 capability: Capability,
865 base_text: impl Into<String>,
866 ) -> Self {
867 Self::build(
868 TextBuffer::new(replica_id, remote_id, base_text.into()),
869 None,
870 capability,
871 )
872 }
873
874 /// Create a new buffer that is a replica of a remote buffer, populating its
875 /// state from the given protobuf message.
876 pub fn from_proto(
877 replica_id: ReplicaId,
878 capability: Capability,
879 message: proto::BufferState,
880 file: Option<Arc<dyn File>>,
881 ) -> Result<Self> {
882 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
883 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
884 let mut this = Self::build(buffer, file, capability);
885 this.text.set_line_ending(proto::deserialize_line_ending(
886 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
887 ));
888 this.saved_version = proto::deserialize_version(&message.saved_version);
889 this.saved_mtime = message.saved_mtime.map(|time| time.into());
890 Ok(this)
891 }
892
893 /// Serialize the buffer's state to a protobuf message.
894 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
895 proto::BufferState {
896 id: self.remote_id().into(),
897 file: self.file.as_ref().map(|f| f.to_proto(cx)),
898 base_text: self.base_text().to_string(),
899 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
900 saved_version: proto::serialize_version(&self.saved_version),
901 saved_mtime: self.saved_mtime.map(|time| time.into()),
902 }
903 }
904
905 /// Serialize as protobufs all of the changes to the buffer since the given version.
906 pub fn serialize_ops(
907 &self,
908 since: Option<clock::Global>,
909 cx: &App,
910 ) -> Task<Vec<proto::Operation>> {
911 let mut operations = Vec::new();
912 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
913
914 operations.extend(self.remote_selections.iter().map(|(_, set)| {
915 proto::serialize_operation(&Operation::UpdateSelections {
916 selections: set.selections.clone(),
917 lamport_timestamp: set.lamport_timestamp,
918 line_mode: set.line_mode,
919 cursor_shape: set.cursor_shape,
920 })
921 }));
922
923 for (server_id, diagnostics) in &self.diagnostics {
924 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
925 lamport_timestamp: self.diagnostics_timestamp,
926 server_id: *server_id,
927 diagnostics: diagnostics.iter().cloned().collect(),
928 }));
929 }
930
931 for (server_id, completions) in &self.completion_triggers_per_language_server {
932 operations.push(proto::serialize_operation(
933 &Operation::UpdateCompletionTriggers {
934 triggers: completions.iter().cloned().collect(),
935 lamport_timestamp: self.completion_triggers_timestamp,
936 server_id: *server_id,
937 },
938 ));
939 }
940
941 let text_operations = self.text.operations().clone();
942 cx.background_spawn(async move {
943 let since = since.unwrap_or_default();
944 operations.extend(
945 text_operations
946 .iter()
947 .filter(|(_, op)| !since.observed(op.timestamp()))
948 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
949 );
950 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
951 operations
952 })
953 }
954
955 /// Assign a language to the buffer, returning the buffer.
956 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
957 self.set_language(Some(language), cx);
958 self
959 }
960
961 /// Returns the [`Capability`] of this buffer.
962 pub fn capability(&self) -> Capability {
963 self.capability
964 }
965
966 /// Whether this buffer can only be read.
967 pub fn read_only(&self) -> bool {
968 self.capability == Capability::ReadOnly
969 }
970
971 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
972 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
973 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
974 let snapshot = buffer.snapshot();
975 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
976 Self {
977 saved_mtime,
978 saved_version: buffer.version(),
979 preview_version: buffer.version(),
980 reload_task: None,
981 transaction_depth: 0,
982 was_dirty_before_starting_transaction: None,
983 has_unsaved_edits: Cell::new((buffer.version(), false)),
984 text: buffer,
985 branch_state: None,
986 file,
987 capability,
988 syntax_map,
989 reparse: None,
990 non_text_state_update_count: 0,
991 sync_parse_timeout: Duration::from_millis(1),
992 parse_status: watch::channel(ParseStatus::Idle),
993 autoindent_requests: Default::default(),
994 wait_for_autoindent_txs: Default::default(),
995 pending_autoindent: Default::default(),
996 language: None,
997 remote_selections: Default::default(),
998 diagnostics: Default::default(),
999 diagnostics_timestamp: Lamport::MIN,
1000 completion_triggers: Default::default(),
1001 completion_triggers_per_language_server: Default::default(),
1002 completion_triggers_timestamp: Lamport::MIN,
1003 deferred_ops: OperationQueue::new(),
1004 has_conflict: false,
1005 change_bits: Default::default(),
1006 _subscriptions: Vec::new(),
1007 }
1008 }
1009
1010 pub fn build_snapshot(
1011 text: Rope,
1012 language: Option<Arc<Language>>,
1013 language_registry: Option<Arc<LanguageRegistry>>,
1014 cx: &mut App,
1015 ) -> impl Future<Output = BufferSnapshot> + use<> {
1016 let entity_id = cx.reserve_entity::<Self>().entity_id();
1017 let buffer_id = entity_id.as_non_zero_u64().into();
1018 async move {
1019 let text =
1020 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1021 .snapshot();
1022 let mut syntax = SyntaxMap::new(&text).snapshot();
1023 if let Some(language) = language.clone() {
1024 let language_registry = language_registry.clone();
1025 syntax.reparse(&text, language_registry, language);
1026 }
1027 BufferSnapshot {
1028 text,
1029 syntax,
1030 file: None,
1031 diagnostics: Default::default(),
1032 remote_selections: Default::default(),
1033 language,
1034 non_text_state_update_count: 0,
1035 }
1036 }
1037 }
1038
1039 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1040 let entity_id = cx.reserve_entity::<Self>().entity_id();
1041 let buffer_id = entity_id.as_non_zero_u64().into();
1042 let text = TextBuffer::new_normalized(
1043 ReplicaId::LOCAL,
1044 buffer_id,
1045 Default::default(),
1046 Rope::new(),
1047 )
1048 .snapshot();
1049 let syntax = SyntaxMap::new(&text).snapshot();
1050 BufferSnapshot {
1051 text,
1052 syntax,
1053 file: None,
1054 diagnostics: Default::default(),
1055 remote_selections: Default::default(),
1056 language: None,
1057 non_text_state_update_count: 0,
1058 }
1059 }
1060
1061 #[cfg(any(test, feature = "test-support"))]
1062 pub fn build_snapshot_sync(
1063 text: Rope,
1064 language: Option<Arc<Language>>,
1065 language_registry: Option<Arc<LanguageRegistry>>,
1066 cx: &mut App,
1067 ) -> BufferSnapshot {
1068 let entity_id = cx.reserve_entity::<Self>().entity_id();
1069 let buffer_id = entity_id.as_non_zero_u64().into();
1070 let text =
1071 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1072 .snapshot();
1073 let mut syntax = SyntaxMap::new(&text).snapshot();
1074 if let Some(language) = language.clone() {
1075 syntax.reparse(&text, language_registry, language);
1076 }
1077 BufferSnapshot {
1078 text,
1079 syntax,
1080 file: None,
1081 diagnostics: Default::default(),
1082 remote_selections: Default::default(),
1083 language,
1084 non_text_state_update_count: 0,
1085 }
1086 }
1087
1088 /// Retrieve a snapshot of the buffer's current state. This is computationally
1089 /// cheap, and allows reading from the buffer on a background thread.
1090 pub fn snapshot(&self) -> BufferSnapshot {
1091 let text = self.text.snapshot();
1092 let mut syntax_map = self.syntax_map.lock();
1093 syntax_map.interpolate(&text);
1094 let syntax = syntax_map.snapshot();
1095
1096 BufferSnapshot {
1097 text,
1098 syntax,
1099 file: self.file.clone(),
1100 remote_selections: self.remote_selections.clone(),
1101 diagnostics: self.diagnostics.clone(),
1102 language: self.language.clone(),
1103 non_text_state_update_count: self.non_text_state_update_count,
1104 }
1105 }
1106
1107 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1108 let this = cx.entity();
1109 cx.new(|cx| {
1110 let mut branch = Self {
1111 branch_state: Some(BufferBranchState {
1112 base_buffer: this.clone(),
1113 merged_operations: Default::default(),
1114 }),
1115 language: self.language.clone(),
1116 has_conflict: self.has_conflict,
1117 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1118 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1119 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1120 };
1121 if let Some(language_registry) = self.language_registry() {
1122 branch.set_language_registry(language_registry);
1123 }
1124
1125 // Reparse the branch buffer so that we get syntax highlighting immediately.
1126 branch.reparse(cx);
1127
1128 branch
1129 })
1130 }
1131
1132 pub fn preview_edits(
1133 &self,
1134 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1135 cx: &App,
1136 ) -> Task<EditPreview> {
1137 let registry = self.language_registry();
1138 let language = self.language().cloned();
1139 let old_snapshot = self.text.snapshot();
1140 let mut branch_buffer = self.text.branch();
1141 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1142 cx.background_spawn(async move {
1143 if !edits.is_empty() {
1144 if let Some(language) = language.clone() {
1145 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1146 }
1147
1148 branch_buffer.edit(edits.iter().cloned());
1149 let snapshot = branch_buffer.snapshot();
1150 syntax_snapshot.interpolate(&snapshot);
1151
1152 if let Some(language) = language {
1153 syntax_snapshot.reparse(&snapshot, registry, language);
1154 }
1155 }
1156 EditPreview {
1157 old_snapshot,
1158 applied_edits_snapshot: branch_buffer.snapshot(),
1159 syntax_snapshot,
1160 }
1161 })
1162 }
1163
1164 /// Applies all of the changes in this buffer that intersect any of the
1165 /// given `ranges` to its base buffer.
1166 ///
1167 /// If `ranges` is empty, then all changes will be applied. This buffer must
1168 /// be a branch buffer to call this method.
1169 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1170 let Some(base_buffer) = self.base_buffer() else {
1171 debug_panic!("not a branch buffer");
1172 return;
1173 };
1174
1175 let mut ranges = if ranges.is_empty() {
1176 &[0..usize::MAX]
1177 } else {
1178 ranges.as_slice()
1179 }
1180 .iter()
1181 .peekable();
1182
1183 let mut edits = Vec::new();
1184 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1185 let mut is_included = false;
1186 while let Some(range) = ranges.peek() {
1187 if range.end < edit.new.start {
1188 ranges.next().unwrap();
1189 } else {
1190 if range.start <= edit.new.end {
1191 is_included = true;
1192 }
1193 break;
1194 }
1195 }
1196
1197 if is_included {
1198 edits.push((
1199 edit.old.clone(),
1200 self.text_for_range(edit.new.clone()).collect::<String>(),
1201 ));
1202 }
1203 }
1204
1205 let operation = base_buffer.update(cx, |base_buffer, cx| {
1206 // cx.emit(BufferEvent::DiffBaseChanged);
1207 base_buffer.edit(edits, None, cx)
1208 });
1209
1210 if let Some(operation) = operation
1211 && let Some(BufferBranchState {
1212 merged_operations, ..
1213 }) = &mut self.branch_state
1214 {
1215 merged_operations.push(operation);
1216 }
1217 }
1218
1219 fn on_base_buffer_event(
1220 &mut self,
1221 _: Entity<Buffer>,
1222 event: &BufferEvent,
1223 cx: &mut Context<Self>,
1224 ) {
1225 let BufferEvent::Operation { operation, .. } = event else {
1226 return;
1227 };
1228 let Some(BufferBranchState {
1229 merged_operations, ..
1230 }) = &mut self.branch_state
1231 else {
1232 return;
1233 };
1234
1235 let mut operation_to_undo = None;
1236 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1237 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1238 {
1239 merged_operations.remove(ix);
1240 operation_to_undo = Some(operation.timestamp);
1241 }
1242
1243 self.apply_ops([operation.clone()], cx);
1244
1245 if let Some(timestamp) = operation_to_undo {
1246 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1247 self.undo_operations(counts, cx);
1248 }
1249 }
1250
1251 #[cfg(test)]
1252 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1253 &self.text
1254 }
1255
1256 /// Retrieve a snapshot of the buffer's raw text, without any
1257 /// language-related state like the syntax tree or diagnostics.
1258 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1259 self.text.snapshot()
1260 }
1261
1262 /// The file associated with the buffer, if any.
1263 pub fn file(&self) -> Option<&Arc<dyn File>> {
1264 self.file.as_ref()
1265 }
1266
1267 /// The version of the buffer that was last saved or reloaded from disk.
1268 pub fn saved_version(&self) -> &clock::Global {
1269 &self.saved_version
1270 }
1271
1272 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1273 pub fn saved_mtime(&self) -> Option<MTime> {
1274 self.saved_mtime
1275 }
1276
1277 /// Assign a language to the buffer.
1278 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1279 self.non_text_state_update_count += 1;
1280 self.syntax_map.lock().clear(&self.text);
1281 self.language = language;
1282 self.was_changed();
1283 self.reparse(cx);
1284 cx.emit(BufferEvent::LanguageChanged);
1285 }
1286
1287 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1288 /// other languages if parts of the buffer are written in different languages.
1289 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1290 self.syntax_map
1291 .lock()
1292 .set_language_registry(language_registry);
1293 }
1294
1295 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1296 self.syntax_map.lock().language_registry()
1297 }
1298
1299 /// Assign the line ending type to the buffer.
1300 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1301 self.text.set_line_ending(line_ending);
1302
1303 let lamport_timestamp = self.text.lamport_clock.tick();
1304 self.send_operation(
1305 Operation::UpdateLineEnding {
1306 line_ending,
1307 lamport_timestamp,
1308 },
1309 true,
1310 cx,
1311 );
1312 }
1313
1314 /// Assign the buffer a new [`Capability`].
1315 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1316 if self.capability != capability {
1317 self.capability = capability;
1318 cx.emit(BufferEvent::CapabilityChanged)
1319 }
1320 }
1321
1322 /// This method is called to signal that the buffer has been saved.
1323 pub fn did_save(
1324 &mut self,
1325 version: clock::Global,
1326 mtime: Option<MTime>,
1327 cx: &mut Context<Self>,
1328 ) {
1329 self.saved_version = version.clone();
1330 self.has_unsaved_edits.set((version, false));
1331 self.has_conflict = false;
1332 self.saved_mtime = mtime;
1333 self.was_changed();
1334 cx.emit(BufferEvent::Saved);
1335 cx.notify();
1336 }
1337
1338 /// Reloads the contents of the buffer from disk.
1339 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1340 let (tx, rx) = futures::channel::oneshot::channel();
1341 let prev_version = self.text.version();
1342 self.reload_task = Some(cx.spawn(async move |this, cx| {
1343 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1344 let file = this.file.as_ref()?.as_local()?;
1345
1346 Some((file.disk_state().mtime(), file.load(cx)))
1347 })?
1348 else {
1349 return Ok(());
1350 };
1351
1352 let new_text = new_text.await?;
1353 let diff = this
1354 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1355 .await;
1356 this.update(cx, |this, cx| {
1357 if this.version() == diff.base_version {
1358 this.finalize_last_transaction();
1359 this.apply_diff(diff, cx);
1360 tx.send(this.finalize_last_transaction().cloned()).ok();
1361 this.has_conflict = false;
1362 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1363 } else {
1364 if !diff.edits.is_empty()
1365 || this
1366 .edits_since::<usize>(&diff.base_version)
1367 .next()
1368 .is_some()
1369 {
1370 this.has_conflict = true;
1371 }
1372
1373 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1374 }
1375
1376 this.reload_task.take();
1377 })
1378 }));
1379 rx
1380 }
1381
1382 /// This method is called to signal that the buffer has been reloaded.
1383 pub fn did_reload(
1384 &mut self,
1385 version: clock::Global,
1386 line_ending: LineEnding,
1387 mtime: Option<MTime>,
1388 cx: &mut Context<Self>,
1389 ) {
1390 self.saved_version = version;
1391 self.has_unsaved_edits
1392 .set((self.saved_version.clone(), false));
1393 self.text.set_line_ending(line_ending);
1394 self.saved_mtime = mtime;
1395 cx.emit(BufferEvent::Reloaded);
1396 cx.notify();
1397 }
1398
1399 /// Updates the [`File`] backing this buffer. This should be called when
1400 /// the file has changed or has been deleted.
1401 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1402 let was_dirty = self.is_dirty();
1403 let mut file_changed = false;
1404
1405 if let Some(old_file) = self.file.as_ref() {
1406 if new_file.path() != old_file.path() {
1407 file_changed = true;
1408 }
1409
1410 let old_state = old_file.disk_state();
1411 let new_state = new_file.disk_state();
1412 if old_state != new_state {
1413 file_changed = true;
1414 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1415 cx.emit(BufferEvent::ReloadNeeded)
1416 }
1417 }
1418 } else {
1419 file_changed = true;
1420 };
1421
1422 self.file = Some(new_file);
1423 if file_changed {
1424 self.was_changed();
1425 self.non_text_state_update_count += 1;
1426 if was_dirty != self.is_dirty() {
1427 cx.emit(BufferEvent::DirtyChanged);
1428 }
1429 cx.emit(BufferEvent::FileHandleChanged);
1430 cx.notify();
1431 }
1432 }
1433
1434 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1435 Some(self.branch_state.as_ref()?.base_buffer.clone())
1436 }
1437
1438 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1439 pub fn language(&self) -> Option<&Arc<Language>> {
1440 self.language.as_ref()
1441 }
1442
1443 /// Returns the [`Language`] at the given location.
1444 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1445 let offset = position.to_offset(self);
1446 let mut is_first = true;
1447 let start_anchor = self.anchor_before(offset);
1448 let end_anchor = self.anchor_after(offset);
1449 self.syntax_map
1450 .lock()
1451 .layers_for_range(offset..offset, &self.text, false)
1452 .filter(|layer| {
1453 if is_first {
1454 is_first = false;
1455 return true;
1456 }
1457
1458 layer
1459 .included_sub_ranges
1460 .map(|sub_ranges| {
1461 sub_ranges.iter().any(|sub_range| {
1462 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1463 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1464 !is_before_start && !is_after_end
1465 })
1466 })
1467 .unwrap_or(true)
1468 })
1469 .last()
1470 .map(|info| info.language.clone())
1471 .or_else(|| self.language.clone())
1472 }
1473
1474 /// Returns each [`Language`] for the active syntax layers at the given location.
1475 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1476 let offset = position.to_offset(self);
1477 let mut languages: Vec<Arc<Language>> = self
1478 .syntax_map
1479 .lock()
1480 .layers_for_range(offset..offset, &self.text, false)
1481 .map(|info| info.language.clone())
1482 .collect();
1483
1484 if languages.is_empty()
1485 && let Some(buffer_language) = self.language()
1486 {
1487 languages.push(buffer_language.clone());
1488 }
1489
1490 languages
1491 }
1492
1493 /// An integer version number that accounts for all updates besides
1494 /// the buffer's text itself (which is versioned via a version vector).
1495 pub fn non_text_state_update_count(&self) -> usize {
1496 self.non_text_state_update_count
1497 }
1498
1499 /// Whether the buffer is being parsed in the background.
1500 #[cfg(any(test, feature = "test-support"))]
1501 pub fn is_parsing(&self) -> bool {
1502 self.reparse.is_some()
1503 }
1504
1505 /// Indicates whether the buffer contains any regions that may be
1506 /// written in a language that hasn't been loaded yet.
1507 pub fn contains_unknown_injections(&self) -> bool {
1508 self.syntax_map.lock().contains_unknown_injections()
1509 }
1510
1511 #[cfg(any(test, feature = "test-support"))]
1512 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1513 self.sync_parse_timeout = timeout;
1514 }
1515
1516 /// Called after an edit to synchronize the buffer's main parse tree with
1517 /// the buffer's new underlying state.
1518 ///
1519 /// Locks the syntax map and interpolates the edits since the last reparse
1520 /// into the foreground syntax tree.
1521 ///
1522 /// Then takes a stable snapshot of the syntax map before unlocking it.
1523 /// The snapshot with the interpolated edits is sent to a background thread,
1524 /// where we ask Tree-sitter to perform an incremental parse.
1525 ///
1526 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1527 /// waiting on the parse to complete. As soon as it completes, we proceed
1528 /// synchronously, unless a 1ms timeout elapses.
1529 ///
1530 /// If we time out waiting on the parse, we spawn a second task waiting
1531 /// until the parse does complete and return with the interpolated tree still
1532 /// in the foreground. When the background parse completes, call back into
1533 /// the main thread and assign the foreground parse state.
1534 ///
1535 /// If the buffer or grammar changed since the start of the background parse,
1536 /// initiate an additional reparse recursively. To avoid concurrent parses
1537 /// for the same buffer, we only initiate a new parse if we are not already
1538 /// parsing in the background.
1539 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1540 if self.reparse.is_some() {
1541 return;
1542 }
1543 let language = if let Some(language) = self.language.clone() {
1544 language
1545 } else {
1546 return;
1547 };
1548
1549 let text = self.text_snapshot();
1550 let parsed_version = self.version();
1551
1552 let mut syntax_map = self.syntax_map.lock();
1553 syntax_map.interpolate(&text);
1554 let language_registry = syntax_map.language_registry();
1555 let mut syntax_snapshot = syntax_map.snapshot();
1556 drop(syntax_map);
1557
1558 let parse_task = cx.background_spawn({
1559 let language = language.clone();
1560 let language_registry = language_registry.clone();
1561 async move {
1562 syntax_snapshot.reparse(&text, language_registry, language);
1563 syntax_snapshot
1564 }
1565 });
1566
1567 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1568 match cx
1569 .background_executor()
1570 .block_with_timeout(self.sync_parse_timeout, parse_task)
1571 {
1572 Ok(new_syntax_snapshot) => {
1573 self.did_finish_parsing(new_syntax_snapshot, cx);
1574 self.reparse = None;
1575 }
1576 Err(parse_task) => {
1577 // todo(lw): hot foreground spawn
1578 self.reparse = Some(cx.spawn(async move |this, cx| {
1579 let new_syntax_map = cx.background_spawn(parse_task).await;
1580 this.update(cx, move |this, cx| {
1581 let grammar_changed = || {
1582 this.language.as_ref().is_none_or(|current_language| {
1583 !Arc::ptr_eq(&language, current_language)
1584 })
1585 };
1586 let language_registry_changed = || {
1587 new_syntax_map.contains_unknown_injections()
1588 && language_registry.is_some_and(|registry| {
1589 registry.version() != new_syntax_map.language_registry_version()
1590 })
1591 };
1592 let parse_again = this.version.changed_since(&parsed_version)
1593 || language_registry_changed()
1594 || grammar_changed();
1595 this.did_finish_parsing(new_syntax_map, cx);
1596 this.reparse = None;
1597 if parse_again {
1598 this.reparse(cx);
1599 }
1600 })
1601 .ok();
1602 }));
1603 }
1604 }
1605 }
1606
1607 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1608 self.was_changed();
1609 self.non_text_state_update_count += 1;
1610 self.syntax_map.lock().did_parse(syntax_snapshot);
1611 self.request_autoindent(cx);
1612 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1613 cx.emit(BufferEvent::Reparsed);
1614 cx.notify();
1615 }
1616
1617 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1618 self.parse_status.1.clone()
1619 }
1620
1621 /// Wait until the buffer is no longer parsing
1622 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1623 let mut parse_status = self.parse_status();
1624 async move {
1625 while *parse_status.borrow() != ParseStatus::Idle {
1626 if parse_status.changed().await.is_err() {
1627 break;
1628 }
1629 }
1630 }
1631 }
1632
1633 /// Assign to the buffer a set of diagnostics created by a given language server.
1634 pub fn update_diagnostics(
1635 &mut self,
1636 server_id: LanguageServerId,
1637 diagnostics: DiagnosticSet,
1638 cx: &mut Context<Self>,
1639 ) {
1640 let lamport_timestamp = self.text.lamport_clock.tick();
1641 let op = Operation::UpdateDiagnostics {
1642 server_id,
1643 diagnostics: diagnostics.iter().cloned().collect(),
1644 lamport_timestamp,
1645 };
1646
1647 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1648 self.send_operation(op, true, cx);
1649 }
1650
1651 pub fn buffer_diagnostics(
1652 &self,
1653 for_server: Option<LanguageServerId>,
1654 ) -> Vec<&DiagnosticEntry<Anchor>> {
1655 match for_server {
1656 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1657 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1658 Err(_) => Vec::new(),
1659 },
1660 None => self
1661 .diagnostics
1662 .iter()
1663 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1664 .collect(),
1665 }
1666 }
1667
1668 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1669 if let Some(indent_sizes) = self.compute_autoindents() {
1670 let indent_sizes = cx.background_spawn(indent_sizes);
1671 match cx
1672 .background_executor()
1673 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1674 {
1675 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1676 Err(indent_sizes) => {
1677 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1678 let indent_sizes = indent_sizes.await;
1679 this.update(cx, |this, cx| {
1680 this.apply_autoindents(indent_sizes, cx);
1681 })
1682 .ok();
1683 }));
1684 }
1685 }
1686 } else {
1687 self.autoindent_requests.clear();
1688 for tx in self.wait_for_autoindent_txs.drain(..) {
1689 tx.send(()).ok();
1690 }
1691 }
1692 }
1693
1694 fn compute_autoindents(
1695 &self,
1696 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1697 let max_rows_between_yields = 100;
1698 let snapshot = self.snapshot();
1699 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1700 return None;
1701 }
1702
1703 let autoindent_requests = self.autoindent_requests.clone();
1704 Some(async move {
1705 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1706 for request in autoindent_requests {
1707 // Resolve each edited range to its row in the current buffer and in the
1708 // buffer before this batch of edits.
1709 let mut row_ranges = Vec::new();
1710 let mut old_to_new_rows = BTreeMap::new();
1711 let mut language_indent_sizes_by_new_row = Vec::new();
1712 for entry in &request.entries {
1713 let position = entry.range.start;
1714 let new_row = position.to_point(&snapshot).row;
1715 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1716 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1717
1718 if !entry.first_line_is_new {
1719 let old_row = position.to_point(&request.before_edit).row;
1720 old_to_new_rows.insert(old_row, new_row);
1721 }
1722 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1723 }
1724
1725 // Build a map containing the suggested indentation for each of the edited lines
1726 // with respect to the state of the buffer before these edits. This map is keyed
1727 // by the rows for these lines in the current state of the buffer.
1728 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1729 let old_edited_ranges =
1730 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1731 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1732 let mut language_indent_size = IndentSize::default();
1733 for old_edited_range in old_edited_ranges {
1734 let suggestions = request
1735 .before_edit
1736 .suggest_autoindents(old_edited_range.clone())
1737 .into_iter()
1738 .flatten();
1739 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1740 if let Some(suggestion) = suggestion {
1741 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1742
1743 // Find the indent size based on the language for this row.
1744 while let Some((row, size)) = language_indent_sizes.peek() {
1745 if *row > new_row {
1746 break;
1747 }
1748 language_indent_size = *size;
1749 language_indent_sizes.next();
1750 }
1751
1752 let suggested_indent = old_to_new_rows
1753 .get(&suggestion.basis_row)
1754 .and_then(|from_row| {
1755 Some(old_suggestions.get(from_row).copied()?.0)
1756 })
1757 .unwrap_or_else(|| {
1758 request
1759 .before_edit
1760 .indent_size_for_line(suggestion.basis_row)
1761 })
1762 .with_delta(suggestion.delta, language_indent_size);
1763 old_suggestions
1764 .insert(new_row, (suggested_indent, suggestion.within_error));
1765 }
1766 }
1767 yield_now().await;
1768 }
1769
1770 // Compute new suggestions for each line, but only include them in the result
1771 // if they differ from the old suggestion for that line.
1772 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1773 let mut language_indent_size = IndentSize::default();
1774 for (row_range, original_indent_column) in row_ranges {
1775 let new_edited_row_range = if request.is_block_mode {
1776 row_range.start..row_range.start + 1
1777 } else {
1778 row_range.clone()
1779 };
1780
1781 let suggestions = snapshot
1782 .suggest_autoindents(new_edited_row_range.clone())
1783 .into_iter()
1784 .flatten();
1785 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1786 if let Some(suggestion) = suggestion {
1787 // Find the indent size based on the language for this row.
1788 while let Some((row, size)) = language_indent_sizes.peek() {
1789 if *row > new_row {
1790 break;
1791 }
1792 language_indent_size = *size;
1793 language_indent_sizes.next();
1794 }
1795
1796 let suggested_indent = indent_sizes
1797 .get(&suggestion.basis_row)
1798 .copied()
1799 .map(|e| e.0)
1800 .unwrap_or_else(|| {
1801 snapshot.indent_size_for_line(suggestion.basis_row)
1802 })
1803 .with_delta(suggestion.delta, language_indent_size);
1804
1805 if old_suggestions.get(&new_row).is_none_or(
1806 |(old_indentation, was_within_error)| {
1807 suggested_indent != *old_indentation
1808 && (!suggestion.within_error || *was_within_error)
1809 },
1810 ) {
1811 indent_sizes.insert(
1812 new_row,
1813 (suggested_indent, request.ignore_empty_lines),
1814 );
1815 }
1816 }
1817 }
1818
1819 if let (true, Some(original_indent_column)) =
1820 (request.is_block_mode, original_indent_column)
1821 {
1822 let new_indent =
1823 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1824 *indent
1825 } else {
1826 snapshot.indent_size_for_line(row_range.start)
1827 };
1828 let delta = new_indent.len as i64 - original_indent_column as i64;
1829 if delta != 0 {
1830 for row in row_range.skip(1) {
1831 indent_sizes.entry(row).or_insert_with(|| {
1832 let mut size = snapshot.indent_size_for_line(row);
1833 if size.kind == new_indent.kind {
1834 match delta.cmp(&0) {
1835 Ordering::Greater => size.len += delta as u32,
1836 Ordering::Less => {
1837 size.len = size.len.saturating_sub(-delta as u32)
1838 }
1839 Ordering::Equal => {}
1840 }
1841 }
1842 (size, request.ignore_empty_lines)
1843 });
1844 }
1845 }
1846 }
1847
1848 yield_now().await;
1849 }
1850 }
1851
1852 indent_sizes
1853 .into_iter()
1854 .filter_map(|(row, (indent, ignore_empty_lines))| {
1855 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1856 None
1857 } else {
1858 Some((row, indent))
1859 }
1860 })
1861 .collect()
1862 })
1863 }
1864
1865 fn apply_autoindents(
1866 &mut self,
1867 indent_sizes: BTreeMap<u32, IndentSize>,
1868 cx: &mut Context<Self>,
1869 ) {
1870 self.autoindent_requests.clear();
1871 for tx in self.wait_for_autoindent_txs.drain(..) {
1872 tx.send(()).ok();
1873 }
1874
1875 let edits: Vec<_> = indent_sizes
1876 .into_iter()
1877 .filter_map(|(row, indent_size)| {
1878 let current_size = indent_size_for_line(self, row);
1879 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1880 })
1881 .collect();
1882
1883 let preserve_preview = self.preserve_preview();
1884 self.edit(edits, None, cx);
1885 if preserve_preview {
1886 self.refresh_preview();
1887 }
1888 }
1889
1890 /// Create a minimal edit that will cause the given row to be indented
1891 /// with the given size. After applying this edit, the length of the line
1892 /// will always be at least `new_size.len`.
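    ///
    /// A minimal sketch of the kind of edit this produces (the row number and
    /// indent sizes below are arbitrary):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces inserts two spaces at the start of row 3.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```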
1893 pub fn edit_for_indent_size_adjustment(
1894 row: u32,
1895 current_size: IndentSize,
1896 new_size: IndentSize,
1897 ) -> Option<(Range<Point>, String)> {
1898 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1900 Ordering::Greater => {
1901 let point = Point::new(row, 0);
1902 Some((
1903 point..point,
1904 iter::repeat(new_size.char())
1905 .take((new_size.len - current_size.len) as usize)
1906 .collect::<String>(),
1907 ))
1908 }
1909
1910 Ordering::Less => Some((
1911 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1912 String::new(),
1913 )),
1914
1915 Ordering::Equal => None,
1916 }
1917 } else {
1918 Some((
1919 Point::new(row, 0)..Point::new(row, current_size.len),
1920 iter::repeat(new_size.char())
1921 .take(new_size.len as usize)
1922 .collect::<String>(),
1923 ))
1924 }
1925 }
1926
1927 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1928 /// and the given new text.
1929 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1930 let old_text = self.as_rope().clone();
1931 let base_version = self.version();
1932 cx.background_executor()
1933 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1934 let old_text = old_text.to_string();
1935 let line_ending = LineEnding::detect(&new_text);
1936 LineEnding::normalize(&mut new_text);
1937 let edits = text_diff(&old_text, &new_text);
1938 Diff {
1939 base_version,
1940 line_ending,
1941 edits,
1942 }
1943 })
1944 }
1945
1946 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1948 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1949 let old_text = self.as_rope().clone();
1950 let line_ending = self.line_ending();
1951 let base_version = self.version();
1952 cx.background_spawn(async move {
1953 let ranges = trailing_whitespace_ranges(&old_text);
1954 let empty = Arc::<str>::from("");
1955 Diff {
1956 base_version,
1957 line_ending,
1958 edits: ranges
1959 .into_iter()
1960 .map(|range| (range, empty.clone()))
1961 .collect(),
1962 }
1963 })
1964 }
1965
1966 /// Ensures that the buffer ends with a single newline character, and
1967 /// no other whitespace. Skips if the buffer is empty.
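    ///
    /// A minimal usage sketch (assumes an `Entity<Buffer>` named `buffer` and a
    /// `&mut App` named `cx`):
    ///
    /// ```ignore
    /// // A buffer containing "fn main() {}\n\n  " ends up containing "fn main() {}\n".
    /// buffer.update(cx, |buffer, cx| buffer.ensure_final_newline(cx));
    /// ```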
1968 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1969 let len = self.len();
1970 if len == 0 {
1971 return;
1972 }
1973 let mut offset = len;
1974 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1975 let non_whitespace_len = chunk
1976 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1977 .len();
1978 offset -= chunk.len();
1979 offset += non_whitespace_len;
1980 if non_whitespace_len != 0 {
1981 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1982 return;
1983 }
1984 break;
1985 }
1986 }
1987 self.edit([(offset..len, "\n")], None, cx);
1988 }
1989
1990 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1991 /// calculated, then adjust the diff to account for those changes, and discard any
1992 /// parts of the diff that conflict with those changes.
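    ///
    /// A minimal sketch of pairing this with [`Buffer::diff`] (assumes an
    /// `Entity<Buffer>` named `buffer`, a `String` named `new_text`, and a
    /// `&mut App` named `cx`):
    ///
    /// ```ignore
    /// // Compute the diff on a background thread, then apply it back to the buffer.
    /// let diff_task = buffer.read(cx).diff(new_text, cx);
    /// cx.spawn(async move |cx| {
    ///     let diff = diff_task.await;
    ///     buffer
    ///         .update(cx, |buffer, cx| buffer.apply_diff(diff, cx))
    ///         .ok();
    /// })
    /// .detach();
    /// ```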
1993 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1994 let snapshot = self.snapshot();
1995 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1996 let mut delta = 0;
1997 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1998 while let Some(edit_since) = edits_since.peek() {
1999 // If the edit occurs after a diff hunk, then it does not
2000 // affect that hunk.
2001 if edit_since.old.start > range.end {
2002 break;
2003 }
2004 // If the edit precedes the diff hunk, then adjust the hunk
2005 // to reflect the edit.
2006 else if edit_since.old.end < range.start {
2007 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2008 edits_since.next();
2009 }
2010 // If the edit intersects a diff hunk, then discard that hunk.
2011 else {
2012 return None;
2013 }
2014 }
2015
2016 let start = (range.start as i64 + delta) as usize;
2017 let end = (range.end as i64 + delta) as usize;
2018 Some((start..end, new_text))
2019 });
2020
2021 self.start_transaction();
2022 self.text.set_line_ending(diff.line_ending);
2023 self.edit(adjusted_edits, None, cx);
2024 self.end_transaction(cx)
2025 }
2026
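    /// Returns whether the buffer has edits that have not yet been saved, caching
    /// the answer for the current buffer version.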
2027 pub fn has_unsaved_edits(&self) -> bool {
2028 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2029
2030 if last_version == self.version {
2031 self.has_unsaved_edits
2032 .set((last_version, has_unsaved_edits));
2033 return has_unsaved_edits;
2034 }
2035
2036 let has_edits = self.has_edits_since(&self.saved_version);
2037 self.has_unsaved_edits
2038 .set((self.version.clone(), has_edits));
2039 has_edits
2040 }
2041
2042 /// Checks if the buffer has unsaved changes.
2043 pub fn is_dirty(&self) -> bool {
2044 if self.capability == Capability::ReadOnly {
2045 return false;
2046 }
2047 if self.has_conflict {
2048 return true;
2049 }
2050 match self.file.as_ref().map(|f| f.disk_state()) {
2051 Some(DiskState::New) | Some(DiskState::Deleted) => {
2052 !self.is_empty() && self.has_unsaved_edits()
2053 }
2054 _ => self.has_unsaved_edits(),
2055 }
2056 }
2057
2058 /// Marks the buffer as having a conflict regardless of current buffer state.
2059 pub fn set_conflict(&mut self) {
2060 self.has_conflict = true;
2061 }
2062
2063 /// Checks if the buffer and its file have both changed since the buffer
2064 /// was last saved or reloaded.
2065 pub fn has_conflict(&self) -> bool {
2066 if self.has_conflict {
2067 return true;
2068 }
2069 let Some(file) = self.file.as_ref() else {
2070 return false;
2071 };
2072 match file.disk_state() {
2073 DiskState::New => false,
2074 DiskState::Present { mtime } => match self.saved_mtime {
2075 Some(saved_mtime) => {
2076 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2077 }
2078 None => true,
2079 },
2080 DiskState::Deleted => false,
2081 }
2082 }
2083
2084 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2085 pub fn subscribe(&mut self) -> Subscription {
2086 self.text.subscribe()
2087 }
2088
2089 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2090 ///
2091 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
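    ///
    /// A minimal usage sketch (assumes an `Entity<Buffer>` named `buffer` and a
    /// `&mut App` named `cx`):
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.update(cx, |buffer, _| buffer.record_changes(Rc::downgrade(&changed)));
    /// // ...later, after edits may have been applied...
    /// if changed.get() {
    ///     // The buffer's text has changed since the bit was registered.
    /// }
    /// ```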
2093 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2094 if let Err(ix) = self
2095 .change_bits
2096 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2097 {
2098 self.change_bits.insert(ix, bit);
2099 }
2100 }
2101
2102 /// Set the change bit for all "listeners".
2103 fn was_changed(&mut self) {
2104 self.change_bits.retain(|change_bit| {
2105 change_bit
2106 .upgrade()
2107 .inspect(|bit| {
2108 _ = bit.replace(true);
2109 })
2110 .is_some()
2111 });
2112 }
2113
2114 /// Starts a transaction, if one is not already in-progress. When undoing or
2115 /// redoing edits, all of the edits performed within a transaction are undone
2116 /// or redone together.
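    ///
    /// A minimal sketch of grouping two edits so that a single undo reverts both
    /// (assumes an `Entity<Buffer>` named `buffer` and a `&mut App` named `cx`):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.start_transaction();
    ///     buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "// prologue\n")], None, cx);
    ///     let end = buffer.len();
    ///     buffer.edit([(end..end, "\n")], None, cx);
    ///     buffer.end_transaction(cx);
    /// });
    /// ```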
2117 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2118 self.start_transaction_at(Instant::now())
2119 }
2120
2121 /// Starts a transaction, providing the current time. Subsequent transactions
2122 /// that occur within a short period of time will be grouped together. This
2123 /// is controlled by the buffer's undo grouping duration.
2124 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2125 self.transaction_depth += 1;
2126 if self.was_dirty_before_starting_transaction.is_none() {
2127 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2128 }
2129 self.text.start_transaction_at(now)
2130 }
2131
2132 /// Terminates the current transaction, if this is the outermost transaction.
2133 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2134 self.end_transaction_at(Instant::now(), cx)
2135 }
2136
2137 /// Terminates the current transaction, providing the current time. Subsequent transactions
2138 /// that occur within a short period of time will be grouped together. This
2139 /// is controlled by the buffer's undo grouping duration.
2140 pub fn end_transaction_at(
2141 &mut self,
2142 now: Instant,
2143 cx: &mut Context<Self>,
2144 ) -> Option<TransactionId> {
2145 assert!(self.transaction_depth > 0);
2146 self.transaction_depth -= 1;
2147 let was_dirty = if self.transaction_depth == 0 {
2148 self.was_dirty_before_starting_transaction.take().unwrap()
2149 } else {
2150 false
2151 };
2152 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2153 self.did_edit(&start_version, was_dirty, cx);
2154 Some(transaction_id)
2155 } else {
2156 None
2157 }
2158 }
2159
2160 /// Manually add a transaction to the buffer's undo history.
2161 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2162 self.text.push_transaction(transaction, now);
2163 }
2164
2165 /// Differs from `push_transaction` in that it does not clear the redo
2166 /// stack. Intended to be used to create a parent transaction to merge
2167 /// potential child transactions into.
2168 ///
2169 /// The caller is responsible for removing it from the undo history using
2170 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2171 /// are merged into this transaction, the caller is responsible for ensuring
2172 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2173 /// cleared is to create transactions with the usual `start_transaction` and
2174 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
2176 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2177 self.text.push_empty_transaction(now)
2178 }
2179
2180 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2182 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2183 self.text.finalize_last_transaction()
2184 }
2185
2186 /// Manually group all changes since a given transaction.
2187 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2188 self.text.group_until_transaction(transaction_id);
2189 }
2190
2191 /// Manually remove a transaction from the buffer's undo history
2192 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2193 self.text.forget_transaction(transaction_id)
2194 }
2195
2196 /// Retrieve a transaction from the buffer's undo history
2197 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2198 self.text.get_transaction(transaction_id)
2199 }
2200
2201 /// Manually merge two transactions in the buffer's undo history.
2202 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2203 self.text.merge_transactions(transaction, destination);
2204 }
2205
2206 /// Waits for the buffer to receive operations with the given timestamps.
2207 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2208 &mut self,
2209 edit_ids: It,
2210 ) -> impl Future<Output = Result<()>> + use<It> {
2211 self.text.wait_for_edits(edit_ids)
2212 }
2213
2214 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2215 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2216 &mut self,
2217 anchors: It,
2218 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2219 self.text.wait_for_anchors(anchors)
2220 }
2221
2222 /// Waits for the buffer to receive operations up to the given version.
2223 pub fn wait_for_version(
2224 &mut self,
2225 version: clock::Global,
2226 ) -> impl Future<Output = Result<()>> + use<> {
2227 self.text.wait_for_version(version)
2228 }
2229
2230 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2232 pub fn give_up_waiting(&mut self) {
2233 self.text.give_up_waiting();
2234 }
2235
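    /// Returns a receiver that resolves once the buffer's pending autoindent
    /// requests have been processed, or `None` if there are none.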
2236 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2237 let mut rx = None;
2238 if !self.autoindent_requests.is_empty() {
2239 let channel = oneshot::channel();
2240 self.wait_for_autoindent_txs.push(channel.0);
2241 rx = Some(channel.1);
2242 }
2243 rx
2244 }
2245
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2247 pub fn set_active_selections(
2248 &mut self,
2249 selections: Arc<[Selection<Anchor>]>,
2250 line_mode: bool,
2251 cursor_shape: CursorShape,
2252 cx: &mut Context<Self>,
2253 ) {
2254 let lamport_timestamp = self.text.lamport_clock.tick();
2255 self.remote_selections.insert(
2256 self.text.replica_id(),
2257 SelectionSet {
2258 selections: selections.clone(),
2259 lamport_timestamp,
2260 line_mode,
2261 cursor_shape,
2262 },
2263 );
2264 self.send_operation(
2265 Operation::UpdateSelections {
2266 selections,
2267 line_mode,
2268 lamport_timestamp,
2269 cursor_shape,
2270 },
2271 true,
2272 cx,
2273 );
2274 self.non_text_state_update_count += 1;
2275 cx.notify();
2276 }
2277
2278 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2279 /// this replica.
2280 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2281 if self
2282 .remote_selections
2283 .get(&self.text.replica_id())
2284 .is_none_or(|set| !set.selections.is_empty())
2285 {
2286 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2287 }
2288 }
2289
2290 pub fn set_agent_selections(
2291 &mut self,
2292 selections: Arc<[Selection<Anchor>]>,
2293 line_mode: bool,
2294 cursor_shape: CursorShape,
2295 cx: &mut Context<Self>,
2296 ) {
2297 let lamport_timestamp = self.text.lamport_clock.tick();
2298 self.remote_selections.insert(
2299 ReplicaId::AGENT,
2300 SelectionSet {
2301 selections,
2302 lamport_timestamp,
2303 line_mode,
2304 cursor_shape,
2305 },
2306 );
2307 self.non_text_state_update_count += 1;
2308 cx.notify();
2309 }
2310
2311 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2312 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2313 }
2314
2315 /// Replaces the buffer's entire text.
2316 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2317 where
2318 T: Into<Arc<str>>,
2319 {
2320 self.autoindent_requests.clear();
2321 self.edit([(0..self.len(), text)], None, cx)
2322 }
2323
2324 /// Appends the given text to the end of the buffer.
2325 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2326 where
2327 T: Into<Arc<str>>,
2328 {
2329 self.edit([(self.len()..self.len(), text)], None, cx)
2330 }
2331
2332 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2333 /// delete, and a string of text to insert at that location.
2334 ///
2335 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2336 /// request for the edited ranges, which will be processed when the buffer finishes
2337 /// parsing.
2338 ///
    /// Parsing takes place at the end of a transaction, and may be performed synchronously
    /// or asynchronously, depending on the changes.
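    ///
    /// A minimal sketch of an auto-indented insertion (assumes an `Entity<Buffer>`
    /// named `buffer` and a `&mut App` named `cx`):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.edit(
    ///         [(Point::new(1, 0)..Point::new(1, 0), "if ready {\n    start();\n}\n")],
    ///         Some(AutoindentMode::EachLine),
    ///         cx,
    ///     );
    /// });
    /// ```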
2341 pub fn edit<I, S, T>(
2342 &mut self,
2343 edits_iter: I,
2344 autoindent_mode: Option<AutoindentMode>,
2345 cx: &mut Context<Self>,
2346 ) -> Option<clock::Lamport>
2347 where
2348 I: IntoIterator<Item = (Range<S>, T)>,
2349 S: ToOffset,
2350 T: Into<Arc<str>>,
2351 {
2352 // Skip invalid edits and coalesce contiguous ones.
2353 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2354
2355 for (range, new_text) in edits_iter {
2356 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2357
2358 if range.start > range.end {
2359 mem::swap(&mut range.start, &mut range.end);
2360 }
2361 let new_text = new_text.into();
2362 if !new_text.is_empty() || !range.is_empty() {
2363 if let Some((prev_range, prev_text)) = edits.last_mut()
2364 && prev_range.end >= range.start
2365 {
2366 prev_range.end = cmp::max(prev_range.end, range.end);
2367 *prev_text = format!("{prev_text}{new_text}").into();
2368 } else {
2369 edits.push((range, new_text));
2370 }
2371 }
2372 }
2373 if edits.is_empty() {
2374 return None;
2375 }
2376
2377 self.start_transaction();
2378 self.pending_autoindent.take();
2379 let autoindent_request = autoindent_mode
2380 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2381
2382 let edit_operation = self.text.edit(edits.iter().cloned());
2383 let edit_id = edit_operation.timestamp();
2384
2385 if let Some((before_edit, mode)) = autoindent_request {
2386 let mut delta = 0isize;
2387 let mut previous_setting = None;
2388 let entries: Vec<_> = edits
2389 .into_iter()
2390 .enumerate()
2391 .zip(&edit_operation.as_edit().unwrap().new_text)
2392 .filter(|((_, (range, _)), _)| {
2393 let language = before_edit.language_at(range.start);
2394 let language_id = language.map(|l| l.id());
2395 if let Some((cached_language_id, auto_indent)) = previous_setting
2396 && cached_language_id == language_id
2397 {
2398 auto_indent
2399 } else {
2400 // The auto-indent setting is not present in editorconfigs, hence
2401 // we can avoid passing the file here.
2402 let auto_indent =
2403 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2404 previous_setting = Some((language_id, auto_indent));
2405 auto_indent
2406 }
2407 })
2408 .map(|((ix, (range, _)), new_text)| {
2409 let new_text_length = new_text.len();
2410 let old_start = range.start.to_point(&before_edit);
2411 let new_start = (delta + range.start as isize) as usize;
2412 let range_len = range.end - range.start;
2413 delta += new_text_length as isize - range_len as isize;
2414
2415 // Decide what range of the insertion to auto-indent, and whether
2416 // the first line of the insertion should be considered a newly-inserted line
2417 // or an edit to an existing line.
2418 let mut range_of_insertion_to_indent = 0..new_text_length;
2419 let mut first_line_is_new = true;
2420
2421 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2422 let old_line_end = before_edit.line_len(old_start.row);
2423
2424 if old_start.column > old_line_start {
2425 first_line_is_new = false;
2426 }
2427
2428 if !new_text.contains('\n')
2429 && (old_start.column + (range_len as u32) < old_line_end
2430 || old_line_end == old_line_start)
2431 {
2432 first_line_is_new = false;
2433 }
2434
2435 // When inserting text starting with a newline, avoid auto-indenting the
2436 // previous line.
2437 if new_text.starts_with('\n') {
2438 range_of_insertion_to_indent.start += 1;
2439 first_line_is_new = true;
2440 }
2441
2442 let mut original_indent_column = None;
2443 if let AutoindentMode::Block {
2444 original_indent_columns,
2445 } = &mode
2446 {
2447 original_indent_column = Some(if new_text.starts_with('\n') {
2448 indent_size_for_text(
2449 new_text[range_of_insertion_to_indent.clone()].chars(),
2450 )
2451 .len
2452 } else {
2453 original_indent_columns
2454 .get(ix)
2455 .copied()
2456 .flatten()
2457 .unwrap_or_else(|| {
2458 indent_size_for_text(
2459 new_text[range_of_insertion_to_indent.clone()].chars(),
2460 )
2461 .len
2462 })
2463 });
2464
2465 // Avoid auto-indenting the line after the edit.
2466 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2467 range_of_insertion_to_indent.end -= 1;
2468 }
2469 }
2470
2471 AutoindentRequestEntry {
2472 first_line_is_new,
2473 original_indent_column,
2474 indent_size: before_edit.language_indent_size_at(range.start, cx),
2475 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2476 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2477 }
2478 })
2479 .collect();
2480
2481 if !entries.is_empty() {
2482 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2483 before_edit,
2484 entries,
2485 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2486 ignore_empty_lines: false,
2487 }));
2488 }
2489 }
2490
2491 self.end_transaction(cx);
2492 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2493 Some(edit_id)
2494 }
2495
2496 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2497 self.was_changed();
2498
2499 if self.edits_since::<usize>(old_version).next().is_none() {
2500 return;
2501 }
2502
2503 self.reparse(cx);
2504 cx.emit(BufferEvent::Edited);
2505 if was_dirty != self.is_dirty() {
2506 cx.emit(BufferEvent::DirtyChanged);
2507 }
2508 cx.notify();
2509 }
2510
2511 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2512 where
2513 I: IntoIterator<Item = Range<T>>,
2514 T: ToOffset + Copy,
2515 {
2516 let before_edit = self.snapshot();
2517 let entries = ranges
2518 .into_iter()
2519 .map(|range| AutoindentRequestEntry {
2520 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2521 first_line_is_new: true,
2522 indent_size: before_edit.language_indent_size_at(range.start, cx),
2523 original_indent_column: None,
2524 })
2525 .collect();
2526 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2527 before_edit,
2528 entries,
2529 is_block_mode: false,
2530 ignore_empty_lines: true,
2531 }));
2532 self.request_autoindent(cx);
2533 }
2534
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2537 pub fn insert_empty_line(
2538 &mut self,
2539 position: impl ToPoint,
2540 space_above: bool,
2541 space_below: bool,
2542 cx: &mut Context<Self>,
2543 ) -> Point {
2544 let mut position = position.to_point(self);
2545
2546 self.start_transaction();
2547
2548 self.edit(
2549 [(position..position, "\n")],
2550 Some(AutoindentMode::EachLine),
2551 cx,
2552 );
2553
2554 if position.column > 0 {
2555 position += Point::new(1, 0);
2556 }
2557
2558 if !self.is_line_blank(position.row) {
2559 self.edit(
2560 [(position..position, "\n")],
2561 Some(AutoindentMode::EachLine),
2562 cx,
2563 );
2564 }
2565
2566 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2567 self.edit(
2568 [(position..position, "\n")],
2569 Some(AutoindentMode::EachLine),
2570 cx,
2571 );
2572 position.row += 1;
2573 }
2574
2575 if space_below
2576 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2577 {
2578 self.edit(
2579 [(position..position, "\n")],
2580 Some(AutoindentMode::EachLine),
2581 cx,
2582 );
2583 }
2584
2585 self.end_transaction(cx);
2586
2587 position
2588 }
2589
2590 /// Applies the given remote operations to the buffer.
2591 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2592 self.pending_autoindent.take();
2593 let was_dirty = self.is_dirty();
2594 let old_version = self.version.clone();
2595 let mut deferred_ops = Vec::new();
2596 let buffer_ops = ops
2597 .into_iter()
2598 .filter_map(|op| match op {
2599 Operation::Buffer(op) => Some(op),
2600 _ => {
2601 if self.can_apply_op(&op) {
2602 self.apply_op(op, cx);
2603 } else {
2604 deferred_ops.push(op);
2605 }
2606 None
2607 }
2608 })
2609 .collect::<Vec<_>>();
2610 for operation in buffer_ops.iter() {
2611 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2612 }
2613 self.text.apply_ops(buffer_ops);
2614 self.deferred_ops.insert(deferred_ops);
2615 self.flush_deferred_ops(cx);
2616 self.did_edit(&old_version, was_dirty, cx);
2617 // Notify independently of whether the buffer was edited as the operations could include a
2618 // selection update.
2619 cx.notify();
2620 }
2621
2622 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2623 let mut deferred_ops = Vec::new();
2624 for op in self.deferred_ops.drain().iter().cloned() {
2625 if self.can_apply_op(&op) {
2626 self.apply_op(op, cx);
2627 } else {
2628 deferred_ops.push(op);
2629 }
2630 }
2631 self.deferred_ops.insert(deferred_ops);
2632 }
2633
2634 pub fn has_deferred_ops(&self) -> bool {
2635 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2636 }
2637
2638 fn can_apply_op(&self, operation: &Operation) -> bool {
2639 match operation {
2640 Operation::Buffer(_) => {
2641 unreachable!("buffer operations should never be applied at this layer")
2642 }
2643 Operation::UpdateDiagnostics {
2644 diagnostics: diagnostic_set,
2645 ..
2646 } => diagnostic_set.iter().all(|diagnostic| {
2647 self.text.can_resolve(&diagnostic.range.start)
2648 && self.text.can_resolve(&diagnostic.range.end)
2649 }),
2650 Operation::UpdateSelections { selections, .. } => selections
2651 .iter()
2652 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2653 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2654 }
2655 }
2656
2657 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2658 match operation {
2659 Operation::Buffer(_) => {
2660 unreachable!("buffer operations should never be applied at this layer")
2661 }
2662 Operation::UpdateDiagnostics {
2663 server_id,
2664 diagnostics: diagnostic_set,
2665 lamport_timestamp,
2666 } => {
2667 let snapshot = self.snapshot();
2668 self.apply_diagnostic_update(
2669 server_id,
2670 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2671 lamport_timestamp,
2672 cx,
2673 );
2674 }
2675 Operation::UpdateSelections {
2676 selections,
2677 lamport_timestamp,
2678 line_mode,
2679 cursor_shape,
2680 } => {
2681 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2682 && set.lamport_timestamp > lamport_timestamp
2683 {
2684 return;
2685 }
2686
2687 self.remote_selections.insert(
2688 lamport_timestamp.replica_id,
2689 SelectionSet {
2690 selections,
2691 lamport_timestamp,
2692 line_mode,
2693 cursor_shape,
2694 },
2695 );
2696 self.text.lamport_clock.observe(lamport_timestamp);
2697 self.non_text_state_update_count += 1;
2698 }
2699 Operation::UpdateCompletionTriggers {
2700 triggers,
2701 lamport_timestamp,
2702 server_id,
2703 } => {
2704 if triggers.is_empty() {
2705 self.completion_triggers_per_language_server
2706 .remove(&server_id);
2707 self.completion_triggers = self
2708 .completion_triggers_per_language_server
2709 .values()
2710 .flat_map(|triggers| triggers.iter().cloned())
2711 .collect();
2712 } else {
2713 self.completion_triggers_per_language_server
2714 .insert(server_id, triggers.iter().cloned().collect());
2715 self.completion_triggers.extend(triggers);
2716 }
2717 self.text.lamport_clock.observe(lamport_timestamp);
2718 }
2719 Operation::UpdateLineEnding {
2720 line_ending,
2721 lamport_timestamp,
2722 } => {
2723 self.text.set_line_ending(line_ending);
2724 self.text.lamport_clock.observe(lamport_timestamp);
2725 }
2726 }
2727 }
2728
2729 fn apply_diagnostic_update(
2730 &mut self,
2731 server_id: LanguageServerId,
2732 diagnostics: DiagnosticSet,
2733 lamport_timestamp: clock::Lamport,
2734 cx: &mut Context<Self>,
2735 ) {
2736 if lamport_timestamp > self.diagnostics_timestamp {
2737 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2738 if diagnostics.is_empty() {
2739 if let Ok(ix) = ix {
2740 self.diagnostics.remove(ix);
2741 }
2742 } else {
2743 match ix {
2744 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2745 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2746 };
2747 }
2748 self.diagnostics_timestamp = lamport_timestamp;
2749 self.non_text_state_update_count += 1;
2750 self.text.lamport_clock.observe(lamport_timestamp);
2751 cx.notify();
2752 cx.emit(BufferEvent::DiagnosticsUpdated);
2753 }
2754 }
2755
2756 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2757 self.was_changed();
2758 cx.emit(BufferEvent::Operation {
2759 operation,
2760 is_local,
2761 });
2762 }
2763
2764 /// Removes the selections for a given peer.
2765 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2766 self.remote_selections.remove(&replica_id);
2767 cx.notify();
2768 }
2769
2770 /// Undoes the most recent transaction.
2771 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2772 let was_dirty = self.is_dirty();
2773 let old_version = self.version.clone();
2774
2775 if let Some((transaction_id, operation)) = self.text.undo() {
2776 self.send_operation(Operation::Buffer(operation), true, cx);
2777 self.did_edit(&old_version, was_dirty, cx);
2778 Some(transaction_id)
2779 } else {
2780 None
2781 }
2782 }
2783
2784 /// Manually undoes a specific transaction in the buffer's undo history.
2785 pub fn undo_transaction(
2786 &mut self,
2787 transaction_id: TransactionId,
2788 cx: &mut Context<Self>,
2789 ) -> bool {
2790 let was_dirty = self.is_dirty();
2791 let old_version = self.version.clone();
2792 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2793 self.send_operation(Operation::Buffer(operation), true, cx);
2794 self.did_edit(&old_version, was_dirty, cx);
2795 true
2796 } else {
2797 false
2798 }
2799 }
2800
2801 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2802 pub fn undo_to_transaction(
2803 &mut self,
2804 transaction_id: TransactionId,
2805 cx: &mut Context<Self>,
2806 ) -> bool {
2807 let was_dirty = self.is_dirty();
2808 let old_version = self.version.clone();
2809
2810 let operations = self.text.undo_to_transaction(transaction_id);
2811 let undone = !operations.is_empty();
2812 for operation in operations {
2813 self.send_operation(Operation::Buffer(operation), true, cx);
2814 }
2815 if undone {
2816 self.did_edit(&old_version, was_dirty, cx)
2817 }
2818 undone
2819 }
2820
2821 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2822 let was_dirty = self.is_dirty();
2823 let operation = self.text.undo_operations(counts);
2824 let old_version = self.version.clone();
2825 self.send_operation(Operation::Buffer(operation), true, cx);
2826 self.did_edit(&old_version, was_dirty, cx);
2827 }
2828
    /// Redoes the most recent transaction.
2830 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2831 let was_dirty = self.is_dirty();
2832 let old_version = self.version.clone();
2833
2834 if let Some((transaction_id, operation)) = self.text.redo() {
2835 self.send_operation(Operation::Buffer(operation), true, cx);
2836 self.did_edit(&old_version, was_dirty, cx);
2837 Some(transaction_id)
2838 } else {
2839 None
2840 }
2841 }
2842
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2844 pub fn redo_to_transaction(
2845 &mut self,
2846 transaction_id: TransactionId,
2847 cx: &mut Context<Self>,
2848 ) -> bool {
2849 let was_dirty = self.is_dirty();
2850 let old_version = self.version.clone();
2851
2852 let operations = self.text.redo_to_transaction(transaction_id);
2853 let redone = !operations.is_empty();
2854 for operation in operations {
2855 self.send_operation(Operation::Buffer(operation), true, cx);
2856 }
2857 if redone {
2858 self.did_edit(&old_version, was_dirty, cx)
2859 }
2860 redone
2861 }
2862
2863 /// Override current completion triggers with the user-provided completion triggers.
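    ///
    /// A minimal usage sketch (assumes an `Entity<Buffer>` named `buffer`, a
    /// `LanguageServerId` named `server_id`, and a `&mut App` named `cx`):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.set_completion_triggers(
    ///         server_id,
    ///         BTreeSet::from([".".to_string(), "::".to_string()]),
    ///         cx,
    ///     );
    /// });
    /// ```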
2864 pub fn set_completion_triggers(
2865 &mut self,
2866 server_id: LanguageServerId,
2867 triggers: BTreeSet<String>,
2868 cx: &mut Context<Self>,
2869 ) {
2870 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2871 if triggers.is_empty() {
2872 self.completion_triggers_per_language_server
2873 .remove(&server_id);
2874 self.completion_triggers = self
2875 .completion_triggers_per_language_server
2876 .values()
2877 .flat_map(|triggers| triggers.iter().cloned())
2878 .collect();
2879 } else {
2880 self.completion_triggers_per_language_server
2881 .insert(server_id, triggers.clone());
2882 self.completion_triggers.extend(triggers.iter().cloned());
2883 }
2884 self.send_operation(
2885 Operation::UpdateCompletionTriggers {
2886 triggers: triggers.into_iter().collect(),
2887 lamport_timestamp: self.completion_triggers_timestamp,
2888 server_id,
2889 },
2890 true,
2891 cx,
2892 );
2893 cx.notify();
2894 }
2895
2896 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2898 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2899 &self.completion_triggers
2900 }
2901
2902 /// Call this directly after performing edits to prevent the preview tab
2903 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2904 /// to return false until there are additional edits.
2905 pub fn refresh_preview(&mut self) {
2906 self.preview_version = self.version.clone();
2907 }
2908
2909 /// Whether we should preserve the preview status of a tab containing this buffer.
2910 pub fn preserve_preview(&self) -> bool {
2911 !self.has_edits_since(&self.preview_version)
2912 }
2913}
2914
2915#[doc(hidden)]
2916#[cfg(any(test, feature = "test-support"))]
2917impl Buffer {
2918 pub fn edit_via_marked_text(
2919 &mut self,
2920 marked_string: &str,
2921 autoindent_mode: Option<AutoindentMode>,
2922 cx: &mut Context<Self>,
2923 ) {
2924 let edits = self.edits_for_marked_text(marked_string);
2925 self.edit(edits, autoindent_mode, cx);
2926 }
2927
2928 pub fn set_group_interval(&mut self, group_interval: Duration) {
2929 self.text.set_group_interval(group_interval);
2930 }
2931
2932 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2933 where
2934 T: rand::Rng,
2935 {
2936 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2937 let mut last_end = None;
2938 for _ in 0..old_range_count {
2939 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2940 break;
2941 }
2942
2943 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2944 let mut range = self.random_byte_range(new_start, rng);
2945 if rng.random_bool(0.2) {
2946 mem::swap(&mut range.start, &mut range.end);
2947 }
2948 last_end = Some(range.end);
2949
2950 let new_text_len = rng.random_range(0..10);
2951 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2952 new_text = new_text.to_uppercase();
2953
2954 edits.push((range, new_text));
2955 }
2956 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2957 self.edit(edits, None, cx);
2958 }
2959
2960 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2961 let was_dirty = self.is_dirty();
2962 let old_version = self.version.clone();
2963
2964 let ops = self.text.randomly_undo_redo(rng);
2965 if !ops.is_empty() {
2966 for op in ops {
2967 self.send_operation(Operation::Buffer(op), true, cx);
2968 self.did_edit(&old_version, was_dirty, cx);
2969 }
2970 }
2971 }
2972}
2973
2974impl EventEmitter<BufferEvent> for Buffer {}
2975
2976impl Deref for Buffer {
2977 type Target = TextBuffer;
2978
2979 fn deref(&self) -> &Self::Target {
2980 &self.text
2981 }
2982}
2983
2984impl BufferSnapshot {
2985 /// Returns [`IndentSize`] for a given line that respects user settings and
2986 /// language preferences.
2987 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2988 indent_size_for_line(self, row)
2989 }
2990
2991 /// Returns [`IndentSize`] for a given position that respects user settings
2992 /// and language preferences.
2993 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2994 let settings = language_settings(
2995 self.language_at(position).map(|l| l.name()),
2996 self.file(),
2997 cx,
2998 );
2999 if settings.hard_tabs {
3000 IndentSize::tab()
3001 } else {
3002 IndentSize::spaces(settings.tab_size.get())
3003 }
3004 }
3005
3006 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3007 /// is passed in as `single_indent_size`.
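    ///
    /// A minimal usage sketch (assumes a `BufferSnapshot` named `snapshot`):
    ///
    /// ```ignore
    /// // Suggested indents for rows 2 through 4, using a 4-space indent unit.
    /// let indents = snapshot.suggested_indents(2..=4, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row}: {} column(s)", indent.len);
    /// }
    /// ```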
3008 pub fn suggested_indents(
3009 &self,
3010 rows: impl Iterator<Item = u32>,
3011 single_indent_size: IndentSize,
3012 ) -> BTreeMap<u32, IndentSize> {
3013 let mut result = BTreeMap::new();
3014
3015 for row_range in contiguous_ranges(rows, 10) {
3016 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3017 Some(suggestions) => suggestions,
3018 _ => break,
3019 };
3020
3021 for (row, suggestion) in row_range.zip(suggestions) {
3022 let indent_size = if let Some(suggestion) = suggestion {
3023 result
3024 .get(&suggestion.basis_row)
3025 .copied()
3026 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3027 .with_delta(suggestion.delta, single_indent_size)
3028 } else {
3029 self.indent_size_for_line(row)
3030 };
3031
3032 result.insert(row, indent_size);
3033 }
3034 }
3035
3036 result
3037 }
3038
3039 fn suggest_autoindents(
3040 &self,
3041 row_range: Range<u32>,
3042 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3043 let config = &self.language.as_ref()?.config;
3044 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3045
3046 #[derive(Debug, Clone)]
3047 struct StartPosition {
3048 start: Point,
3049 suffix: SharedString,
3050 }
3051
3052 // Find the suggested indentation ranges based on the syntax tree.
3053 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3054 let end = Point::new(row_range.end, 0);
3055 let range = (start..end).to_offset(&self.text);
3056 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3057 Some(&grammar.indents_config.as_ref()?.query)
3058 });
3059 let indent_configs = matches
3060 .grammars()
3061 .iter()
3062 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3063 .collect::<Vec<_>>();
3064
3065 let mut indent_ranges = Vec::<Range<Point>>::new();
3066 let mut start_positions = Vec::<StartPosition>::new();
3067 let mut outdent_positions = Vec::<Point>::new();
3068 while let Some(mat) = matches.peek() {
3069 let mut start: Option<Point> = None;
3070 let mut end: Option<Point> = None;
3071
3072 let config = indent_configs[mat.grammar_index];
3073 for capture in mat.captures {
3074 if capture.index == config.indent_capture_ix {
3075 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3076 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3077 } else if Some(capture.index) == config.start_capture_ix {
3078 start = Some(Point::from_ts_point(capture.node.end_position()));
3079 } else if Some(capture.index) == config.end_capture_ix {
3080 end = Some(Point::from_ts_point(capture.node.start_position()));
3081 } else if Some(capture.index) == config.outdent_capture_ix {
3082 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3083 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3084 start_positions.push(StartPosition {
3085 start: Point::from_ts_point(capture.node.start_position()),
3086 suffix: suffix.clone(),
3087 });
3088 }
3089 }
3090
3091 matches.advance();
3092 if let Some((start, end)) = start.zip(end) {
3093 if start.row == end.row {
3094 continue;
3095 }
3096 let range = start..end;
3097 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3098 Err(ix) => indent_ranges.insert(ix, range),
3099 Ok(ix) => {
3100 let prev_range = &mut indent_ranges[ix];
3101 prev_range.end = prev_range.end.max(range.end);
3102 }
3103 }
3104 }
3105 }
3106
3107 let mut error_ranges = Vec::<Range<Point>>::new();
3108 let mut matches = self
3109 .syntax
3110 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3111 while let Some(mat) = matches.peek() {
3112 let node = mat.captures[0].node;
3113 let start = Point::from_ts_point(node.start_position());
3114 let end = Point::from_ts_point(node.end_position());
3115 let range = start..end;
3116 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3117 Ok(ix) | Err(ix) => ix,
3118 };
3119 let mut end_ix = ix;
3120 while let Some(existing_range) = error_ranges.get(end_ix) {
3121 if existing_range.end < end {
3122 end_ix += 1;
3123 } else {
3124 break;
3125 }
3126 }
3127 error_ranges.splice(ix..end_ix, [range]);
3128 matches.advance();
3129 }
3130
3131 outdent_positions.sort();
3132 for outdent_position in outdent_positions {
3133 // find the innermost indent range containing this outdent_position
3134 // set its end to the outdent position
3135 if let Some(range_to_truncate) = indent_ranges
3136 .iter_mut()
3137 .filter(|indent_range| indent_range.contains(&outdent_position))
3138 .next_back()
3139 {
3140 range_to_truncate.end = outdent_position;
3141 }
3142 }
3143
3144 start_positions.sort_by_key(|b| b.start);
3145
        // Find the suggested indentation increases and decreases based on regexes.
3147 let mut regex_outdent_map = HashMap::default();
3148 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3149 let mut start_positions_iter = start_positions.iter().peekable();
3150
3151 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3152 self.for_each_line(
3153 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3154 ..Point::new(row_range.end, 0),
3155 |row, line| {
3156 if config
3157 .decrease_indent_pattern
3158 .as_ref()
3159 .is_some_and(|regex| regex.is_match(line))
3160 {
3161 indent_change_rows.push((row, Ordering::Less));
3162 }
3163 if config
3164 .increase_indent_pattern
3165 .as_ref()
3166 .is_some_and(|regex| regex.is_match(line))
3167 {
3168 indent_change_rows.push((row + 1, Ordering::Greater));
3169 }
3170 while let Some(pos) = start_positions_iter.peek() {
3171 if pos.start.row < row {
3172 let pos = start_positions_iter.next().unwrap();
3173 last_seen_suffix
3174 .entry(pos.suffix.to_string())
3175 .or_default()
3176 .push(pos.start);
3177 } else {
3178 break;
3179 }
3180 }
3181 for rule in &config.decrease_indent_patterns {
3182 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3183 let row_start_column = self.indent_size_for_line(row).len;
3184 let basis_row = rule
3185 .valid_after
3186 .iter()
3187 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3188 .flatten()
3189 .filter(|start_point| start_point.column <= row_start_column)
3190 .max_by_key(|start_point| start_point.row);
3191 if let Some(outdent_to_row) = basis_row {
3192 regex_outdent_map.insert(row, outdent_to_row.row);
3193 }
3194 break;
3195 }
3196 }
3197 },
3198 );
3199
3200 let mut indent_changes = indent_change_rows.into_iter().peekable();
3201 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3202 prev_non_blank_row.unwrap_or(0)
3203 } else {
3204 row_range.start.saturating_sub(1)
3205 };
3206
3207 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3208 Some(row_range.map(move |row| {
3209 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3210
3211 let mut indent_from_prev_row = false;
3212 let mut outdent_from_prev_row = false;
3213 let mut outdent_to_row = u32::MAX;
3214 let mut from_regex = false;
3215
3216 while let Some((indent_row, delta)) = indent_changes.peek() {
3217 match indent_row.cmp(&row) {
3218 Ordering::Equal => match delta {
3219 Ordering::Less => {
3220 from_regex = true;
3221 outdent_from_prev_row = true
3222 }
3223 Ordering::Greater => {
3224 indent_from_prev_row = true;
3225 from_regex = true
3226 }
3227 _ => {}
3228 },
3229
3230 Ordering::Greater => break,
3231 Ordering::Less => {}
3232 }
3233
3234 indent_changes.next();
3235 }
3236
3237 for range in &indent_ranges {
3238 if range.start.row >= row {
3239 break;
3240 }
3241 if range.start.row == prev_row && range.end > row_start {
3242 indent_from_prev_row = true;
3243 }
3244 if range.end > prev_row_start && range.end <= row_start {
3245 outdent_to_row = outdent_to_row.min(range.start.row);
3246 }
3247 }
3248
3249 if let Some(basis_row) = regex_outdent_map.get(&row) {
3250 indent_from_prev_row = false;
3251 outdent_to_row = *basis_row;
3252 from_regex = true;
3253 }
3254
3255 let within_error = error_ranges
3256 .iter()
3257 .any(|e| e.start.row < row && e.end > row_start);
3258
3259 let suggestion = if outdent_to_row == prev_row
3260 || (outdent_from_prev_row && indent_from_prev_row)
3261 {
3262 Some(IndentSuggestion {
3263 basis_row: prev_row,
3264 delta: Ordering::Equal,
3265 within_error: within_error && !from_regex,
3266 })
3267 } else if indent_from_prev_row {
3268 Some(IndentSuggestion {
3269 basis_row: prev_row,
3270 delta: Ordering::Greater,
3271 within_error: within_error && !from_regex,
3272 })
3273 } else if outdent_to_row < prev_row {
3274 Some(IndentSuggestion {
3275 basis_row: outdent_to_row,
3276 delta: Ordering::Equal,
3277 within_error: within_error && !from_regex,
3278 })
3279 } else if outdent_from_prev_row {
3280 Some(IndentSuggestion {
3281 basis_row: prev_row,
3282 delta: Ordering::Less,
3283 within_error: within_error && !from_regex,
3284 })
3285 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3286 {
3287 Some(IndentSuggestion {
3288 basis_row: prev_row,
3289 delta: Ordering::Equal,
3290 within_error: within_error && !from_regex,
3291 })
3292 } else {
3293 None
3294 };
3295
3296 prev_row = row;
3297 prev_row_start = row_start;
3298 suggestion
3299 }))
3300 }
3301
3302 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3303 while row > 0 {
3304 row -= 1;
3305 if !self.is_line_blank(row) {
3306 return Some(row);
3307 }
3308 }
3309 None
3310 }
3311
3312 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3313 let captures = self.syntax.captures(range, &self.text, |grammar| {
3314 grammar
3315 .highlights_config
3316 .as_ref()
3317 .map(|config| &config.query)
3318 });
3319 let highlight_maps = captures
3320 .grammars()
3321 .iter()
3322 .map(|grammar| grammar.highlight_map())
3323 .collect();
3324 (captures, highlight_maps)
3325 }
3326
3327 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3328 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3329 /// returned in chunks where each chunk has a single syntax highlighting style and
3330 /// diagnostic status.
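    ///
    /// A minimal sketch of reassembling the text of a range from its chunks
    /// (assumes a `BufferSnapshot` named `snapshot`):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // `chunk.text` is the plain text; highlight and diagnostic metadata ride alongside it.
    ///     text.push_str(chunk.text);
    /// }
    /// assert_eq!(text, snapshot.text());
    /// ```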
3331 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3332 let range = range.start.to_offset(self)..range.end.to_offset(self);
3333
3334 let mut syntax = None;
3335 if language_aware {
3336 syntax = Some(self.get_highlights(range.clone()));
3337 }
3338 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3339 let diagnostics = language_aware;
3340 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3341 }
3342
3343 pub fn highlighted_text_for_range<T: ToOffset>(
3344 &self,
3345 range: Range<T>,
3346 override_style: Option<HighlightStyle>,
3347 syntax_theme: &SyntaxTheme,
3348 ) -> HighlightedText {
3349 HighlightedText::from_buffer_range(
3350 range,
3351 &self.text,
3352 &self.syntax,
3353 override_style,
3354 syntax_theme,
3355 )
3356 }
3357
3358 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3360 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3361 let mut line = String::new();
3362 let mut row = range.start.row;
3363 for chunk in self
3364 .as_rope()
3365 .chunks_in_range(range.to_offset(self))
3366 .chain(["\n"])
3367 {
3368 for (newline_ix, text) in chunk.split('\n').enumerate() {
3369 if newline_ix > 0 {
3370 callback(row, &line);
3371 row += 1;
3372 line.clear();
3373 }
3374 line.push_str(text);
3375 }
3376 }
3377 }
3378
3379 /// Iterates over every [`SyntaxLayer`] in the buffer.
3380 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3381 self.syntax_layers_for_range(0..self.len(), true)
3382 }
3383
3384 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3385 let offset = position.to_offset(self);
3386 self.syntax_layers_for_range(offset..offset, false)
3387 .filter(|l| {
3388 if let Some(ranges) = l.included_sub_ranges {
3389 ranges.iter().any(|range| {
3390 let start = range.start.to_offset(self);
3391 start <= offset && {
3392 let end = range.end.to_offset(self);
3393 offset < end
3394 }
3395 })
3396 } else {
3397 l.node().start_byte() <= offset && l.node().end_byte() > offset
3398 }
3399 })
3400 .last()
3401 }
3402
3403 pub fn syntax_layers_for_range<D: ToOffset>(
3404 &self,
3405 range: Range<D>,
3406 include_hidden: bool,
3407 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3408 self.syntax
3409 .layers_for_range(range, &self.text, include_hidden)
3410 }
3411
3412 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3413 &self,
3414 range: Range<D>,
3415 ) -> Option<SyntaxLayer<'_>> {
3416 let range = range.to_offset(self);
3417 self.syntax
3418 .layers_for_range(range, &self.text, false)
3419 .max_by(|a, b| {
3420 if a.depth != b.depth {
3421 a.depth.cmp(&b.depth)
3422 } else if a.offset.0 != b.offset.0 {
3423 a.offset.0.cmp(&b.offset.0)
3424 } else {
3425 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3426 }
3427 })
3428 }
3429
3430 /// Returns the main [`Language`].
3431 pub fn language(&self) -> Option<&Arc<Language>> {
3432 self.language.as_ref()
3433 }
3434
3435 /// Returns the [`Language`] at the given location.
3436 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3437 self.syntax_layer_at(position)
3438 .map(|info| info.language)
3439 .or(self.language.as_ref())
3440 }
3441
3442 /// Returns the settings for the language at the given location.
3443 pub fn settings_at<'a, D: ToOffset>(
3444 &'a self,
3445 position: D,
3446 cx: &'a App,
3447 ) -> Cow<'a, LanguageSettings> {
3448 language_settings(
3449 self.language_at(position).map(|l| l.name()),
3450 self.file.as_ref(),
3451 cx,
3452 )
3453 }
3454
3455 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3456 CharClassifier::new(self.language_scope_at(point))
3457 }
3458
3459 /// Returns the [`LanguageScope`] at the given location.
3460 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3461 let offset = position.to_offset(self);
3462 let mut scope = None;
3463 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3464
3465 // Use the layer that has the smallest node intersecting the given point.
3466 for layer in self
3467 .syntax
3468 .layers_for_range(offset..offset, &self.text, false)
3469 {
3470 let mut cursor = layer.node().walk();
3471
3472 let mut range = None;
3473 loop {
3474 let child_range = cursor.node().byte_range();
3475 if !child_range.contains(&offset) {
3476 break;
3477 }
3478
3479 range = Some(child_range);
3480 if cursor.goto_first_child_for_byte(offset).is_none() {
3481 break;
3482 }
3483 }
3484
3485 if let Some(range) = range
3486 && smallest_range_and_depth.as_ref().is_none_or(
3487 |(smallest_range, smallest_range_depth)| {
3488 if layer.depth > *smallest_range_depth {
3489 true
3490 } else if layer.depth == *smallest_range_depth {
3491 range.len() < smallest_range.len()
3492 } else {
3493 false
3494 }
3495 },
3496 )
3497 {
3498 smallest_range_and_depth = Some((range, layer.depth));
3499 scope = Some(LanguageScope {
3500 language: layer.language.clone(),
3501 override_id: layer.override_id(offset, &self.text),
3502 });
3503 }
3504 }
3505
3506 scope.or_else(|| {
3507 self.language.clone().map(|language| LanguageScope {
3508 language,
3509 override_id: None,
3510 })
3511 })
3512 }
3513
3514 /// Returns a tuple of the range and character kind of the word
3515 /// surrounding the given position.
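    ///
    /// Sketch (illustrative; `snapshot` and a cursor `offset` are assumed):
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(offset, None);
    /// if kind == Some(CharKind::Word) {
    ///     let word: String = snapshot.text_for_range(range).collect();
    /// }
    /// ```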
3516 pub fn surrounding_word<T: ToOffset>(
3517 &self,
3518 start: T,
3519 scope_context: Option<CharScopeContext>,
3520 ) -> (Range<usize>, Option<CharKind>) {
3521 let mut start = start.to_offset(self);
3522 let mut end = start;
3523 let mut next_chars = self.chars_at(start).take(128).peekable();
3524 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3525
3526 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3527 let word_kind = cmp::max(
3528 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3529 next_chars.peek().copied().map(|c| classifier.kind(c)),
3530 );
3531
3532 for ch in prev_chars {
3533 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3534 start -= ch.len_utf8();
3535 } else {
3536 break;
3537 }
3538 }
3539
3540 for ch in next_chars {
3541 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3542 end += ch.len_utf8();
3543 } else {
3544 break;
3545 }
3546 }
3547
3548 (start..end, word_kind)
3549 }
3550
3551 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3552 /// range. When `require_larger` is true, the node found must be larger than the query range.
3553 ///
3554 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3555 /// be moved to the root of the tree.
3556 fn goto_node_enclosing_range(
3557 cursor: &mut tree_sitter::TreeCursor,
3558 query_range: &Range<usize>,
3559 require_larger: bool,
3560 ) -> bool {
3561 let mut ascending = false;
3562 loop {
3563 let mut range = cursor.node().byte_range();
3564 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3567 if range.start > query_range.start {
3568 cursor.goto_previous_sibling();
3569 range = cursor.node().byte_range();
3570 }
3571 } else {
3572 // When the query range is non-empty and the current node ends exactly at the start,
3573 // move to the next sibling to find a node that extends beyond the start.
3574 if range.end == query_range.start {
3575 cursor.goto_next_sibling();
3576 range = cursor.node().byte_range();
3577 }
3578 }
3579
3580 let encloses = range.contains_inclusive(query_range)
3581 && (!require_larger || range.len() > query_range.len());
3582 if !encloses {
3583 ascending = true;
3584 if !cursor.goto_parent() {
3585 return false;
3586 }
3587 continue;
3588 } else if ascending {
3589 return true;
3590 }
3591
3592 // Descend into the current node.
3593 if cursor
3594 .goto_first_child_for_byte(query_range.start)
3595 .is_none()
3596 {
3597 return true;
3598 }
3599 }
3600 }
3601
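    /// Returns the smallest syntax node, across all syntax layers, that encloses
    /// the given range and is strictly larger than it.
    ///
    /// Sketch (illustrative; `snapshot` and a `selection_range` are assumed):
    ///
    /// ```ignore
    /// // E.g. grow a selection outward to the nearest enclosing syntax node.
    /// if let Some(node) = snapshot.syntax_ancestor(selection_range.clone()) {
    ///     let _expanded = node.byte_range();
    /// }
    /// ```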
3602 pub fn syntax_ancestor<'a, T: ToOffset>(
3603 &'a self,
3604 range: Range<T>,
3605 ) -> Option<tree_sitter::Node<'a>> {
3606 let range = range.start.to_offset(self)..range.end.to_offset(self);
3607 let mut result: Option<tree_sitter::Node<'a>> = None;
3608 for layer in self
3609 .syntax
3610 .layers_for_range(range.clone(), &self.text, true)
3611 {
3612 let mut cursor = layer.node().walk();
3613
3614 // Find the node that both contains the range and is larger than it.
3615 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3616 continue;
3617 }
3618
3619 let left_node = cursor.node();
3620 let mut layer_result = left_node;
3621
3622 // For an empty range, try to find another node immediately to the right of the range.
3623 if left_node.end_byte() == range.start {
3624 let mut right_node = None;
3625 while !cursor.goto_next_sibling() {
3626 if !cursor.goto_parent() {
3627 break;
3628 }
3629 }
3630
3631 while cursor.node().start_byte() == range.start {
3632 right_node = Some(cursor.node());
3633 if !cursor.goto_first_child() {
3634 break;
3635 }
3636 }
3637
3638 // If there is a candidate node on both sides of the (empty) range, then
3639 // decide between the two by favoring a named node over an anonymous token.
3640 // If both nodes are the same in that regard, favor the right one.
3641 if let Some(right_node) = right_node
3642 && (right_node.is_named() || !left_node.is_named())
3643 {
3644 layer_result = right_node;
3645 }
3646 }
3647
3648 if let Some(previous_result) = &result
3649 && previous_result.byte_range().len() < layer_result.byte_range().len()
3650 {
3651 continue;
3652 }
3653 result = Some(layer_result);
3654 }
3655
3656 result
3657 }
3658
3659 /// Find the previous sibling syntax node at the given range.
3660 ///
3661 /// This function locates the syntax node that precedes the node containing
3662 /// the given range. It searches hierarchically by:
3663 /// 1. Finding the node that contains the given range
3664 /// 2. Looking for the previous sibling at the same tree level
3665 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3666 ///
3667 /// Returns `None` if there is no previous sibling at any ancestor level.
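    ///
    /// Sketch (illustrative; `snapshot` and `range` are assumed, and
    /// [`Self::syntax_next_sibling`] is symmetric):
    ///
    /// ```ignore
    /// if let Some(prev) = snapshot.syntax_prev_sibling(range.clone()) {
    ///     // A node ending at or before the start of the node containing `range`.
    ///     let _prev_range = prev.byte_range();
    /// }
    /// ```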
3668 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3669 &'a self,
3670 range: Range<T>,
3671 ) -> Option<tree_sitter::Node<'a>> {
3672 let range = range.start.to_offset(self)..range.end.to_offset(self);
3673 let mut result: Option<tree_sitter::Node<'a>> = None;
3674
3675 for layer in self
3676 .syntax
3677 .layers_for_range(range.clone(), &self.text, true)
3678 {
3679 let mut cursor = layer.node().walk();
3680
3681 // Find the node that contains the range
3682 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3683 continue;
3684 }
3685
3686 // Look for the previous sibling, moving up ancestor levels if needed
3687 loop {
3688 if cursor.goto_previous_sibling() {
3689 let layer_result = cursor.node();
3690
3691 if let Some(previous_result) = &result {
3692 if previous_result.byte_range().end < layer_result.byte_range().end {
3693 continue;
3694 }
3695 }
3696 result = Some(layer_result);
3697 break;
3698 }
3699
3700 // No sibling found at this level, try moving up to parent
3701 if !cursor.goto_parent() {
3702 break;
3703 }
3704 }
3705 }
3706
3707 result
3708 }
3709
3710 /// Find the next sibling syntax node at the given range.
3711 ///
3712 /// This function locates the syntax node that follows the node containing
3713 /// the given range. It searches hierarchically by:
3714 /// 1. Finding the node that contains the given range
3715 /// 2. Looking for the next sibling at the same tree level
3716 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3717 ///
3718 /// Returns `None` if there is no next sibling at any ancestor level.
3719 pub fn syntax_next_sibling<'a, T: ToOffset>(
3720 &'a self,
3721 range: Range<T>,
3722 ) -> Option<tree_sitter::Node<'a>> {
3723 let range = range.start.to_offset(self)..range.end.to_offset(self);
3724 let mut result: Option<tree_sitter::Node<'a>> = None;
3725
3726 for layer in self
3727 .syntax
3728 .layers_for_range(range.clone(), &self.text, true)
3729 {
3730 let mut cursor = layer.node().walk();
3731
3732 // Find the node that contains the range
3733 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3734 continue;
3735 }
3736
3737 // Look for the next sibling, moving up ancestor levels if needed
3738 loop {
3739 if cursor.goto_next_sibling() {
3740 let layer_result = cursor.node();
3741
3742 if let Some(previous_result) = &result {
3743 if previous_result.byte_range().start > layer_result.byte_range().start {
3744 continue;
3745 }
3746 }
3747 result = Some(layer_result);
3748 break;
3749 }
3750
3751 // No sibling found at this level, try moving up to parent
3752 if !cursor.goto_parent() {
3753 break;
3754 }
3755 }
3756 }
3757
3758 result
3759 }
3760
    /// Returns the root syntax node within the row of the given position.
3762 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3763 let start_offset = position.to_offset(self);
3764
3765 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3766
3767 let layer = self
3768 .syntax
3769 .layers_for_range(start_offset..start_offset, &self.text, true)
3770 .next()?;
3771
3772 let mut cursor = layer.node().walk();
3773
        // Descend to the first leaf that touches `start_offset`.
3775 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3776 if cursor.node().end_byte() == start_offset {
3777 cursor.goto_next_sibling();
3778 }
3779 }
3780
3781 // Ascend to the root node within the same row.
3782 while cursor.goto_parent() {
3783 if cursor.node().start_position().row != row {
3784 break;
3785 }
3786 }
3787
3788 Some(cursor.node())
3789 }
3790
3791 /// Returns the outline for the buffer.
3792 ///
3793 /// This method allows passing an optional [`SyntaxTheme`] to
3794 /// syntax-highlight the returned symbols.
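    ///
    /// Sketch (illustrative; `snapshot` is assumed, and `None` skips highlighting):
    ///
    /// ```ignore
    /// let outline = snapshot.outline(None);
    /// for item in &outline.items {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```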
3795 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3796 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3797 }
3798
3799 /// Returns all the symbols that contain the given position.
3800 ///
3801 /// This method allows passing an optional [`SyntaxTheme`] to
3802 /// syntax-highlight the returned symbols.
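    ///
    /// For example, a breadcrumb-like label could be built as follows (sketch;
    /// `snapshot` and `offset` are assumed):
    ///
    /// ```ignore
    /// let breadcrumb = snapshot
    ///     .symbols_containing(offset, None)
    ///     .into_iter()
    ///     .map(|item| item.text)
    ///     .collect::<Vec<_>>()
    ///     .join(" > ");
    /// ```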
3803 pub fn symbols_containing<T: ToOffset>(
3804 &self,
3805 position: T,
3806 theme: Option<&SyntaxTheme>,
3807 ) -> Vec<OutlineItem<Anchor>> {
3808 let position = position.to_offset(self);
3809 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3810 let end = self.clip_offset(position + 1, Bias::Right);
3811 let mut items = self.outline_items_containing(start..end, false, theme);
3812 let mut prev_depth = None;
3813 items.retain(|item| {
3814 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3815 prev_depth = Some(item.depth);
3816 result
3817 });
3818 items
3819 }
3820
3821 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3822 let range = range.to_offset(self);
3823 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3824 grammar.outline_config.as_ref().map(|c| &c.query)
3825 });
3826 let configs = matches
3827 .grammars()
3828 .iter()
3829 .map(|g| g.outline_config.as_ref().unwrap())
3830 .collect::<Vec<_>>();
3831
3832 while let Some(mat) = matches.peek() {
3833 let config = &configs[mat.grammar_index];
3834 let containing_item_node = maybe!({
3835 let item_node = mat.captures.iter().find_map(|cap| {
3836 if cap.index == config.item_capture_ix {
3837 Some(cap.node)
3838 } else {
3839 None
3840 }
3841 })?;
3842
3843 let item_byte_range = item_node.byte_range();
3844 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3845 None
3846 } else {
3847 Some(item_node)
3848 }
3849 });
3850
3851 if let Some(item_node) = containing_item_node {
3852 return Some(
3853 Point::from_ts_point(item_node.start_position())
3854 ..Point::from_ts_point(item_node.end_position()),
3855 );
3856 }
3857
3858 matches.advance();
3859 }
3860 None
3861 }
3862
3863 pub fn outline_items_containing<T: ToOffset>(
3864 &self,
3865 range: Range<T>,
3866 include_extra_context: bool,
3867 theme: Option<&SyntaxTheme>,
3868 ) -> Vec<OutlineItem<Anchor>> {
3869 self.outline_items_containing_internal(
3870 range,
3871 include_extra_context,
3872 theme,
3873 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3874 )
3875 }
3876
3877 pub fn outline_items_as_points_containing<T: ToOffset>(
3878 &self,
3879 range: Range<T>,
3880 include_extra_context: bool,
3881 theme: Option<&SyntaxTheme>,
3882 ) -> Vec<OutlineItem<Point>> {
3883 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3884 range
3885 })
3886 }
3887
3888 fn outline_items_containing_internal<T: ToOffset, U>(
3889 &self,
3890 range: Range<T>,
3891 include_extra_context: bool,
3892 theme: Option<&SyntaxTheme>,
3893 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3894 ) -> Vec<OutlineItem<U>> {
3895 let range = range.to_offset(self);
3896 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3897 grammar.outline_config.as_ref().map(|c| &c.query)
3898 });
3899
3900 let mut items = Vec::new();
3901 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3902 while let Some(mat) = matches.peek() {
3903 let config = matches.grammars()[mat.grammar_index]
3904 .outline_config
3905 .as_ref()
3906 .unwrap();
3907 if let Some(item) =
3908 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3909 {
3910 items.push(item);
3911 } else if let Some(capture) = mat
3912 .captures
3913 .iter()
3914 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3915 {
3916 let capture_range = capture.node.start_position()..capture.node.end_position();
3917 let mut capture_row_range =
3918 capture_range.start.row as u32..capture_range.end.row as u32;
3919 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3920 {
3921 capture_row_range.end -= 1;
3922 }
3923 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3924 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3925 last_row_range.end = capture_row_range.end;
3926 } else {
3927 annotation_row_ranges.push(capture_row_range);
3928 }
3929 } else {
3930 annotation_row_ranges.push(capture_row_range);
3931 }
3932 }
3933 matches.advance();
3934 }
3935
3936 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3937
3938 // Assign depths based on containment relationships and convert to anchors.
3939 let mut item_ends_stack = Vec::<Point>::new();
3940 let mut anchor_items = Vec::new();
3941 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3942 for item in items {
3943 while let Some(last_end) = item_ends_stack.last().copied() {
3944 if last_end < item.range.end {
3945 item_ends_stack.pop();
3946 } else {
3947 break;
3948 }
3949 }
3950
3951 let mut annotation_row_range = None;
3952 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3953 let row_preceding_item = item.range.start.row.saturating_sub(1);
3954 if next_annotation_row_range.end < row_preceding_item {
3955 annotation_row_ranges.next();
3956 } else {
3957 if next_annotation_row_range.end == row_preceding_item {
3958 annotation_row_range = Some(next_annotation_row_range.clone());
3959 annotation_row_ranges.next();
3960 }
3961 break;
3962 }
3963 }
3964
3965 anchor_items.push(OutlineItem {
3966 depth: item_ends_stack.len(),
3967 range: range_callback(self, item.range.clone()),
3968 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3969 text: item.text,
3970 highlight_ranges: item.highlight_ranges,
3971 name_ranges: item.name_ranges,
3972 body_range: item.body_range.map(|r| range_callback(self, r)),
3973 annotation_range: annotation_row_range.map(|annotation_range| {
3974 let point_range = Point::new(annotation_range.start, 0)
3975 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3976 range_callback(self, point_range)
3977 }),
3978 });
3979 item_ends_stack.push(item.range.end);
3980 }
3981
3982 anchor_items
3983 }
3984
3985 fn next_outline_item(
3986 &self,
3987 config: &OutlineConfig,
3988 mat: &SyntaxMapMatch,
3989 range: &Range<usize>,
3990 include_extra_context: bool,
3991 theme: Option<&SyntaxTheme>,
3992 ) -> Option<OutlineItem<Point>> {
3993 let item_node = mat.captures.iter().find_map(|cap| {
3994 if cap.index == config.item_capture_ix {
3995 Some(cap.node)
3996 } else {
3997 None
3998 }
3999 })?;
4000
4001 let item_byte_range = item_node.byte_range();
4002 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4003 return None;
4004 }
4005 let item_point_range = Point::from_ts_point(item_node.start_position())
4006 ..Point::from_ts_point(item_node.end_position());
4007
4008 let mut open_point = None;
4009 let mut close_point = None;
4010
4011 let mut buffer_ranges = Vec::new();
4012 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4013 let mut range = node.start_byte()..node.end_byte();
4014 let start = node.start_position();
4015 if node.end_position().row > start.row {
4016 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4017 }
4018
4019 if !range.is_empty() {
4020 buffer_ranges.push((range, node_is_name));
4021 }
4022 };
4023
4024 for capture in mat.captures {
4025 if capture.index == config.name_capture_ix {
4026 add_to_buffer_ranges(capture.node, true);
4027 } else if Some(capture.index) == config.context_capture_ix
4028 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4029 {
4030 add_to_buffer_ranges(capture.node, false);
4031 } else {
4032 if Some(capture.index) == config.open_capture_ix {
4033 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4034 } else if Some(capture.index) == config.close_capture_ix {
4035 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4036 }
4037 }
4038 }
4039
4040 if buffer_ranges.is_empty() {
4041 return None;
4042 }
4043 let source_range_for_text =
4044 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4045
4046 let mut text = String::new();
4047 let mut highlight_ranges = Vec::new();
4048 let mut name_ranges = Vec::new();
4049 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4050 let mut last_buffer_range_end = 0;
4051 for (buffer_range, is_name) in buffer_ranges {
4052 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4053 if space_added {
4054 text.push(' ');
4055 }
4056 let before_append_len = text.len();
4057 let mut offset = buffer_range.start;
4058 chunks.seek(buffer_range.clone());
4059 for mut chunk in chunks.by_ref() {
4060 if chunk.text.len() > buffer_range.end - offset {
4061 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4062 offset = buffer_range.end;
4063 } else {
4064 offset += chunk.text.len();
4065 }
4066 let style = chunk
4067 .syntax_highlight_id
4068 .zip(theme)
4069 .and_then(|(highlight, theme)| highlight.style(theme));
4070 if let Some(style) = style {
4071 let start = text.len();
4072 let end = start + chunk.text.len();
4073 highlight_ranges.push((start..end, style));
4074 }
4075 text.push_str(chunk.text);
4076 if offset >= buffer_range.end {
4077 break;
4078 }
4079 }
4080 if is_name {
4081 let after_append_len = text.len();
4082 let start = if space_added && !name_ranges.is_empty() {
4083 before_append_len - 1
4084 } else {
4085 before_append_len
4086 };
4087 name_ranges.push(start..after_append_len);
4088 }
4089 last_buffer_range_end = buffer_range.end;
4090 }
4091
4092 Some(OutlineItem {
4093 depth: 0, // We'll calculate the depth later
4094 range: item_point_range,
4095 source_range_for_text: source_range_for_text.to_point(self),
4096 text,
4097 highlight_ranges,
4098 name_ranges,
4099 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4100 annotation_range: None,
4101 })
4102 }
4103
4104 pub fn function_body_fold_ranges<T: ToOffset>(
4105 &self,
4106 within: Range<T>,
4107 ) -> impl Iterator<Item = Range<usize>> + '_ {
4108 self.text_object_ranges(within, TreeSitterOptions::default())
4109 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4110 }
4111
    /// For each grammar in the buffer, runs the [`tree_sitter::Query`] returned
    /// by the provided callback against the given range.
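    ///
    /// A hedged sketch mirroring how this buffer drives its own outline query
    /// (`snapshot` is assumed; not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here.
    ///     matches.advance();
    /// }
    /// ```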
4114 pub fn matches(
4115 &self,
4116 range: Range<usize>,
4117 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4118 ) -> SyntaxMapMatches<'_> {
4119 self.syntax.matches(range, self, query)
4120 }
4121
4122 pub fn all_bracket_ranges(
4123 &self,
4124 range: Range<usize>,
4125 ) -> impl Iterator<Item = BracketMatch> + '_ {
4126 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4127 grammar.brackets_config.as_ref().map(|c| &c.query)
4128 });
4129 let configs = matches
4130 .grammars()
4131 .iter()
4132 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4133 .collect::<Vec<_>>();
4134
4135 iter::from_fn(move || {
4136 while let Some(mat) = matches.peek() {
4137 let mut open = None;
4138 let mut close = None;
4139 let config = &configs[mat.grammar_index];
4140 let pattern = &config.patterns[mat.pattern_index];
4141 for capture in mat.captures {
4142 if capture.index == config.open_capture_ix {
4143 open = Some(capture.node.byte_range());
4144 } else if capture.index == config.close_capture_ix {
4145 close = Some(capture.node.byte_range());
4146 }
4147 }
4148
4149 matches.advance();
4150
4151 let Some((open_range, close_range)) = open.zip(close) else {
4152 continue;
4153 };
4154
4155 let bracket_range = open_range.start..=close_range.end;
4156 if !bracket_range.overlaps(&range) {
4157 continue;
4158 }
4159
4160 return Some(BracketMatch {
4161 open_range,
4162 close_range,
4163 newline_only: pattern.newline_only,
4164 });
4165 }
4166 None
4167 })
4168 }
4169
    /// Returns bracket range pairs overlapping or adjacent to `range`.
4171 pub fn bracket_ranges<T: ToOffset>(
4172 &self,
4173 range: Range<T>,
4174 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Expand the range by one position on each side so that adjacent bracket pairs are also matched.
4176 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4177 self.all_bracket_ranges(range)
4178 .filter(|pair| !pair.newline_only)
4179 }
4180
4181 pub fn debug_variables_query<T: ToOffset>(
4182 &self,
4183 range: Range<T>,
4184 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4185 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4186
4187 let mut matches = self.syntax.matches_with_options(
4188 range.clone(),
4189 &self.text,
4190 TreeSitterOptions::default(),
4191 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4192 );
4193
4194 let configs = matches
4195 .grammars()
4196 .iter()
4197 .map(|grammar| grammar.debug_variables_config.as_ref())
4198 .collect::<Vec<_>>();
4199
4200 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4201
4202 iter::from_fn(move || {
4203 loop {
4204 while let Some(capture) = captures.pop() {
4205 if capture.0.overlaps(&range) {
4206 return Some(capture);
4207 }
4208 }
4209
4210 let mat = matches.peek()?;
4211
4212 let Some(config) = configs[mat.grammar_index].as_ref() else {
4213 matches.advance();
4214 continue;
4215 };
4216
4217 for capture in mat.captures {
4218 let Some(ix) = config
4219 .objects_by_capture_ix
4220 .binary_search_by_key(&capture.index, |e| e.0)
4221 .ok()
4222 else {
4223 continue;
4224 };
4225 let text_object = config.objects_by_capture_ix[ix].1;
4226 let byte_range = capture.node.byte_range();
4227
4228 let mut found = false;
4229 for (range, existing) in captures.iter_mut() {
4230 if existing == &text_object {
4231 range.start = range.start.min(byte_range.start);
4232 range.end = range.end.max(byte_range.end);
4233 found = true;
4234 break;
4235 }
4236 }
4237
4238 if !found {
4239 captures.push((byte_range, text_object));
4240 }
4241 }
4242
4243 matches.advance();
4244 }
4245 })
4246 }
4247
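    /// Returns the text object ranges (such as [`TextObject::InsideFunction`])
    /// that intersect the given range, as defined by each grammar's text-objects query.
    ///
    /// Sketch (illustrative; `snapshot` and `offset` are assumed):
    ///
    /// ```ignore
    /// let inside_function = snapshot
    ///     .text_object_ranges(offset..offset, TreeSitterOptions::default())
    ///     .find(|(_, obj)| *obj == TextObject::InsideFunction)
    ///     .map(|(range, _)| range);
    /// ```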
4248 pub fn text_object_ranges<T: ToOffset>(
4249 &self,
4250 range: Range<T>,
4251 options: TreeSitterOptions,
4252 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4253 let range =
4254 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4255
4256 let mut matches =
4257 self.syntax
4258 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4259 grammar.text_object_config.as_ref().map(|c| &c.query)
4260 });
4261
4262 let configs = matches
4263 .grammars()
4264 .iter()
4265 .map(|grammar| grammar.text_object_config.as_ref())
4266 .collect::<Vec<_>>();
4267
4268 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4269
4270 iter::from_fn(move || {
4271 loop {
4272 while let Some(capture) = captures.pop() {
4273 if capture.0.overlaps(&range) {
4274 return Some(capture);
4275 }
4276 }
4277
4278 let mat = matches.peek()?;
4279
4280 let Some(config) = configs[mat.grammar_index].as_ref() else {
4281 matches.advance();
4282 continue;
4283 };
4284
4285 for capture in mat.captures {
4286 let Some(ix) = config
4287 .text_objects_by_capture_ix
4288 .binary_search_by_key(&capture.index, |e| e.0)
4289 .ok()
4290 else {
4291 continue;
4292 };
4293 let text_object = config.text_objects_by_capture_ix[ix].1;
4294 let byte_range = capture.node.byte_range();
4295
4296 let mut found = false;
4297 for (range, existing) in captures.iter_mut() {
4298 if existing == &text_object {
4299 range.start = range.start.min(byte_range.start);
4300 range.end = range.end.max(byte_range.end);
4301 found = true;
4302 break;
4303 }
4304 }
4305
4306 if !found {
4307 captures.push((byte_range, text_object));
4308 }
4309 }
4310
4311 matches.advance();
4312 }
4313 })
4314 }
4315
4316 /// Returns enclosing bracket ranges containing the given range
4317 pub fn enclosing_bracket_ranges<T: ToOffset>(
4318 &self,
4319 range: Range<T>,
4320 ) -> impl Iterator<Item = BracketMatch> + '_ {
4321 let range = range.start.to_offset(self)..range.end.to_offset(self);
4322
4323 self.bracket_ranges(range.clone()).filter(move |pair| {
4324 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4325 })
4326 }
4327
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
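    ///
    /// Sketch of a "match the bracket at the cursor" lookup (illustrative;
    /// `snapshot` and `offset` are assumed):
    ///
    /// ```ignore
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(offset..offset, None) {
    ///     // Highlight `open` and `close`, or jump between them.
    /// }
    /// ```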
4331 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4332 &self,
4333 range: Range<T>,
4334 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4335 ) -> Option<(Range<usize>, Range<usize>)> {
4336 let range = range.start.to_offset(self)..range.end.to_offset(self);
4337
4338 // Get the ranges of the innermost pair of brackets.
4339 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4340
4341 for pair in self.enclosing_bracket_ranges(range) {
4342 if let Some(range_filter) = range_filter
4343 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4344 {
4345 continue;
4346 }
4347
4348 let len = pair.close_range.end - pair.open_range.start;
4349
4350 if let Some((existing_open, existing_close)) = &result {
4351 let existing_len = existing_close.end - existing_open.start;
4352 if len > existing_len {
4353 continue;
4354 }
4355 }
4356
4357 result = Some((pair.open_range, pair.close_range));
4358 }
4359
4360 result
4361 }
4362
4363 /// Returns anchor ranges for any matches of the redaction query.
4364 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4365 /// will be run on the relevant section of the buffer.
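    ///
    /// Sketch (illustrative; `snapshot` is assumed), e.g. to obscure values in an
    /// environment file before rendering it:
    ///
    /// ```ignore
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```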
4366 pub fn redacted_ranges<T: ToOffset>(
4367 &self,
4368 range: Range<T>,
4369 ) -> impl Iterator<Item = Range<usize>> + '_ {
4370 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4371 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4372 grammar
4373 .redactions_config
4374 .as_ref()
4375 .map(|config| &config.query)
4376 });
4377
4378 let configs = syntax_matches
4379 .grammars()
4380 .iter()
4381 .map(|grammar| grammar.redactions_config.as_ref())
4382 .collect::<Vec<_>>();
4383
4384 iter::from_fn(move || {
4385 let redacted_range = syntax_matches
4386 .peek()
4387 .and_then(|mat| {
4388 configs[mat.grammar_index].and_then(|config| {
4389 mat.captures
4390 .iter()
4391 .find(|capture| capture.index == config.redaction_capture_ix)
4392 })
4393 })
4394 .map(|mat| mat.node.byte_range());
4395 syntax_matches.advance();
4396 redacted_range
4397 })
4398 }
4399
4400 pub fn injections_intersecting_range<T: ToOffset>(
4401 &self,
4402 range: Range<T>,
4403 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4404 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4405
4406 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4407 grammar
4408 .injection_config
4409 .as_ref()
4410 .map(|config| &config.query)
4411 });
4412
4413 let configs = syntax_matches
4414 .grammars()
4415 .iter()
4416 .map(|grammar| grammar.injection_config.as_ref())
4417 .collect::<Vec<_>>();
4418
4419 iter::from_fn(move || {
4420 let ranges = syntax_matches.peek().and_then(|mat| {
4421 let config = &configs[mat.grammar_index]?;
4422 let content_capture_range = mat.captures.iter().find_map(|capture| {
4423 if capture.index == config.content_capture_ix {
4424 Some(capture.node.byte_range())
4425 } else {
4426 None
4427 }
4428 })?;
4429 let language = self.language_at(content_capture_range.start)?;
4430 Some((content_capture_range, language))
4431 });
4432 syntax_matches.advance();
4433 ranges
4434 })
4435 }
4436
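    /// Returns the runnables (tests, binaries, and similar) whose captures intersect
    /// the given offset range, based on each grammar's runnables query.
    ///
    /// Sketch (illustrative; `snapshot` is assumed):
    ///
    /// ```ignore
    /// for runnable in snapshot.runnable_ranges(0..snapshot.len()) {
    ///     // `run_range` marks where a run indicator could be displayed.
    ///     let _run_range = runnable.run_range;
    ///     let _tags = runnable.runnable.tags;
    /// }
    /// ```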
4437 pub fn runnable_ranges(
4438 &self,
4439 offset_range: Range<usize>,
4440 ) -> impl Iterator<Item = RunnableRange> + '_ {
4441 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4442 grammar.runnable_config.as_ref().map(|config| &config.query)
4443 });
4444
4445 let test_configs = syntax_matches
4446 .grammars()
4447 .iter()
4448 .map(|grammar| grammar.runnable_config.as_ref())
4449 .collect::<Vec<_>>();
4450
4451 iter::from_fn(move || {
4452 loop {
4453 let mat = syntax_matches.peek()?;
4454
4455 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4456 let mut run_range = None;
4457 let full_range = mat.captures.iter().fold(
4458 Range {
4459 start: usize::MAX,
4460 end: 0,
4461 },
4462 |mut acc, next| {
4463 let byte_range = next.node.byte_range();
4464 if acc.start > byte_range.start {
4465 acc.start = byte_range.start;
4466 }
4467 if acc.end < byte_range.end {
4468 acc.end = byte_range.end;
4469 }
4470 acc
4471 },
4472 );
4473 if full_range.start > full_range.end {
4474 // We did not find a full spanning range of this match.
4475 return None;
4476 }
4477 let extra_captures: SmallVec<[_; 1]> =
4478 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4479 test_configs
4480 .extra_captures
4481 .get(capture.index as usize)
4482 .cloned()
4483 .and_then(|tag_name| match tag_name {
4484 RunnableCapture::Named(name) => {
4485 Some((capture.node.byte_range(), name))
4486 }
4487 RunnableCapture::Run => {
4488 let _ = run_range.insert(capture.node.byte_range());
4489 None
4490 }
4491 })
4492 }));
4493 let run_range = run_range?;
4494 let tags = test_configs
4495 .query
4496 .property_settings(mat.pattern_index)
4497 .iter()
4498 .filter_map(|property| {
4499 if *property.key == *"tag" {
4500 property
4501 .value
4502 .as_ref()
4503 .map(|value| RunnableTag(value.to_string().into()))
4504 } else {
4505 None
4506 }
4507 })
4508 .collect();
4509 let extra_captures = extra_captures
4510 .into_iter()
4511 .map(|(range, name)| {
4512 (
4513 name.to_string(),
4514 self.text_for_range(range).collect::<String>(),
4515 )
4516 })
4517 .collect();
4518 // All tags should have the same range.
4519 Some(RunnableRange {
4520 run_range,
4521 full_range,
4522 runnable: Runnable {
4523 tags,
4524 language: mat.language,
4525 buffer: self.remote_id(),
4526 },
4527 extra_captures,
4528 buffer_id: self.remote_id(),
4529 })
4530 });
4531
4532 syntax_matches.advance();
4533 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` above returns `None`. However, a
                    // match without a run marker shouldn't end this iterator, so in that case we
                    // simply loop around to the next match.
4536 return test_range;
4537 }
4538 }
4539 })
4540 }
4541
    /// Returns the selections of peers whose selections intersect the given range.
    /// When `include_local` is `false`, the local replica's selections are excluded.
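    ///
    /// Sketch (illustrative; `snapshot` and an anchor `range` are assumed):
    ///
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(range, false)
    /// {
    ///     let _selection_count = selections.count();
    /// }
    /// ```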
4543 #[allow(clippy::type_complexity)]
4544 pub fn selections_in_range(
4545 &self,
4546 range: Range<Anchor>,
4547 include_local: bool,
4548 ) -> impl Iterator<
4549 Item = (
4550 ReplicaId,
4551 bool,
4552 CursorShape,
4553 impl Iterator<Item = &Selection<Anchor>> + '_,
4554 ),
4555 > + '_ {
4556 self.remote_selections
4557 .iter()
4558 .filter(move |(replica_id, set)| {
4559 (include_local || **replica_id != self.text.replica_id())
4560 && !set.selections.is_empty()
4561 })
4562 .map(move |(replica_id, set)| {
4563 let start_ix = match set.selections.binary_search_by(|probe| {
4564 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4565 }) {
4566 Ok(ix) | Err(ix) => ix,
4567 };
4568 let end_ix = match set.selections.binary_search_by(|probe| {
4569 probe.start.cmp(&range.end, self).then(Ordering::Less)
4570 }) {
4571 Ok(ix) | Err(ix) => ix,
4572 };
4573
4574 (
4575 *replica_id,
4576 set.line_mode,
4577 set.cursor_shape,
4578 set.selections[start_ix..end_ix].iter(),
4579 )
4580 })
4581 }
4582
    /// Returns whether the buffer contains any diagnostics.
4584 pub fn has_diagnostics(&self) -> bool {
4585 !self.diagnostics.is_empty()
4586 }
4587
4588 /// Returns all the diagnostics intersecting the given range.
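    ///
    /// Sketch listing error entries as point ranges (illustrative; `snapshot` is assumed):
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, Point>(0..snapshot.len(), false) {
    ///     if entry.diagnostic.severity == DiagnosticSeverity::ERROR {
    ///         println!("{:?}: {}", entry.range, entry.diagnostic.message);
    ///     }
    /// }
    /// ```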
4589 pub fn diagnostics_in_range<'a, T, O>(
4590 &'a self,
4591 search_range: Range<T>,
4592 reversed: bool,
4593 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4594 where
4595 T: 'a + Clone + ToOffset,
4596 O: 'a + FromAnchor,
4597 {
4598 let mut iterators: Vec<_> = self
4599 .diagnostics
4600 .iter()
4601 .map(|(_, collection)| {
4602 collection
4603 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4604 .peekable()
4605 })
4606 .collect();
4607
4608 std::iter::from_fn(move || {
4609 let (next_ix, _) = iterators
4610 .iter_mut()
4611 .enumerate()
4612 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4613 .min_by(|(_, a), (_, b)| {
4614 let cmp = a
4615 .range
4616 .start
4617 .cmp(&b.range.start, self)
4618 // when range is equal, sort by diagnostic severity
4619 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4620 // and stabilize order with group_id
4621 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4622 if reversed { cmp.reverse() } else { cmp }
4623 })?;
4624 iterators[next_ix]
4625 .next()
4626 .map(
4627 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4628 diagnostic,
4629 range: FromAnchor::from_anchor(&range.start, self)
4630 ..FromAnchor::from_anchor(&range.end, self),
4631 },
4632 )
4633 })
4634 }
4635
4636 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4637 /// should be used instead.
4638 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4639 &self.diagnostics
4640 }
4641
4642 /// Returns all the diagnostic groups associated with the given
4643 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
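    ///
    /// Sketch (illustrative; `snapshot` is assumed):
    ///
    /// ```ignore
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let _primary = &group.entries[group.primary_ix];
    /// }
    /// ```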
4645 pub fn diagnostic_groups(
4646 &self,
4647 language_server_id: Option<LanguageServerId>,
4648 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4649 let mut groups = Vec::new();
4650
4651 if let Some(language_server_id) = language_server_id {
4652 if let Ok(ix) = self
4653 .diagnostics
4654 .binary_search_by_key(&language_server_id, |e| e.0)
4655 {
4656 self.diagnostics[ix]
4657 .1
4658 .groups(language_server_id, &mut groups, self);
4659 }
4660 } else {
4661 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4662 diagnostics.groups(*language_server_id, &mut groups, self);
4663 }
4664 }
4665
4666 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4667 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4668 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4669 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4670 });
4671
4672 groups
4673 }
4674
4675 /// Returns an iterator over the diagnostics for the given group.
4676 pub fn diagnostic_group<O>(
4677 &self,
4678 group_id: usize,
4679 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4680 where
4681 O: FromAnchor + 'static,
4682 {
4683 self.diagnostics
4684 .iter()
4685 .flat_map(move |(_, set)| set.group(group_id, self))
4686 }
4687
4688 /// An integer version number that accounts for all updates besides
4689 /// the buffer's text itself (which is versioned via a version vector).
4690 pub fn non_text_state_update_count(&self) -> usize {
4691 self.non_text_state_update_count
4692 }
4693
4694 /// An integer version that changes when the buffer's syntax changes.
4695 pub fn syntax_update_count(&self) -> usize {
4696 self.syntax.update_count()
4697 }
4698
    /// Returns a snapshot of the underlying file.
4700 pub fn file(&self) -> Option<&Arc<dyn File>> {
4701 self.file.as_ref()
4702 }
4703
4704 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4705 if let Some(file) = self.file() {
4706 if file.path().file_name().is_none() || include_root {
4707 Some(file.full_path(cx).to_string_lossy().into_owned())
4708 } else {
4709 Some(file.path().display(file.path_style(cx)).to_string())
4710 }
4711 } else {
4712 None
4713 }
4714 }
4715
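    /// Collects the distinct words within `query.range`, keyed by their text and
    /// optionally filtered by `query.fuzzy_contents`.
    ///
    /// Sketch (illustrative; `snapshot` is assumed), e.g. to seed word-based completions:
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("req"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```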
4716 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4717 let query_str = query.fuzzy_contents;
4718 if query_str.is_some_and(|query| query.is_empty()) {
4719 return BTreeMap::default();
4720 }
4721
4722 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4723 language,
4724 override_id: None,
4725 }));
4726
4727 let mut query_ix = 0;
4728 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4729 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4730
4731 let mut words = BTreeMap::default();
4732 let mut current_word_start_ix = None;
4733 let mut chunk_ix = query.range.start;
4734 for chunk in self.chunks(query.range, false) {
4735 for (i, c) in chunk.text.char_indices() {
4736 let ix = chunk_ix + i;
4737 if classifier.is_word(c) {
4738 if current_word_start_ix.is_none() {
4739 current_word_start_ix = Some(ix);
4740 }
4741
4742 if let Some(query_chars) = &query_chars
4743 && query_ix < query_len
4744 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4745 {
4746 query_ix += 1;
4747 }
4748 continue;
4749 } else if let Some(word_start) = current_word_start_ix.take()
4750 && query_ix == query_len
4751 {
4752 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4753 let mut word_text = self.text_for_range(word_start..ix).peekable();
4754 let first_char = word_text
4755 .peek()
4756 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words and words that start with a digit.
4758 if !query.skip_digits
4759 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4760 {
4761 words.insert(word_text.collect(), word_range);
4762 }
4763 }
4764 query_ix = 0;
4765 }
4766 chunk_ix += chunk.text.len();
4767 }
4768
4769 words
4770 }
4771}
4772
4773pub struct WordsQuery<'a> {
    /// Only return words that contain all of the characters in this fuzzy string, in order.
4775 pub fuzzy_contents: Option<&'a str>,
4776 /// Skips words that start with a digit.
4777 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4779 pub range: Range<usize>,
4780}
4781
4782fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4783 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4784}
4785
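/// Measures the leading whitespace of the given character stream. For example,
/// `indent_size_for_text("\t\tfoo".chars())` yields two tabs, and
/// `indent_size_for_text("    bar".chars())` yields four spaces.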
4786fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4787 let mut result = IndentSize::spaces(0);
4788 for c in text {
4789 let kind = match c {
4790 ' ' => IndentKind::Space,
4791 '\t' => IndentKind::Tab,
4792 _ => break,
4793 };
4794 if result.len == 0 {
4795 result.kind = kind;
4796 }
4797 result.len += 1;
4798 }
4799 result
4800}
4801
4802impl Clone for BufferSnapshot {
4803 fn clone(&self) -> Self {
4804 Self {
4805 text: self.text.clone(),
4806 syntax: self.syntax.clone(),
4807 file: self.file.clone(),
4808 remote_selections: self.remote_selections.clone(),
4809 diagnostics: self.diagnostics.clone(),
4810 language: self.language.clone(),
4811 non_text_state_update_count: self.non_text_state_update_count,
4812 }
4813 }
4814}
4815
4816impl Deref for BufferSnapshot {
4817 type Target = text::BufferSnapshot;
4818
4819 fn deref(&self) -> &Self::Target {
4820 &self.text
4821 }
4822}
4823
4824unsafe impl Send for BufferChunks<'_> {}
4825
4826impl<'a> BufferChunks<'a> {
4827 pub(crate) fn new(
4828 text: &'a Rope,
4829 range: Range<usize>,
4830 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4831 diagnostics: bool,
4832 buffer_snapshot: Option<&'a BufferSnapshot>,
4833 ) -> Self {
4834 let mut highlights = None;
4835 if let Some((captures, highlight_maps)) = syntax {
4836 highlights = Some(BufferChunkHighlights {
4837 captures,
4838 next_capture: None,
4839 stack: Default::default(),
4840 highlight_maps,
4841 })
4842 }
4843
4844 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4845 let chunks = text.chunks_in_range(range.clone());
4846
4847 let mut this = BufferChunks {
4848 range,
4849 buffer_snapshot,
4850 chunks,
4851 diagnostic_endpoints,
4852 error_depth: 0,
4853 warning_depth: 0,
4854 information_depth: 0,
4855 hint_depth: 0,
4856 unnecessary_depth: 0,
4857 underline: true,
4858 highlights,
4859 };
4860 this.initialize_diagnostic_endpoints();
4861 this
4862 }
4863
    /// Seeks to the given byte range in the buffer.
4865 pub fn seek(&mut self, range: Range<usize>) {
4866 let old_range = std::mem::replace(&mut self.range, range.clone());
4867 self.chunks.set_range(self.range.clone());
4868 if let Some(highlights) = self.highlights.as_mut() {
4869 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4870 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4871 highlights
4872 .stack
4873 .retain(|(end_offset, _)| *end_offset > range.start);
4874 if let Some(capture) = &highlights.next_capture
4875 && range.start >= capture.node.start_byte()
4876 {
4877 let next_capture_end = capture.node.end_byte();
4878 if range.start < next_capture_end {
4879 highlights.stack.push((
4880 next_capture_end,
4881 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4882 ));
4883 }
4884 highlights.next_capture.take();
4885 }
4886 } else if let Some(snapshot) = self.buffer_snapshot {
4887 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4888 *highlights = BufferChunkHighlights {
4889 captures,
4890 next_capture: None,
4891 stack: Default::default(),
4892 highlight_maps,
4893 };
4894 } else {
4895 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4896 // Seeking such BufferChunks is not supported.
4897 debug_assert!(
4898 false,
4899 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4900 );
4901 }
4902
4903 highlights.captures.set_byte_range(self.range.clone());
4904 self.initialize_diagnostic_endpoints();
4905 }
4906 }
4907
4908 fn initialize_diagnostic_endpoints(&mut self) {
4909 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4910 && let Some(buffer) = self.buffer_snapshot
4911 {
4912 let mut diagnostic_endpoints = Vec::new();
4913 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4914 diagnostic_endpoints.push(DiagnosticEndpoint {
4915 offset: entry.range.start,
4916 is_start: true,
4917 severity: entry.diagnostic.severity,
4918 is_unnecessary: entry.diagnostic.is_unnecessary,
4919 underline: entry.diagnostic.underline,
4920 });
4921 diagnostic_endpoints.push(DiagnosticEndpoint {
4922 offset: entry.range.end,
4923 is_start: false,
4924 severity: entry.diagnostic.severity,
4925 is_unnecessary: entry.diagnostic.is_unnecessary,
4926 underline: entry.diagnostic.underline,
4927 });
4928 }
4929 diagnostic_endpoints
4930 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4931 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4932 self.hint_depth = 0;
4933 self.error_depth = 0;
4934 self.warning_depth = 0;
4935 self.information_depth = 0;
4936 }
4937 }
4938
4939 /// The current byte offset in the buffer.
4940 pub fn offset(&self) -> usize {
4941 self.range.start
4942 }
4943
4944 pub fn range(&self) -> Range<usize> {
4945 self.range.clone()
4946 }
4947
4948 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4949 let depth = match endpoint.severity {
4950 DiagnosticSeverity::ERROR => &mut self.error_depth,
4951 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4952 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4953 DiagnosticSeverity::HINT => &mut self.hint_depth,
4954 _ => return,
4955 };
4956 if endpoint.is_start {
4957 *depth += 1;
4958 } else {
4959 *depth -= 1;
4960 }
4961
4962 if endpoint.is_unnecessary {
4963 if endpoint.is_start {
4964 self.unnecessary_depth += 1;
4965 } else {
4966 self.unnecessary_depth -= 1;
4967 }
4968 }
4969 }
4970
4971 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4972 if self.error_depth > 0 {
4973 Some(DiagnosticSeverity::ERROR)
4974 } else if self.warning_depth > 0 {
4975 Some(DiagnosticSeverity::WARNING)
4976 } else if self.information_depth > 0 {
4977 Some(DiagnosticSeverity::INFORMATION)
4978 } else if self.hint_depth > 0 {
4979 Some(DiagnosticSeverity::HINT)
4980 } else {
4981 None
4982 }
4983 }
4984
4985 fn current_code_is_unnecessary(&self) -> bool {
4986 self.unnecessary_depth > 0
4987 }
4988}
4989
4990impl<'a> Iterator for BufferChunks<'a> {
4991 type Item = Chunk<'a>;
4992
4993 fn next(&mut self) -> Option<Self::Item> {
4994 let mut next_capture_start = usize::MAX;
4995 let mut next_diagnostic_endpoint = usize::MAX;
4996
4997 if let Some(highlights) = self.highlights.as_mut() {
4998 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4999 if *parent_capture_end <= self.range.start {
5000 highlights.stack.pop();
5001 } else {
5002 break;
5003 }
5004 }
5005
5006 if highlights.next_capture.is_none() {
5007 highlights.next_capture = highlights.captures.next();
5008 }
5009
5010 while let Some(capture) = highlights.next_capture.as_ref() {
5011 if self.range.start < capture.node.start_byte() {
5012 next_capture_start = capture.node.start_byte();
5013 break;
5014 } else {
5015 let highlight_id =
5016 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5017 highlights
5018 .stack
5019 .push((capture.node.end_byte(), highlight_id));
5020 highlights.next_capture = highlights.captures.next();
5021 }
5022 }
5023 }
5024
5025 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5026 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5027 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5028 if endpoint.offset <= self.range.start {
5029 self.update_diagnostic_depths(endpoint);
5030 diagnostic_endpoints.next();
5031 self.underline = endpoint.underline;
5032 } else {
5033 next_diagnostic_endpoint = endpoint.offset;
5034 break;
5035 }
5036 }
5037 }
5038 self.diagnostic_endpoints = diagnostic_endpoints;
5039
5040 if let Some(ChunkBitmaps {
5041 text: chunk,
5042 chars: chars_map,
5043 tabs,
5044 }) = self.chunks.peek_with_bitmaps()
5045 {
5046 let chunk_start = self.range.start;
5047 let mut chunk_end = (self.chunks.offset() + chunk.len())
5048 .min(next_capture_start)
5049 .min(next_diagnostic_endpoint);
5050 let mut highlight_id = None;
5051 if let Some(highlights) = self.highlights.as_ref()
5052 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5053 {
5054 chunk_end = chunk_end.min(*parent_capture_end);
5055 highlight_id = Some(*parent_highlight_id);
5056 }
5057 let bit_start = chunk_start - self.chunks.offset();
5058 let bit_end = chunk_end - self.chunks.offset();
5059
5060 let slice = &chunk[bit_start..bit_end];
5061
5062 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5063 let tabs = (tabs >> bit_start) & mask;
5064 let chars = (chars_map >> bit_start) & mask;
5065
5066 self.range.start = chunk_end;
5067 if self.range.start == self.chunks.offset() + chunk.len() {
5068 self.chunks.next().unwrap();
5069 }
5070
5071 Some(Chunk {
5072 text: slice,
5073 syntax_highlight_id: highlight_id,
5074 underline: self.underline,
5075 diagnostic_severity: self.current_diagnostic_severity(),
5076 is_unnecessary: self.current_code_is_unnecessary(),
5077 tabs,
5078 chars,
5079 ..Chunk::default()
5080 })
5081 } else {
5082 None
5083 }
5084 }
5085}
5086
5087impl operation_queue::Operation for Operation {
5088 fn lamport_timestamp(&self) -> clock::Lamport {
5089 match self {
5090 Operation::Buffer(_) => {
5091 unreachable!("buffer operations should never be deferred at this layer")
5092 }
5093 Operation::UpdateDiagnostics {
5094 lamport_timestamp, ..
5095 }
5096 | Operation::UpdateSelections {
5097 lamport_timestamp, ..
5098 }
5099 | Operation::UpdateCompletionTriggers {
5100 lamport_timestamp, ..
5101 }
5102 | Operation::UpdateLineEnding {
5103 lamport_timestamp, ..
5104 } => *lamport_timestamp,
5105 }
5106 }
5107}
5108
5109impl Default for Diagnostic {
5110 fn default() -> Self {
5111 Self {
5112 source: Default::default(),
5113 source_kind: DiagnosticSourceKind::Other,
5114 code: None,
5115 code_description: None,
5116 severity: DiagnosticSeverity::ERROR,
5117 message: Default::default(),
5118 markdown: None,
5119 group_id: 0,
5120 is_primary: false,
5121 is_disk_based: false,
5122 is_unnecessary: false,
5123 underline: true,
5124 data: None,
5125 }
5126 }
5127}
5128
5129impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5131 pub fn spaces(len: u32) -> Self {
5132 Self {
5133 len,
5134 kind: IndentKind::Space,
5135 }
5136 }
5137
5138 /// Returns an [`IndentSize`] representing a tab.
5139 pub fn tab() -> Self {
5140 Self {
5141 len: 1,
5142 kind: IndentKind::Tab,
5143 }
5144 }
5145
5146 /// An iterator over the characters represented by this [`IndentSize`].
5147 pub fn chars(&self) -> impl Iterator<Item = char> {
5148 iter::repeat(self.char()).take(self.len as usize)
5149 }
5150
5151 /// The character representation of this [`IndentSize`].
5152 pub fn char(&self) -> char {
5153 match self.kind {
5154 IndentKind::Space => ' ',
5155 IndentKind::Tab => '\t',
5156 }
5157 }
5158
5159 /// Consumes the current [`IndentSize`] and returns a new one that has
5160 /// been shrunk or enlarged by the given size along the given direction.
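    ///
    /// For example, starting from four spaces, `Ordering::Greater` with
    /// `IndentSize::spaces(4)` yields eight spaces, while `Ordering::Less` with
    /// the same argument yields zero.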
5161 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5162 match direction {
5163 Ordering::Less => {
5164 if self.kind == size.kind && self.len >= size.len {
5165 self.len -= size.len;
5166 }
5167 }
5168 Ordering::Equal => {}
5169 Ordering::Greater => {
5170 if self.len == 0 {
5171 self = size;
5172 } else if self.kind == size.kind {
5173 self.len += size.len;
5174 }
5175 }
5176 }
5177 self
5178 }
5179
5180 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5181 match self.kind {
5182 IndentKind::Space => self.len as usize,
5183 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5184 }
5185 }
5186}
5187
5188#[cfg(any(test, feature = "test-support"))]
5189pub struct TestFile {
5190 pub path: Arc<RelPath>,
5191 pub root_name: String,
5192 pub local_root: Option<PathBuf>,
5193}
5194
5195#[cfg(any(test, feature = "test-support"))]
5196impl File for TestFile {
5197 fn path(&self) -> &Arc<RelPath> {
5198 &self.path
5199 }
5200
5201 fn full_path(&self, _: &gpui::App) -> PathBuf {
5202 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5203 }
5204
5205 fn as_local(&self) -> Option<&dyn LocalFile> {
5206 if self.local_root.is_some() {
5207 Some(self)
5208 } else {
5209 None
5210 }
5211 }
5212
5213 fn disk_state(&self) -> DiskState {
5214 unimplemented!()
5215 }
5216
5217 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5218 self.path().file_name().unwrap_or(self.root_name.as_ref())
5219 }
5220
5221 fn worktree_id(&self, _: &App) -> WorktreeId {
5222 WorktreeId::from_usize(0)
5223 }
5224
5225 fn to_proto(&self, _: &App) -> rpc::proto::File {
5226 unimplemented!()
5227 }
5228
5229 fn is_private(&self) -> bool {
5230 false
5231 }
5232
5233 fn path_style(&self, _cx: &App) -> PathStyle {
5234 PathStyle::local()
5235 }
5236}
5237
5238#[cfg(any(test, feature = "test-support"))]
5239impl LocalFile for TestFile {
5240 fn abs_path(&self, _cx: &App) -> PathBuf {
5241 PathBuf::from(self.local_root.as_ref().unwrap())
5242 .join(&self.root_name)
5243 .join(self.path.as_std_path())
5244 }
5245
5246 fn load(&self, _cx: &App) -> Task<Result<String>> {
5247 unimplemented!()
5248 }
5249
5250 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5251 unimplemented!()
5252 }
5253}
5254
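/// Groups an ascending sequence of row numbers into contiguous ranges, capping
/// each emitted range at `max_len` rows.
///
/// A small illustration (not compiled as a doctest):
///
/// ```ignore
/// // Rows 1, 2, 3 and 5, 6 collapse into the two ranges 1..4 and 5..7.
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
/// ```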
5255pub(crate) fn contiguous_ranges(
5256 values: impl Iterator<Item = u32>,
5257 max_len: usize,
5258) -> impl Iterator<Item = Range<u32>> {
5259 let mut values = values;
5260 let mut current_range: Option<Range<u32>> = None;
5261 std::iter::from_fn(move || {
5262 loop {
5263 if let Some(value) = values.next() {
5264 if let Some(range) = &mut current_range
5265 && value == range.end
5266 && range.len() < max_len
5267 {
5268 range.end += 1;
5269 continue;
5270 }
5271
5272 let prev_range = current_range.clone();
5273 current_range = Some(value..(value + 1));
5274 if prev_range.is_some() {
5275 return prev_range;
5276 }
5277 } else {
5278 return current_range.take();
5279 }
5280 }
5281 })
5282}
5283
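/// Classifies characters as word, whitespace, or punctuation characters, taking
/// the language scope's `word_characters` overrides into account.
///
/// Sketch (illustrative; a `scope` from [`BufferSnapshot::language_scope_at`] is assumed):
///
/// ```ignore
/// let classifier = CharClassifier::new(scope).ignore_punctuation(false);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// ```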
5284#[derive(Default, Debug)]
5285pub struct CharClassifier {
5286 scope: Option<LanguageScope>,
5287 scope_context: Option<CharScopeContext>,
5288 ignore_punctuation: bool,
5289}
5290
5291impl CharClassifier {
5292 pub fn new(scope: Option<LanguageScope>) -> Self {
5293 Self {
5294 scope,
5295 scope_context: None,
5296 ignore_punctuation: false,
5297 }
5298 }
5299
5300 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5301 Self {
5302 scope_context,
5303 ..self
5304 }
5305 }
5306
5307 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5308 Self {
5309 ignore_punctuation,
5310 ..self
5311 }
5312 }
5313
5314 pub fn is_whitespace(&self, c: char) -> bool {
5315 self.kind(c) == CharKind::Whitespace
5316 }
5317
5318 pub fn is_word(&self, c: char) -> bool {
5319 self.kind(c) == CharKind::Word
5320 }
5321
5322 pub fn is_punctuation(&self, c: char) -> bool {
5323 self.kind(c) == CharKind::Punctuation
5324 }
5325
5326 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5327 if c.is_alphanumeric() || c == '_' {
5328 return CharKind::Word;
5329 }
5330
5331 if let Some(scope) = &self.scope {
5332 let characters = match self.scope_context {
5333 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5334 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5335 None => scope.word_characters(),
5336 };
5337 if let Some(characters) = characters
5338 && characters.contains(&c)
5339 {
5340 return CharKind::Word;
5341 }
5342 }
5343
5344 if c.is_whitespace() {
5345 return CharKind::Whitespace;
5346 }
5347
5348 if ignore_punctuation {
5349 CharKind::Word
5350 } else {
5351 CharKind::Punctuation
5352 }
5353 }
5354
5355 pub fn kind(&self, c: char) -> CharKind {
5356 self.kind_with(c, self.ignore_punctuation)
5357 }
5358}
5359
5360/// Find all of the ranges of whitespace that occur at the ends of lines
5361/// in the given rope.
5362///
5363/// This could also be done with a regex search, but this implementation
5364/// avoids copying text.
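///
/// A small illustration (hedged; assumes `Rope` can be built from a `&str`, not
/// compiled as a doctest):
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n}\t\n");
/// // Trailing spaces on the first line and the trailing tab on the second.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..13, 15..16]);
/// ```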
5365pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5366 let mut ranges = Vec::new();
5367
5368 let mut offset = 0;
5369 let mut prev_chunk_trailing_whitespace_range = 0..0;
5370 for chunk in rope.chunks() {
5371 let mut prev_line_trailing_whitespace_range = 0..0;
5372 for (i, line) in chunk.split('\n').enumerate() {
5373 let line_end_offset = offset + line.len();
5374 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5375 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5376
5377 if i == 0 && trimmed_line_len == 0 {
5378 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5379 }
5380 if !prev_line_trailing_whitespace_range.is_empty() {
5381 ranges.push(prev_line_trailing_whitespace_range);
5382 }
5383
5384 offset = line_end_offset + 1;
5385 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5386 }
5387
5388 offset -= 1;
5389 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5390 }
5391
5392 if !prev_chunk_trailing_whitespace_range.is_empty() {
5393 ranges.push(prev_chunk_trailing_whitespace_range);
5394 }
5395
5396 ranges
5397}