1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 future::Future,
45 iter::{self, Iterator, Peekable},
46 mem,
47 num::NonZeroU32,
48 ops::{Deref, Range},
49 path::PathBuf,
50 rc,
51 sync::{Arc, LazyLock},
52 time::{Duration, Instant},
53 vec,
54};
55use sum_tree::TreeMap;
56use text::operation_queue::OperationQueue;
57use text::*;
58pub use text::{
59 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
60 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
61 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
62 ToPointUtf16, Transaction, TransactionId, Unclipped,
63};
64use theme::{ActiveTheme as _, SyntaxTheme};
65#[cfg(any(test, feature = "test-support"))]
66use util::RandomCharIter;
67use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
68
69#[cfg(any(test, feature = "test-support"))]
70pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
71
72pub use lsp::DiagnosticSeverity;
73
74/// A label for the background task spawned by the buffer to compute
75/// a diff against the contents of its file.
76pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
77
/// Indicates whether a [`Buffer`] has permission to edit.
79#[derive(PartialEq, Clone, Copy, Debug)]
80pub enum Capability {
81 /// The buffer is a mutable replica.
82 ReadWrite,
83 /// The buffer is a read-only replica.
84 ReadOnly,
85}
86
87pub type BufferRow = u32;
88
89/// An in-memory representation of a source code file, including its text,
90/// syntax trees, git status, and diagnostics.
91pub struct Buffer {
92 text: TextBuffer,
93 branch_state: Option<BufferBranchState>,
94 /// Filesystem state, `None` when there is no path.
95 file: Option<Arc<dyn File>>,
96 /// The mtime of the file when this buffer was last loaded from
97 /// or saved to disk.
98 saved_mtime: Option<MTime>,
99 /// The version vector when this buffer was last loaded from
100 /// or saved to disk.
101 saved_version: clock::Global,
102 preview_version: clock::Global,
103 transaction_depth: usize,
104 was_dirty_before_starting_transaction: Option<bool>,
105 reload_task: Option<Task<Result<()>>>,
106 language: Option<Arc<Language>>,
107 autoindent_requests: Vec<Arc<AutoindentRequest>>,
108 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
109 pending_autoindent: Option<Task<()>>,
110 sync_parse_timeout: Duration,
111 syntax_map: Mutex<SyntaxMap>,
112 reparse: Option<Task<()>>,
113 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
114 non_text_state_update_count: usize,
115 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
116 remote_selections: TreeMap<ReplicaId, SelectionSet>,
117 diagnostics_timestamp: clock::Lamport,
118 completion_triggers: BTreeSet<String>,
119 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
120 completion_triggers_timestamp: clock::Lamport,
121 deferred_ops: OperationQueue<Operation>,
122 capability: Capability,
123 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` as of the most
    /// recent call.
126 has_unsaved_edits: Cell<(clock::Global, bool)>,
127 change_bits: Vec<rc::Weak<Cell<bool>>>,
128 _subscriptions: Vec<gpui::Subscription>,
129}
130
131#[derive(Copy, Clone, Debug, PartialEq, Eq)]
132pub enum ParseStatus {
133 Idle,
134 Parsing,
135}
136
137struct BufferBranchState {
138 base_buffer: Entity<Buffer>,
139 merged_operations: Vec<Lamport>,
140}
141
142/// An immutable, cheaply cloneable representation of a fixed
143/// state of a buffer.
144pub struct BufferSnapshot {
145 pub text: text::BufferSnapshot,
146 pub syntax: SyntaxSnapshot,
147 file: Option<Arc<dyn File>>,
148 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
149 remote_selections: TreeMap<ReplicaId, SelectionSet>,
150 language: Option<Arc<Language>>,
151 non_text_state_update_count: usize,
152}
153
154/// The kind and amount of indentation in a particular line. For now,
155/// assumes that indentation is all the same character.
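///
/// For example, an indentation of four spaces would be described by a value
/// equivalent to the following sketch (illustrative values, not from the
/// original source):
///
/// ```ignore
/// IndentSize { len: 4, kind: IndentKind::Space }
/// ```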
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
157pub struct IndentSize {
158 /// The number of bytes that comprise the indentation.
159 pub len: u32,
160 /// The kind of whitespace used for indentation.
161 pub kind: IndentKind,
162}
163
164/// A whitespace character that's used for indentation.
165#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
166pub enum IndentKind {
167 /// An ASCII space character.
168 #[default]
169 Space,
170 /// An ASCII tab character.
171 Tab,
172}
173
174/// The shape of a selection cursor.
175#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
176pub enum CursorShape {
177 /// A vertical bar
178 #[default]
179 Bar,
180 /// A block that surrounds the following character
181 Block,
182 /// An underline that runs along the following character
183 Underline,
184 /// A box drawn around the following character
185 Hollow,
186}
187
188impl From<settings::CursorShape> for CursorShape {
189 fn from(shape: settings::CursorShape) -> Self {
190 match shape {
191 settings::CursorShape::Bar => CursorShape::Bar,
192 settings::CursorShape::Block => CursorShape::Block,
193 settings::CursorShape::Underline => CursorShape::Underline,
194 settings::CursorShape::Hollow => CursorShape::Hollow,
195 }
196 }
197}
198
199#[derive(Clone, Debug)]
200struct SelectionSet {
201 line_mode: bool,
202 cursor_shape: CursorShape,
203 selections: Arc<[Selection<Anchor>]>,
204 lamport_timestamp: clock::Lamport,
205}
206
207/// A diagnostic associated with a certain range of a buffer.
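///
/// A hedged construction sketch (not from the original source; the field values
/// below are illustrative only, and `DiagnosticSeverity::WARNING` comes from the
/// `lsp` re-export used in this module):
///
/// ```ignore
/// let diagnostic = Diagnostic {
///     source: Some("example-linter".into()),
///     code: None,
///     code_description: None,
///     severity: DiagnosticSeverity::WARNING,
///     message: "unused variable".into(),
///     markdown: None,
///     group_id: 0,
///     is_primary: true,
///     is_disk_based: false,
///     is_unnecessary: true,
///     source_kind: DiagnosticSourceKind::Pushed,
///     data: None,
///     underline: true,
/// };
/// ```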
208#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
209pub struct Diagnostic {
210 /// The name of the service that produced this diagnostic.
211 pub source: Option<String>,
212 /// A machine-readable code that identifies this diagnostic.
213 pub code: Option<NumberOrString>,
    /// An optional URI linking to documentation that describes this diagnostic's code.
    pub code_description: Option<lsp::Uri>,
215 /// Whether this diagnostic is a hint, warning, or error.
216 pub severity: DiagnosticSeverity,
217 /// The human-readable message associated with this diagnostic.
218 pub message: String,
    /// The human-readable message, in Markdown format, if available.
220 pub markdown: Option<String>,
221 /// An id that identifies the group to which this diagnostic belongs.
222 ///
223 /// When a language server produces a diagnostic with
224 /// one or more associated diagnostics, those diagnostics are all
225 /// assigned a single group ID.
226 pub group_id: usize,
227 /// Whether this diagnostic is the primary diagnostic for its group.
228 ///
229 /// In a given group, the primary diagnostic is the top-level diagnostic
230 /// returned by the language server. The non-primary diagnostics are the
231 /// associated diagnostics.
232 pub is_primary: bool,
233 /// Whether this diagnostic is considered to originate from an analysis of
234 /// files on disk, as opposed to any unsaved buffer contents. This is a
235 /// property of a given diagnostic source, and is configured for a given
236 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
237 /// for the language server.
238 pub is_disk_based: bool,
239 /// Whether this diagnostic marks unnecessary code.
240 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups based on their source.
242 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the
    /// server when code actions are requested for this diagnostic.
244 pub data: Option<Value>,
245 /// Whether to underline the corresponding text range in the editor.
246 pub underline: bool,
247}
248
249#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
250pub enum DiagnosticSourceKind {
251 Pulled,
252 Pushed,
253 Other,
254}
255
256/// An operation used to synchronize this buffer with its other replicas.
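///
/// A small sketch (not part of the original source) of how an `Operation` can be
/// inspected; only the variants defined below are assumed:
///
/// ```ignore
/// fn is_text_operation(op: &Operation) -> bool {
///     matches!(op, Operation::Buffer(_))
/// }
/// ```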
257#[derive(Clone, Debug, PartialEq)]
258pub enum Operation {
259 /// A text operation.
260 Buffer(text::Operation),
261
262 /// An update to the buffer's diagnostics.
263 UpdateDiagnostics {
264 /// The id of the language server that produced the new diagnostics.
265 server_id: LanguageServerId,
266 /// The diagnostics.
267 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
268 /// The buffer's lamport timestamp.
269 lamport_timestamp: clock::Lamport,
270 },
271
272 /// An update to the most recent selections in this buffer.
273 UpdateSelections {
274 /// The selections.
275 selections: Arc<[Selection<Anchor>]>,
276 /// The buffer's lamport timestamp.
277 lamport_timestamp: clock::Lamport,
278 /// Whether the selections are in 'line mode'.
279 line_mode: bool,
280 /// The [`CursorShape`] associated with these selections.
281 cursor_shape: CursorShape,
282 },
283
284 /// An update to the characters that should trigger autocompletion
285 /// for this buffer.
286 UpdateCompletionTriggers {
287 /// The characters that trigger autocompletion.
288 triggers: Vec<String>,
289 /// The buffer's lamport timestamp.
290 lamport_timestamp: clock::Lamport,
291 /// The language server ID.
292 server_id: LanguageServerId,
293 },
294
295 /// An update to the line ending type of this buffer.
296 UpdateLineEnding {
297 /// The line ending type.
298 line_ending: LineEnding,
299 /// The buffer's lamport timestamp.
300 lamport_timestamp: clock::Lamport,
301 },
302}
303
304/// An event that occurs in a buffer.
305#[derive(Clone, Debug, PartialEq)]
306pub enum BufferEvent {
307 /// The buffer was changed in a way that must be
308 /// propagated to its other replicas.
309 Operation {
310 operation: Operation,
311 is_local: bool,
312 },
313 /// The buffer was edited.
314 Edited,
315 /// The buffer's `dirty` bit changed.
316 DirtyChanged,
317 /// The buffer was saved.
318 Saved,
319 /// The buffer's file was changed on disk.
320 FileHandleChanged,
321 /// The buffer was reloaded.
322 Reloaded,
    /// The buffer needs to be reloaded.
324 ReloadNeeded,
325 /// The buffer's language was changed.
326 LanguageChanged,
327 /// The buffer's syntax trees were updated.
328 Reparsed,
329 /// The buffer's diagnostics were updated.
330 DiagnosticsUpdated,
331 /// The buffer gained or lost editing capabilities.
332 CapabilityChanged,
333}
334
335/// The file associated with a buffer.
336pub trait File: Send + Sync + Any {
337 /// Returns the [`LocalFile`] associated with this file, if the
338 /// file is local.
339 fn as_local(&self) -> Option<&dyn LocalFile>;
340
341 /// Returns whether this file is local.
342 fn is_local(&self) -> bool {
343 self.as_local().is_some()
344 }
345
346 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
347 /// only available in some states, such as modification time.
348 fn disk_state(&self) -> DiskState;
349
350 /// Returns the path of this file relative to the worktree's root directory.
351 fn path(&self) -> &Arc<RelPath>;
352
353 /// Returns the path of this file relative to the worktree's parent directory (this means it
354 /// includes the name of the worktree's root folder).
355 fn full_path(&self, cx: &App) -> PathBuf;
356
357 /// Returns the path style of this file.
358 fn path_style(&self, cx: &App) -> PathStyle;
359
360 /// Returns the last component of this handle's absolute path. If this handle refers to the root
361 /// of its worktree, then this method will return the name of the worktree itself.
362 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
363
364 /// Returns the id of the worktree to which this file belongs.
365 ///
366 /// This is needed for looking up project-specific settings.
367 fn worktree_id(&self, cx: &App) -> WorktreeId;
368
369 /// Converts this file into a protobuf message.
370 fn to_proto(&self, cx: &App) -> rpc::proto::File;
371
    /// Returns whether Zed considers this to be a private file.
373 fn is_private(&self) -> bool;
374}
375
376/// The file's storage status - whether it's stored (`Present`), and if so when it was last
377/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
378/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
379/// indicator for new files.
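///
/// A hedged sketch of the state distinctions described above (not from the
/// original source; `mtime` is an illustrative `MTime` value):
///
/// ```ignore
/// assert!(!DiskState::New.exists());
/// assert!(DiskState::Present { mtime }.exists());
/// assert!(DiskState::Deleted.mtime().is_none());
/// ```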
380#[derive(Copy, Clone, Debug, PartialEq)]
381pub enum DiskState {
382 /// File created in Zed that has not been saved.
383 New,
384 /// File present on the filesystem.
385 Present { mtime: MTime },
386 /// Deleted file that was previously present.
387 Deleted,
388}
389
390impl DiskState {
391 /// Returns the file's last known modification time on disk.
392 pub fn mtime(self) -> Option<MTime> {
393 match self {
394 DiskState::New => None,
395 DiskState::Present { mtime } => Some(mtime),
396 DiskState::Deleted => None,
397 }
398 }
399
400 pub fn exists(&self) -> bool {
401 match self {
402 DiskState::New => false,
403 DiskState::Present { .. } => true,
404 DiskState::Deleted => false,
405 }
406 }
407}
408
409/// The file associated with a buffer, in the case where the file is on the local disk.
410pub trait LocalFile: File {
    /// Returns the absolute path of this file.
412 fn abs_path(&self, cx: &App) -> PathBuf;
413
414 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
415 fn load(&self, cx: &App) -> Task<Result<String>>;
416
417 /// Loads the file's contents from disk.
418 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
419}
420
421/// The auto-indent behavior associated with an editing operation.
422/// For some editing operations, each affected line of text has its
423/// indentation recomputed. For other operations, the entire block
424/// of edited text is adjusted uniformly.
425#[derive(Clone, Debug)]
426pub enum AutoindentMode {
427 /// Indent each line of inserted text.
428 EachLine,
429 /// Apply the same indentation adjustment to all of the lines
430 /// in a given insertion.
431 Block {
432 /// The original indentation column of the first line of each
433 /// insertion, if it has been copied.
434 ///
435 /// Knowing this makes it possible to preserve the relative indentation
436 /// of every line in the insertion from when it was copied.
437 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion will have its indentation adjusted by `b - a`.
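        ///
        /// For example, if the copied block's first line originally began at
        /// column 4 (`a = 4`) and auto-indent places it at column 8 (`b = 8`),
        /// every subsequent line of the insertion shifts right by 4 columns.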
441 original_indent_columns: Vec<Option<u32>>,
442 },
443}
444
445#[derive(Clone)]
446struct AutoindentRequest {
447 before_edit: BufferSnapshot,
448 entries: Vec<AutoindentRequestEntry>,
449 is_block_mode: bool,
450 ignore_empty_lines: bool,
451}
452
453#[derive(Debug, Clone)]
454struct AutoindentRequestEntry {
455 /// A range of the buffer whose indentation should be adjusted.
456 range: Range<Anchor>,
457 /// Whether or not these lines should be considered brand new, for the
458 /// purpose of auto-indent. When text is not new, its indentation will
459 /// only be adjusted if the suggested indentation level has *changed*
460 /// since the edit was made.
461 first_line_is_new: bool,
462 indent_size: IndentSize,
463 original_indent_column: Option<u32>,
464}
465
466#[derive(Debug)]
467struct IndentSuggestion {
468 basis_row: u32,
469 delta: Ordering,
470 within_error: bool,
471}
472
473struct BufferChunkHighlights<'a> {
474 captures: SyntaxMapCaptures<'a>,
475 next_capture: Option<SyntaxMapCapture<'a>>,
476 stack: Vec<(usize, HighlightId)>,
477 highlight_maps: Vec<HighlightMap>,
478}
479
480/// An iterator that yields chunks of a buffer's text, along with their
481/// syntax highlights and diagnostic status.
482pub struct BufferChunks<'a> {
483 buffer_snapshot: Option<&'a BufferSnapshot>,
484 range: Range<usize>,
485 chunks: text::Chunks<'a>,
486 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
487 error_depth: usize,
488 warning_depth: usize,
489 information_depth: usize,
490 hint_depth: usize,
491 unnecessary_depth: usize,
492 underline: bool,
493 highlights: Option<BufferChunkHighlights<'a>>,
494}
495
496/// A chunk of a buffer's text, along with its syntax highlight and
497/// diagnostic status.
498#[derive(Clone, Debug, Default)]
499pub struct Chunk<'a> {
500 /// The text of the chunk.
501 pub text: &'a str,
502 /// The syntax highlighting style of the chunk.
503 pub syntax_highlight_id: Option<HighlightId>,
504 /// The highlight style that has been applied to this chunk in
505 /// the editor.
506 pub highlight_style: Option<HighlightStyle>,
507 /// The severity of diagnostic associated with this chunk, if any.
508 pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters in this chunk are tabs.
    pub tabs: u128,
    /// A bitmap of character indices in this chunk.
512 pub chars: u128,
513 /// Whether this chunk of text is marked as unnecessary.
514 pub is_unnecessary: bool,
515 /// Whether this chunk of text was originally a tab character.
516 pub is_tab: bool,
517 /// Whether this chunk of text was originally an inlay.
518 pub is_inlay: bool,
519 /// Whether to underline the corresponding text range in the editor.
520 pub underline: bool,
521}
522
523/// A set of edits to a given version of a buffer, computed asynchronously.
524#[derive(Debug)]
525pub struct Diff {
526 pub base_version: clock::Global,
527 pub line_ending: LineEnding,
528 pub edits: Vec<(Range<usize>, Arc<str>)>,
529}
530
531#[derive(Debug, Clone, Copy)]
532pub(crate) struct DiagnosticEndpoint {
533 offset: usize,
534 is_start: bool,
535 underline: bool,
536 severity: DiagnosticSeverity,
537 is_unnecessary: bool,
538}
539
540/// A class of characters, used for characterizing a run of text.
541#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
542pub enum CharKind {
543 /// Whitespace.
544 Whitespace,
545 /// Punctuation.
546 Punctuation,
547 /// Word.
548 Word,
549}
550
551/// Context for character classification within a specific scope.
552#[derive(Copy, Clone, Eq, PartialEq, Debug)]
553pub enum CharScopeContext {
554 /// Character classification for completion queries.
555 ///
556 /// This context treats certain characters as word constituents that would
557 /// normally be considered punctuation, such as '-' in Tailwind classes
558 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
559 Completion,
560 /// Character classification for linked edits.
561 ///
562 /// This context handles characters that should be treated as part of
563 /// identifiers during linked editing operations, such as '.' in JSX
564 /// component names like `<Animated.View>`.
565 LinkedEdit,
566}
567
/// A runnable is a set of data about a region of the buffer that can be
/// resolved into a task.
569pub struct Runnable {
570 pub tags: SmallVec<[RunnableTag; 1]>,
571 pub language: Arc<Language>,
572 pub buffer: BufferId,
573}
574
575#[derive(Default, Clone, Debug)]
576pub struct HighlightedText {
577 pub text: SharedString,
578 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
579}
580
581#[derive(Default, Debug)]
582struct HighlightedTextBuilder {
583 pub text: String,
584 highlights: Vec<(Range<usize>, HighlightStyle)>,
585}
586
587impl HighlightedText {
588 pub fn from_buffer_range<T: ToOffset>(
589 range: Range<T>,
590 snapshot: &text::BufferSnapshot,
591 syntax_snapshot: &SyntaxSnapshot,
592 override_style: Option<HighlightStyle>,
593 syntax_theme: &SyntaxTheme,
594 ) -> Self {
595 let mut highlighted_text = HighlightedTextBuilder::default();
596 highlighted_text.add_text_from_buffer_range(
597 range,
598 snapshot,
599 syntax_snapshot,
600 override_style,
601 syntax_theme,
602 );
603 highlighted_text.build()
604 }
605
606 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
607 gpui::StyledText::new(self.text.clone())
608 .with_default_highlights(default_style, self.highlights.iter().cloned())
609 }
610
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// begins within that whitespace, and a boolean indicating whether more lines follow.
613 pub fn first_line_preview(self) -> (Self, bool) {
614 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
615 let first_line = &self.text[..newline_ix];
616
617 // Trim leading whitespace, unless an edit starts prior to it.
618 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
619 if let Some((first_highlight_range, _)) = self.highlights.first() {
620 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
621 }
622
623 let preview_text = &first_line[preview_start_ix..];
624 let preview_highlights = self
625 .highlights
626 .into_iter()
627 .skip_while(|(range, _)| range.end <= preview_start_ix)
628 .take_while(|(range, _)| range.start < newline_ix)
629 .filter_map(|(mut range, highlight)| {
630 range.start = range.start.saturating_sub(preview_start_ix);
631 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
632 if range.is_empty() {
633 None
634 } else {
635 Some((range, highlight))
636 }
637 });
638
639 let preview = Self {
640 text: SharedString::new(preview_text),
641 highlights: preview_highlights.collect(),
642 };
643
644 (preview, self.text.len() > newline_ix)
645 }
646}
647
648impl HighlightedTextBuilder {
649 pub fn build(self) -> HighlightedText {
650 HighlightedText {
651 text: self.text.into(),
652 highlights: self.highlights,
653 }
654 }
655
656 pub fn add_text_from_buffer_range<T: ToOffset>(
657 &mut self,
658 range: Range<T>,
659 snapshot: &text::BufferSnapshot,
660 syntax_snapshot: &SyntaxSnapshot,
661 override_style: Option<HighlightStyle>,
662 syntax_theme: &SyntaxTheme,
663 ) {
664 let range = range.to_offset(snapshot);
665 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
666 let start = self.text.len();
667 self.text.push_str(chunk.text);
668 let end = self.text.len();
669
670 if let Some(highlight_style) = chunk
671 .syntax_highlight_id
672 .and_then(|id| id.style(syntax_theme))
673 {
674 let highlight_style = override_style.map_or(highlight_style, |override_style| {
675 highlight_style.highlight(override_style)
676 });
677 self.highlights.push((start..end, highlight_style));
678 } else if let Some(override_style) = override_style {
679 self.highlights.push((start..end, override_style));
680 }
681 }
682 }
683
684 fn highlighted_chunks<'a>(
685 range: Range<usize>,
686 snapshot: &'a text::BufferSnapshot,
687 syntax_snapshot: &'a SyntaxSnapshot,
688 ) -> BufferChunks<'a> {
689 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
690 grammar
691 .highlights_config
692 .as_ref()
693 .map(|config| &config.query)
694 });
695
696 let highlight_maps = captures
697 .grammars()
698 .iter()
699 .map(|grammar| grammar.highlight_map())
700 .collect();
701
702 BufferChunks::new(
703 snapshot.as_rope(),
704 range,
705 Some((captures, highlight_maps)),
706 false,
707 None,
708 )
709 }
710}
711
712#[derive(Clone)]
713pub struct EditPreview {
714 old_snapshot: text::BufferSnapshot,
715 applied_edits_snapshot: text::BufferSnapshot,
716 syntax_snapshot: SyntaxSnapshot,
717}
718
719impl EditPreview {
720 pub fn highlight_edits(
721 &self,
722 current_snapshot: &BufferSnapshot,
723 edits: &[(Range<Anchor>, impl AsRef<str>)],
724 include_deletions: bool,
725 cx: &App,
726 ) -> HighlightedText {
727 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
728 return HighlightedText::default();
729 };
730
731 let mut highlighted_text = HighlightedTextBuilder::default();
732
733 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
734
735 let insertion_highlight_style = HighlightStyle {
736 background_color: Some(cx.theme().status().created_background),
737 ..Default::default()
738 };
739 let deletion_highlight_style = HighlightStyle {
740 background_color: Some(cx.theme().status().deleted_background),
741 ..Default::default()
742 };
743 let syntax_theme = cx.theme().syntax();
744
745 for (range, edit_text) in edits {
746 let edit_new_end_in_preview_snapshot = range
747 .end
748 .bias_right(&self.old_snapshot)
749 .to_offset(&self.applied_edits_snapshot);
750 let edit_start_in_preview_snapshot =
751 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
752
753 let unchanged_range_in_preview_snapshot =
754 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
755 if !unchanged_range_in_preview_snapshot.is_empty() {
756 highlighted_text.add_text_from_buffer_range(
757 unchanged_range_in_preview_snapshot,
758 &self.applied_edits_snapshot,
759 &self.syntax_snapshot,
760 None,
761 syntax_theme,
762 );
763 }
764
765 let range_in_current_snapshot = range.to_offset(current_snapshot);
766 if include_deletions && !range_in_current_snapshot.is_empty() {
767 highlighted_text.add_text_from_buffer_range(
768 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
771 Some(deletion_highlight_style),
772 syntax_theme,
773 );
774 }
775
776 if !edit_text.as_ref().is_empty() {
777 highlighted_text.add_text_from_buffer_range(
778 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
779 &self.applied_edits_snapshot,
780 &self.syntax_snapshot,
781 Some(insertion_highlight_style),
782 syntax_theme,
783 );
784 }
785
786 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
787 }
788
789 highlighted_text.add_text_from_buffer_range(
790 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
791 &self.applied_edits_snapshot,
792 &self.syntax_snapshot,
793 None,
794 syntax_theme,
795 );
796
797 highlighted_text.build()
798 }
799
800 fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<usize>> {
801 let (first, _) = edits.first()?;
802 let (last, _) = edits.last()?;
803
804 let start = first
805 .start
806 .bias_left(&self.old_snapshot)
807 .to_point(&self.applied_edits_snapshot);
808 let end = last
809 .end
810 .bias_right(&self.old_snapshot)
811 .to_point(&self.applied_edits_snapshot);
812
813 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
814 let range = Point::new(start.row, 0)
815 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
816
817 Some(range.to_offset(&self.applied_edits_snapshot))
818 }
819}
820
821#[derive(Clone, Debug, PartialEq, Eq)]
822pub struct BracketMatch {
823 pub open_range: Range<usize>,
824 pub close_range: Range<usize>,
825 pub newline_only: bool,
826 pub depth: usize,
827}
828
829impl Buffer {
830 /// Create a new buffer with the given base text.
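    ///
    /// A minimal usage sketch (not from the original source; assumes a gpui
    /// entity context, as in `cx.new`):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("Hello, world!", cx));
    /// ```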
831 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
832 Self::build(
833 TextBuffer::new(
834 ReplicaId::LOCAL,
835 cx.entity_id().as_non_zero_u64().into(),
836 base_text.into(),
837 ),
838 None,
839 Capability::ReadWrite,
840 )
841 }
842
843 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
844 pub fn local_normalized(
845 base_text_normalized: Rope,
846 line_ending: LineEnding,
847 cx: &Context<Self>,
848 ) -> Self {
849 Self::build(
850 TextBuffer::new_normalized(
851 ReplicaId::LOCAL,
852 cx.entity_id().as_non_zero_u64().into(),
853 line_ending,
854 base_text_normalized,
855 ),
856 None,
857 Capability::ReadWrite,
858 )
859 }
860
861 /// Create a new buffer that is a replica of a remote buffer.
862 pub fn remote(
863 remote_id: BufferId,
864 replica_id: ReplicaId,
865 capability: Capability,
866 base_text: impl Into<String>,
867 ) -> Self {
868 Self::build(
869 TextBuffer::new(replica_id, remote_id, base_text.into()),
870 None,
871 capability,
872 )
873 }
874
875 /// Create a new buffer that is a replica of a remote buffer, populating its
876 /// state from the given protobuf message.
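    ///
    /// A hedged round-trip sketch (not from the original source; `replica_id`,
    /// `buffer`, and `cx` are assumed to be in scope, and the error is
    /// propagated with `?`):
    ///
    /// ```ignore
    /// let state = buffer.read(cx).to_proto(cx);
    /// let replica = Buffer::from_proto(replica_id, Capability::ReadWrite, state, None)?;
    /// ```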
877 pub fn from_proto(
878 replica_id: ReplicaId,
879 capability: Capability,
880 message: proto::BufferState,
881 file: Option<Arc<dyn File>>,
882 ) -> Result<Self> {
883 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
884 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
885 let mut this = Self::build(buffer, file, capability);
886 this.text.set_line_ending(proto::deserialize_line_ending(
887 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
888 ));
889 this.saved_version = proto::deserialize_version(&message.saved_version);
890 this.saved_mtime = message.saved_mtime.map(|time| time.into());
891 Ok(this)
892 }
893
894 /// Serialize the buffer's state to a protobuf message.
895 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
896 proto::BufferState {
897 id: self.remote_id().into(),
898 file: self.file.as_ref().map(|f| f.to_proto(cx)),
899 base_text: self.base_text().to_string(),
900 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
901 saved_version: proto::serialize_version(&self.saved_version),
902 saved_mtime: self.saved_mtime.map(|time| time.into()),
903 }
904 }
905
    /// Serializes all of the changes made to the buffer since the given version
    /// as protobuf operations.
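    ///
    /// A hedged sketch (not from the original source; `remote_version` is an
    /// assumed `clock::Global` describing what the other replica has observed):
    ///
    /// ```ignore
    /// let ops = buffer.read(cx).serialize_ops(Some(remote_version), cx).await;
    /// ```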
907 pub fn serialize_ops(
908 &self,
909 since: Option<clock::Global>,
910 cx: &App,
911 ) -> Task<Vec<proto::Operation>> {
912 let mut operations = Vec::new();
913 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
914
915 operations.extend(self.remote_selections.iter().map(|(_, set)| {
916 proto::serialize_operation(&Operation::UpdateSelections {
917 selections: set.selections.clone(),
918 lamport_timestamp: set.lamport_timestamp,
919 line_mode: set.line_mode,
920 cursor_shape: set.cursor_shape,
921 })
922 }));
923
924 for (server_id, diagnostics) in &self.diagnostics {
925 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
926 lamport_timestamp: self.diagnostics_timestamp,
927 server_id: *server_id,
928 diagnostics: diagnostics.iter().cloned().collect(),
929 }));
930 }
931
932 for (server_id, completions) in &self.completion_triggers_per_language_server {
933 operations.push(proto::serialize_operation(
934 &Operation::UpdateCompletionTriggers {
935 triggers: completions.iter().cloned().collect(),
936 lamport_timestamp: self.completion_triggers_timestamp,
937 server_id: *server_id,
938 },
939 ));
940 }
941
942 let text_operations = self.text.operations().clone();
943 cx.background_spawn(async move {
944 let since = since.unwrap_or_default();
945 operations.extend(
946 text_operations
947 .iter()
948 .filter(|(_, op)| !since.observed(op.timestamp()))
949 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
950 );
951 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
952 operations
953 })
954 }
955
956 /// Assign a language to the buffer, returning the buffer.
957 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
958 self.set_language(Some(language), cx);
959 self
960 }
961
962 /// Returns the [`Capability`] of this buffer.
963 pub fn capability(&self) -> Capability {
964 self.capability
965 }
966
967 /// Whether this buffer can only be read.
968 pub fn read_only(&self) -> bool {
969 self.capability == Capability::ReadOnly
970 }
971
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
973 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
974 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
975 let snapshot = buffer.snapshot();
976 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
977 Self {
978 saved_mtime,
979 saved_version: buffer.version(),
980 preview_version: buffer.version(),
981 reload_task: None,
982 transaction_depth: 0,
983 was_dirty_before_starting_transaction: None,
984 has_unsaved_edits: Cell::new((buffer.version(), false)),
985 text: buffer,
986 branch_state: None,
987 file,
988 capability,
989 syntax_map,
990 reparse: None,
991 non_text_state_update_count: 0,
992 sync_parse_timeout: Duration::from_millis(1),
993 parse_status: watch::channel(ParseStatus::Idle),
994 autoindent_requests: Default::default(),
995 wait_for_autoindent_txs: Default::default(),
996 pending_autoindent: Default::default(),
997 language: None,
998 remote_selections: Default::default(),
999 diagnostics: Default::default(),
1000 diagnostics_timestamp: Lamport::MIN,
1001 completion_triggers: Default::default(),
1002 completion_triggers_per_language_server: Default::default(),
1003 completion_triggers_timestamp: Lamport::MIN,
1004 deferred_ops: OperationQueue::new(),
1005 has_conflict: false,
1006 change_bits: Default::default(),
1007 _subscriptions: Vec::new(),
1008 }
1009 }
1010
1011 pub fn build_snapshot(
1012 text: Rope,
1013 language: Option<Arc<Language>>,
1014 language_registry: Option<Arc<LanguageRegistry>>,
1015 cx: &mut App,
1016 ) -> impl Future<Output = BufferSnapshot> + use<> {
1017 let entity_id = cx.reserve_entity::<Self>().entity_id();
1018 let buffer_id = entity_id.as_non_zero_u64().into();
1019 async move {
1020 let text =
1021 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1022 .snapshot();
1023 let mut syntax = SyntaxMap::new(&text).snapshot();
1024 if let Some(language) = language.clone() {
1025 let language_registry = language_registry.clone();
1026 syntax.reparse(&text, language_registry, language);
1027 }
1028 BufferSnapshot {
1029 text,
1030 syntax,
1031 file: None,
1032 diagnostics: Default::default(),
1033 remote_selections: Default::default(),
1034 language,
1035 non_text_state_update_count: 0,
1036 }
1037 }
1038 }
1039
1040 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1041 let entity_id = cx.reserve_entity::<Self>().entity_id();
1042 let buffer_id = entity_id.as_non_zero_u64().into();
1043 let text = TextBuffer::new_normalized(
1044 ReplicaId::LOCAL,
1045 buffer_id,
1046 Default::default(),
1047 Rope::new(),
1048 )
1049 .snapshot();
1050 let syntax = SyntaxMap::new(&text).snapshot();
1051 BufferSnapshot {
1052 text,
1053 syntax,
1054 file: None,
1055 diagnostics: Default::default(),
1056 remote_selections: Default::default(),
1057 language: None,
1058 non_text_state_update_count: 0,
1059 }
1060 }
1061
1062 #[cfg(any(test, feature = "test-support"))]
1063 pub fn build_snapshot_sync(
1064 text: Rope,
1065 language: Option<Arc<Language>>,
1066 language_registry: Option<Arc<LanguageRegistry>>,
1067 cx: &mut App,
1068 ) -> BufferSnapshot {
1069 let entity_id = cx.reserve_entity::<Self>().entity_id();
1070 let buffer_id = entity_id.as_non_zero_u64().into();
1071 let text =
1072 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1073 .snapshot();
1074 let mut syntax = SyntaxMap::new(&text).snapshot();
1075 if let Some(language) = language.clone() {
1076 syntax.reparse(&text, language_registry, language);
1077 }
1078 BufferSnapshot {
1079 text,
1080 syntax,
1081 file: None,
1082 diagnostics: Default::default(),
1083 remote_selections: Default::default(),
1084 language,
1085 non_text_state_update_count: 0,
1086 }
1087 }
1088
1089 /// Retrieve a snapshot of the buffer's current state. This is computationally
1090 /// cheap, and allows reading from the buffer on a background thread.
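    ///
    /// A sketch of reading from a snapshot off the main thread (not from the
    /// original source; the exact accessor used on the snapshot is illustrative):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     let _len = snapshot.text.len();
    /// })
    /// .detach();
    /// ```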
1091 pub fn snapshot(&self) -> BufferSnapshot {
1092 let text = self.text.snapshot();
1093 let mut syntax_map = self.syntax_map.lock();
1094 syntax_map.interpolate(&text);
1095 let syntax = syntax_map.snapshot();
1096
1097 BufferSnapshot {
1098 text,
1099 syntax,
1100 file: self.file.clone(),
1101 remote_selections: self.remote_selections.clone(),
1102 diagnostics: self.diagnostics.clone(),
1103 language: self.language.clone(),
1104 non_text_state_update_count: self.non_text_state_update_count,
1105 }
1106 }
1107
1108 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1109 let this = cx.entity();
1110 cx.new(|cx| {
1111 let mut branch = Self {
1112 branch_state: Some(BufferBranchState {
1113 base_buffer: this.clone(),
1114 merged_operations: Default::default(),
1115 }),
1116 language: self.language.clone(),
1117 has_conflict: self.has_conflict,
1118 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1119 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1120 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1121 };
1122 if let Some(language_registry) = self.language_registry() {
1123 branch.set_language_registry(language_registry);
1124 }
1125
1126 // Reparse the branch buffer so that we get syntax highlighting immediately.
1127 branch.reparse(cx);
1128
1129 branch
1130 })
1131 }
1132
1133 pub fn preview_edits(
1134 &self,
1135 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1136 cx: &App,
1137 ) -> Task<EditPreview> {
1138 let registry = self.language_registry();
1139 let language = self.language().cloned();
1140 let old_snapshot = self.text.snapshot();
1141 let mut branch_buffer = self.text.branch();
1142 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1143 cx.background_spawn(async move {
1144 if !edits.is_empty() {
1145 if let Some(language) = language.clone() {
1146 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1147 }
1148
1149 branch_buffer.edit(edits.iter().cloned());
1150 let snapshot = branch_buffer.snapshot();
1151 syntax_snapshot.interpolate(&snapshot);
1152
1153 if let Some(language) = language {
1154 syntax_snapshot.reparse(&snapshot, registry, language);
1155 }
1156 }
1157 EditPreview {
1158 old_snapshot,
1159 applied_edits_snapshot: branch_buffer.snapshot(),
1160 syntax_snapshot,
1161 }
1162 })
1163 }
1164
1165 /// Applies all of the changes in this buffer that intersect any of the
1166 /// given `ranges` to its base buffer.
1167 ///
1168 /// If `ranges` is empty, then all changes will be applied. This buffer must
1169 /// be a branch buffer to call this method.
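    ///
    /// A hedged sketch of the branch workflow (not from the original source;
    /// `buffer` is an `Entity<Buffer>` and `cx` is a gpui app context):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// prepended\n")], None, cx);
    ///     // An empty range list applies every change back to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```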
1170 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1171 let Some(base_buffer) = self.base_buffer() else {
1172 debug_panic!("not a branch buffer");
1173 return;
1174 };
1175
1176 let mut ranges = if ranges.is_empty() {
1177 &[0..usize::MAX]
1178 } else {
1179 ranges.as_slice()
1180 }
1181 .iter()
1182 .peekable();
1183
1184 let mut edits = Vec::new();
1185 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1186 let mut is_included = false;
1187 while let Some(range) = ranges.peek() {
1188 if range.end < edit.new.start {
1189 ranges.next().unwrap();
1190 } else {
1191 if range.start <= edit.new.end {
1192 is_included = true;
1193 }
1194 break;
1195 }
1196 }
1197
1198 if is_included {
1199 edits.push((
1200 edit.old.clone(),
1201 self.text_for_range(edit.new.clone()).collect::<String>(),
1202 ));
1203 }
1204 }
1205
1206 let operation = base_buffer.update(cx, |base_buffer, cx| {
1207 // cx.emit(BufferEvent::DiffBaseChanged);
1208 base_buffer.edit(edits, None, cx)
1209 });
1210
1211 if let Some(operation) = operation
1212 && let Some(BufferBranchState {
1213 merged_operations, ..
1214 }) = &mut self.branch_state
1215 {
1216 merged_operations.push(operation);
1217 }
1218 }
1219
1220 fn on_base_buffer_event(
1221 &mut self,
1222 _: Entity<Buffer>,
1223 event: &BufferEvent,
1224 cx: &mut Context<Self>,
1225 ) {
1226 let BufferEvent::Operation { operation, .. } = event else {
1227 return;
1228 };
1229 let Some(BufferBranchState {
1230 merged_operations, ..
1231 }) = &mut self.branch_state
1232 else {
1233 return;
1234 };
1235
1236 let mut operation_to_undo = None;
1237 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1238 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1239 {
1240 merged_operations.remove(ix);
1241 operation_to_undo = Some(operation.timestamp);
1242 }
1243
1244 self.apply_ops([operation.clone()], cx);
1245
1246 if let Some(timestamp) = operation_to_undo {
1247 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1248 self.undo_operations(counts, cx);
1249 }
1250 }
1251
1252 #[cfg(test)]
1253 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1254 &self.text
1255 }
1256
1257 /// Retrieve a snapshot of the buffer's raw text, without any
1258 /// language-related state like the syntax tree or diagnostics.
1259 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1260 self.text.snapshot()
1261 }
1262
1263 /// The file associated with the buffer, if any.
1264 pub fn file(&self) -> Option<&Arc<dyn File>> {
1265 self.file.as_ref()
1266 }
1267
1268 /// The version of the buffer that was last saved or reloaded from disk.
1269 pub fn saved_version(&self) -> &clock::Global {
1270 &self.saved_version
1271 }
1272
1273 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1274 pub fn saved_mtime(&self) -> Option<MTime> {
1275 self.saved_mtime
1276 }
1277
1278 /// Assign a language to the buffer.
1279 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1280 self.non_text_state_update_count += 1;
1281 self.syntax_map.lock().clear(&self.text);
1282 self.language = language;
1283 self.was_changed();
1284 self.reparse(cx);
1285 cx.emit(BufferEvent::LanguageChanged);
1286 }
1287
1288 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1289 /// other languages if parts of the buffer are written in different languages.
1290 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1291 self.syntax_map
1292 .lock()
1293 .set_language_registry(language_registry);
1294 }
1295
1296 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1297 self.syntax_map.lock().language_registry()
1298 }
1299
1300 /// Assign the line ending type to the buffer.
1301 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1302 self.text.set_line_ending(line_ending);
1303
1304 let lamport_timestamp = self.text.lamport_clock.tick();
1305 self.send_operation(
1306 Operation::UpdateLineEnding {
1307 line_ending,
1308 lamport_timestamp,
1309 },
1310 true,
1311 cx,
1312 );
1313 }
1314
1315 /// Assign the buffer a new [`Capability`].
1316 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1317 if self.capability != capability {
1318 self.capability = capability;
1319 cx.emit(BufferEvent::CapabilityChanged)
1320 }
1321 }
1322
1323 /// This method is called to signal that the buffer has been saved.
1324 pub fn did_save(
1325 &mut self,
1326 version: clock::Global,
1327 mtime: Option<MTime>,
1328 cx: &mut Context<Self>,
1329 ) {
1330 self.saved_version = version.clone();
1331 self.has_unsaved_edits.set((version, false));
1332 self.has_conflict = false;
1333 self.saved_mtime = mtime;
1334 self.was_changed();
1335 cx.emit(BufferEvent::Saved);
1336 cx.notify();
1337 }
1338
1339 /// Reloads the contents of the buffer from disk.
1340 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1341 let (tx, rx) = futures::channel::oneshot::channel();
1342 let prev_version = self.text.version();
1343 self.reload_task = Some(cx.spawn(async move |this, cx| {
1344 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1345 let file = this.file.as_ref()?.as_local()?;
1346
1347 Some((file.disk_state().mtime(), file.load(cx)))
1348 })?
1349 else {
1350 return Ok(());
1351 };
1352
1353 let new_text = new_text.await?;
1354 let diff = this
1355 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1356 .await;
1357 this.update(cx, |this, cx| {
1358 if this.version() == diff.base_version {
1359 this.finalize_last_transaction();
1360 this.apply_diff(diff, cx);
1361 tx.send(this.finalize_last_transaction().cloned()).ok();
1362 this.has_conflict = false;
1363 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1364 } else {
1365 if !diff.edits.is_empty()
1366 || this
1367 .edits_since::<usize>(&diff.base_version)
1368 .next()
1369 .is_some()
1370 {
1371 this.has_conflict = true;
1372 }
1373
1374 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1375 }
1376
1377 this.reload_task.take();
1378 })
1379 }));
1380 rx
1381 }
1382
1383 /// This method is called to signal that the buffer has been reloaded.
1384 pub fn did_reload(
1385 &mut self,
1386 version: clock::Global,
1387 line_ending: LineEnding,
1388 mtime: Option<MTime>,
1389 cx: &mut Context<Self>,
1390 ) {
1391 self.saved_version = version;
1392 self.has_unsaved_edits
1393 .set((self.saved_version.clone(), false));
1394 self.text.set_line_ending(line_ending);
1395 self.saved_mtime = mtime;
1396 cx.emit(BufferEvent::Reloaded);
1397 cx.notify();
1398 }
1399
1400 /// Updates the [`File`] backing this buffer. This should be called when
1401 /// the file has changed or has been deleted.
1402 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1403 let was_dirty = self.is_dirty();
1404 let mut file_changed = false;
1405
1406 if let Some(old_file) = self.file.as_ref() {
1407 if new_file.path() != old_file.path() {
1408 file_changed = true;
1409 }
1410
1411 let old_state = old_file.disk_state();
1412 let new_state = new_file.disk_state();
1413 if old_state != new_state {
1414 file_changed = true;
1415 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1416 cx.emit(BufferEvent::ReloadNeeded)
1417 }
1418 }
1419 } else {
1420 file_changed = true;
1421 };
1422
1423 self.file = Some(new_file);
1424 if file_changed {
1425 self.was_changed();
1426 self.non_text_state_update_count += 1;
1427 if was_dirty != self.is_dirty() {
1428 cx.emit(BufferEvent::DirtyChanged);
1429 }
1430 cx.emit(BufferEvent::FileHandleChanged);
1431 cx.notify();
1432 }
1433 }
1434
1435 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1436 Some(self.branch_state.as_ref()?.base_buffer.clone())
1437 }
1438
1439 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1440 pub fn language(&self) -> Option<&Arc<Language>> {
1441 self.language.as_ref()
1442 }
1443
1444 /// Returns the [`Language`] at the given location.
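    ///
    /// For example (a sketch, not from the original source), the language at a
    /// position inside an injected block may differ from [`Buffer::language`]:
    ///
    /// ```ignore
    /// let language_at_cursor = buffer.read(cx).language_at(cursor_offset);
    /// ```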
1445 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1446 let offset = position.to_offset(self);
1447 let mut is_first = true;
1448 let start_anchor = self.anchor_before(offset);
1449 let end_anchor = self.anchor_after(offset);
1450 self.syntax_map
1451 .lock()
1452 .layers_for_range(offset..offset, &self.text, false)
1453 .filter(|layer| {
1454 if is_first {
1455 is_first = false;
1456 return true;
1457 }
1458
1459 layer
1460 .included_sub_ranges
1461 .map(|sub_ranges| {
1462 sub_ranges.iter().any(|sub_range| {
1463 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1464 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1465 !is_before_start && !is_after_end
1466 })
1467 })
1468 .unwrap_or(true)
1469 })
1470 .last()
1471 .map(|info| info.language.clone())
1472 .or_else(|| self.language.clone())
1473 }
1474
1475 /// Returns each [`Language`] for the active syntax layers at the given location.
1476 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1477 let offset = position.to_offset(self);
1478 let mut languages: Vec<Arc<Language>> = self
1479 .syntax_map
1480 .lock()
1481 .layers_for_range(offset..offset, &self.text, false)
1482 .map(|info| info.language.clone())
1483 .collect();
1484
1485 if languages.is_empty()
1486 && let Some(buffer_language) = self.language()
1487 {
1488 languages.push(buffer_language.clone());
1489 }
1490
1491 languages
1492 }
1493
1494 /// An integer version number that accounts for all updates besides
1495 /// the buffer's text itself (which is versioned via a version vector).
1496 pub fn non_text_state_update_count(&self) -> usize {
1497 self.non_text_state_update_count
1498 }
1499
1500 /// Whether the buffer is being parsed in the background.
1501 #[cfg(any(test, feature = "test-support"))]
1502 pub fn is_parsing(&self) -> bool {
1503 self.reparse.is_some()
1504 }
1505
1506 /// Indicates whether the buffer contains any regions that may be
1507 /// written in a language that hasn't been loaded yet.
1508 pub fn contains_unknown_injections(&self) -> bool {
1509 self.syntax_map.lock().contains_unknown_injections()
1510 }
1511
1512 #[cfg(any(test, feature = "test-support"))]
1513 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1514 self.sync_parse_timeout = timeout;
1515 }
1516
1517 /// Called after an edit to synchronize the buffer's main parse tree with
1518 /// the buffer's new underlying state.
1519 ///
1520 /// Locks the syntax map and interpolates the edits since the last reparse
1521 /// into the foreground syntax tree.
1522 ///
1523 /// Then takes a stable snapshot of the syntax map before unlocking it.
1524 /// The snapshot with the interpolated edits is sent to a background thread,
1525 /// where we ask Tree-sitter to perform an incremental parse.
1526 ///
1527 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1528 /// waiting on the parse to complete. As soon as it completes, we proceed
1529 /// synchronously, unless a 1ms timeout elapses.
1530 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, it calls
    /// back into the main thread and assigns the newly parsed state.
1535 ///
1536 /// If the buffer or grammar changed since the start of the background parse,
1537 /// initiate an additional reparse recursively. To avoid concurrent parses
1538 /// for the same buffer, we only initiate a new parse if we are not already
1539 /// parsing in the background.
1540 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1541 if self.reparse.is_some() {
1542 return;
1543 }
1544 let language = if let Some(language) = self.language.clone() {
1545 language
1546 } else {
1547 return;
1548 };
1549
1550 let text = self.text_snapshot();
1551 let parsed_version = self.version();
1552
1553 let mut syntax_map = self.syntax_map.lock();
1554 syntax_map.interpolate(&text);
1555 let language_registry = syntax_map.language_registry();
1556 let mut syntax_snapshot = syntax_map.snapshot();
1557 drop(syntax_map);
1558
1559 let parse_task = cx.background_spawn({
1560 let language = language.clone();
1561 let language_registry = language_registry.clone();
1562 async move {
1563 syntax_snapshot.reparse(&text, language_registry, language);
1564 syntax_snapshot
1565 }
1566 });
1567
1568 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1569 match cx
1570 .background_executor()
1571 .block_with_timeout(self.sync_parse_timeout, parse_task)
1572 {
1573 Ok(new_syntax_snapshot) => {
1574 self.did_finish_parsing(new_syntax_snapshot, cx);
1575 self.reparse = None;
1576 }
1577 Err(parse_task) => {
1578 // todo(lw): hot foreground spawn
1579 self.reparse = Some(cx.spawn(async move |this, cx| {
1580 let new_syntax_map = cx.background_spawn(parse_task).await;
1581 this.update(cx, move |this, cx| {
1582 let grammar_changed = || {
1583 this.language.as_ref().is_none_or(|current_language| {
1584 !Arc::ptr_eq(&language, current_language)
1585 })
1586 };
1587 let language_registry_changed = || {
1588 new_syntax_map.contains_unknown_injections()
1589 && language_registry.is_some_and(|registry| {
1590 registry.version() != new_syntax_map.language_registry_version()
1591 })
1592 };
1593 let parse_again = this.version.changed_since(&parsed_version)
1594 || language_registry_changed()
1595 || grammar_changed();
1596 this.did_finish_parsing(new_syntax_map, cx);
1597 this.reparse = None;
1598 if parse_again {
1599 this.reparse(cx);
1600 }
1601 })
1602 .ok();
1603 }));
1604 }
1605 }
1606 }
1607
1608 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1609 self.was_changed();
1610 self.non_text_state_update_count += 1;
1611 self.syntax_map.lock().did_parse(syntax_snapshot);
1612 self.request_autoindent(cx);
1613 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1614 cx.emit(BufferEvent::Reparsed);
1615 cx.notify();
1616 }
1617
1618 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1619 self.parse_status.1.clone()
1620 }
1621
    /// Waits until the buffer is no longer parsing.
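    ///
    /// A usage sketch (not from the original source; assumes an async context
    /// with access to the buffer entity):
    ///
    /// ```ignore
    /// buffer.read(cx).parsing_idle().await;
    /// ```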
1623 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1624 let mut parse_status = self.parse_status();
1625 async move {
1626 while *parse_status.borrow() != ParseStatus::Idle {
1627 if parse_status.changed().await.is_err() {
1628 break;
1629 }
1630 }
1631 }
1632 }
1633
1634 /// Assign to the buffer a set of diagnostics created by a given language server.
1635 pub fn update_diagnostics(
1636 &mut self,
1637 server_id: LanguageServerId,
1638 diagnostics: DiagnosticSet,
1639 cx: &mut Context<Self>,
1640 ) {
1641 let lamport_timestamp = self.text.lamport_clock.tick();
1642 let op = Operation::UpdateDiagnostics {
1643 server_id,
1644 diagnostics: diagnostics.iter().cloned().collect(),
1645 lamport_timestamp,
1646 };
1647
1648 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1649 self.send_operation(op, true, cx);
1650 }
1651
1652 pub fn buffer_diagnostics(
1653 &self,
1654 for_server: Option<LanguageServerId>,
1655 ) -> Vec<&DiagnosticEntry<Anchor>> {
1656 match for_server {
1657 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1658 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1659 Err(_) => Vec::new(),
1660 },
1661 None => self
1662 .diagnostics
1663 .iter()
1664 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1665 .collect(),
1666 }
1667 }
1668
1669 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1670 if let Some(indent_sizes) = self.compute_autoindents() {
1671 let indent_sizes = cx.background_spawn(indent_sizes);
1672 match cx
1673 .background_executor()
1674 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1675 {
1676 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1677 Err(indent_sizes) => {
1678 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1679 let indent_sizes = indent_sizes.await;
1680 this.update(cx, |this, cx| {
1681 this.apply_autoindents(indent_sizes, cx);
1682 })
1683 .ok();
1684 }));
1685 }
1686 }
1687 } else {
1688 self.autoindent_requests.clear();
1689 for tx in self.wait_for_autoindent_txs.drain(..) {
1690 tx.send(()).ok();
1691 }
1692 }
1693 }
1694
1695 fn compute_autoindents(
1696 &self,
1697 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1698 let max_rows_between_yields = 100;
1699 let snapshot = self.snapshot();
1700 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1701 return None;
1702 }
1703
1704 let autoindent_requests = self.autoindent_requests.clone();
1705 Some(async move {
1706 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1707 for request in autoindent_requests {
1708 // Resolve each edited range to its row in the current buffer and in the
1709 // buffer before this batch of edits.
1710 let mut row_ranges = Vec::new();
1711 let mut old_to_new_rows = BTreeMap::new();
1712 let mut language_indent_sizes_by_new_row = Vec::new();
1713 for entry in &request.entries {
1714 let position = entry.range.start;
1715 let new_row = position.to_point(&snapshot).row;
1716 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1717 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1718
1719 if !entry.first_line_is_new {
1720 let old_row = position.to_point(&request.before_edit).row;
1721 old_to_new_rows.insert(old_row, new_row);
1722 }
1723 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1724 }
1725
1726 // Build a map containing the suggested indentation for each of the edited lines
1727 // with respect to the state of the buffer before these edits. This map is keyed
1728 // by the rows for these lines in the current state of the buffer.
1729 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1730 let old_edited_ranges =
1731 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1732 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1733 let mut language_indent_size = IndentSize::default();
1734 for old_edited_range in old_edited_ranges {
1735 let suggestions = request
1736 .before_edit
1737 .suggest_autoindents(old_edited_range.clone())
1738 .into_iter()
1739 .flatten();
1740 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1741 if let Some(suggestion) = suggestion {
1742 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1743
1744 // Find the indent size based on the language for this row.
1745 while let Some((row, size)) = language_indent_sizes.peek() {
1746 if *row > new_row {
1747 break;
1748 }
1749 language_indent_size = *size;
1750 language_indent_sizes.next();
1751 }
1752
1753 let suggested_indent = old_to_new_rows
1754 .get(&suggestion.basis_row)
1755 .and_then(|from_row| {
1756 Some(old_suggestions.get(from_row).copied()?.0)
1757 })
1758 .unwrap_or_else(|| {
1759 request
1760 .before_edit
1761 .indent_size_for_line(suggestion.basis_row)
1762 })
1763 .with_delta(suggestion.delta, language_indent_size);
1764 old_suggestions
1765 .insert(new_row, (suggested_indent, suggestion.within_error));
1766 }
1767 }
1768 yield_now().await;
1769 }
1770
1771 // Compute new suggestions for each line, but only include them in the result
1772 // if they differ from the old suggestion for that line.
1773 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1774 let mut language_indent_size = IndentSize::default();
1775 for (row_range, original_indent_column) in row_ranges {
1776 let new_edited_row_range = if request.is_block_mode {
1777 row_range.start..row_range.start + 1
1778 } else {
1779 row_range.clone()
1780 };
1781
1782 let suggestions = snapshot
1783 .suggest_autoindents(new_edited_row_range.clone())
1784 .into_iter()
1785 .flatten();
1786 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1787 if let Some(suggestion) = suggestion {
1788 // Find the indent size based on the language for this row.
1789 while let Some((row, size)) = language_indent_sizes.peek() {
1790 if *row > new_row {
1791 break;
1792 }
1793 language_indent_size = *size;
1794 language_indent_sizes.next();
1795 }
1796
1797 let suggested_indent = indent_sizes
1798 .get(&suggestion.basis_row)
1799 .copied()
1800 .map(|e| e.0)
1801 .unwrap_or_else(|| {
1802 snapshot.indent_size_for_line(suggestion.basis_row)
1803 })
1804 .with_delta(suggestion.delta, language_indent_size);
1805
1806 if old_suggestions.get(&new_row).is_none_or(
1807 |(old_indentation, was_within_error)| {
1808 suggested_indent != *old_indentation
1809 && (!suggestion.within_error || *was_within_error)
1810 },
1811 ) {
1812 indent_sizes.insert(
1813 new_row,
1814 (suggested_indent, request.ignore_empty_lines),
1815 );
1816 }
1817 }
1818 }
1819
1820 if let (true, Some(original_indent_column)) =
1821 (request.is_block_mode, original_indent_column)
1822 {
1823 let new_indent =
1824 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1825 *indent
1826 } else {
1827 snapshot.indent_size_for_line(row_range.start)
1828 };
1829 let delta = new_indent.len as i64 - original_indent_column as i64;
1830 if delta != 0 {
1831 for row in row_range.skip(1) {
1832 indent_sizes.entry(row).or_insert_with(|| {
1833 let mut size = snapshot.indent_size_for_line(row);
1834 if size.kind == new_indent.kind {
1835 match delta.cmp(&0) {
1836 Ordering::Greater => size.len += delta as u32,
1837 Ordering::Less => {
1838 size.len = size.len.saturating_sub(-delta as u32)
1839 }
1840 Ordering::Equal => {}
1841 }
1842 }
1843 (size, request.ignore_empty_lines)
1844 });
1845 }
1846 }
1847 }
1848
1849 yield_now().await;
1850 }
1851 }
1852
1853 indent_sizes
1854 .into_iter()
1855 .filter_map(|(row, (indent, ignore_empty_lines))| {
1856 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1857 None
1858 } else {
1859 Some((row, indent))
1860 }
1861 })
1862 .collect()
1863 })
1864 }
1865
1866 fn apply_autoindents(
1867 &mut self,
1868 indent_sizes: BTreeMap<u32, IndentSize>,
1869 cx: &mut Context<Self>,
1870 ) {
1871 self.autoindent_requests.clear();
1872 for tx in self.wait_for_autoindent_txs.drain(..) {
1873 tx.send(()).ok();
1874 }
1875
1876 let edits: Vec<_> = indent_sizes
1877 .into_iter()
1878 .filter_map(|(row, indent_size)| {
1879 let current_size = indent_size_for_line(self, row);
1880 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1881 })
1882 .collect();
1883
1884 let preserve_preview = self.preserve_preview();
1885 self.edit(edits, None, cx);
1886 if preserve_preview {
1887 self.refresh_preview();
1888 }
1889 }
1890
1891 /// Create a minimal edit that will cause the given row to be indented
1892 /// with the given size. After applying this edit, the length of the line
1893 /// will always be at least `new_size.len`.
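    ///
    /// A minimal sketch of the expected output (illustrative only, not compiled
    /// as a doctest):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces inserts two spaces at column 0.
    /// let grow = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(grow, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    ///
    /// // Shrinking deletes the excess leading columns; equal sizes yield `None`.
    /// let shrink = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(4),
    ///     IndentSize::spaces(2),
    /// );
    /// assert_eq!(shrink, Some((Point::new(3, 0)..Point::new(3, 2), String::new())));
    /// ```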
1894 pub fn edit_for_indent_size_adjustment(
1895 row: u32,
1896 current_size: IndentSize,
1897 new_size: IndentSize,
1898 ) -> Option<(Range<Point>, String)> {
1899 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1901 Ordering::Greater => {
1902 let point = Point::new(row, 0);
1903 Some((
1904 point..point,
1905 iter::repeat(new_size.char())
1906 .take((new_size.len - current_size.len) as usize)
1907 .collect::<String>(),
1908 ))
1909 }
1910
1911 Ordering::Less => Some((
1912 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1913 String::new(),
1914 )),
1915
1916 Ordering::Equal => None,
1917 }
1918 } else {
1919 Some((
1920 Point::new(row, 0)..Point::new(row, current_size.len),
1921 iter::repeat(new_size.char())
1922 .take(new_size.len as usize)
1923 .collect::<String>(),
1924 ))
1925 }
1926 }
1927
1928 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1929 /// and the given new text.
1930 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1931 let old_text = self.as_rope().clone();
1932 let base_version = self.version();
1933 cx.background_executor()
1934 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1935 let old_text = old_text.to_string();
1936 let line_ending = LineEnding::detect(&new_text);
1937 LineEnding::normalize(&mut new_text);
1938 let edits = text_diff(&old_text, &new_text);
1939 Diff {
1940 base_version,
1941 line_ending,
1942 edits,
1943 }
1944 })
1945 }
1946
1947 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1949 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1950 let old_text = self.as_rope().clone();
1951 let line_ending = self.line_ending();
1952 let base_version = self.version();
1953 cx.background_spawn(async move {
1954 let ranges = trailing_whitespace_ranges(&old_text);
1955 let empty = Arc::<str>::from("");
1956 Diff {
1957 base_version,
1958 line_ending,
1959 edits: ranges
1960 .into_iter()
1961 .map(|range| (range, empty.clone()))
1962 .collect(),
1963 }
1964 })
1965 }
1966
1967 /// Ensures that the buffer ends with a single newline character, and
1968 /// no other whitespace. Skips if the buffer is empty.
1969 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1970 let len = self.len();
1971 if len == 0 {
1972 return;
1973 }
1974 let mut offset = len;
1975 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1976 let non_whitespace_len = chunk
1977 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1978 .len();
1979 offset -= chunk.len();
1980 offset += non_whitespace_len;
1981 if non_whitespace_len != 0 {
1982 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1983 return;
1984 }
1985 break;
1986 }
1987 }
1988 self.edit([(offset..len, "\n")], None, cx);
1989 }
1990
1991 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1992 /// calculated, then adjust the diff to account for those changes, and discard any
1993 /// parts of the diff that conflict with those changes.
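    ///
    /// An illustrative sketch of the usual flow (not compiled as a doctest; `buffer`
    /// is assumed to be an `Entity<Buffer>` and `cx` an app context that can spawn
    /// async work):
    ///
    /// ```ignore
    /// // Compute a diff in the background, then apply it. Any hunks that conflict
    /// // with edits made while the diff was computing are silently discarded.
    /// let diff = buffer.read(cx).diff(new_text, cx);
    /// cx.spawn(async move |cx| {
    ///     let diff = diff.await;
    ///     buffer
    ///         .update(cx, |buffer, cx| buffer.apply_diff(diff, cx))
    ///         .ok();
    /// })
    /// .detach();
    /// ```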
1994 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1995 let snapshot = self.snapshot();
1996 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1997 let mut delta = 0;
1998 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1999 while let Some(edit_since) = edits_since.peek() {
2000 // If the edit occurs after a diff hunk, then it does not
2001 // affect that hunk.
2002 if edit_since.old.start > range.end {
2003 break;
2004 }
2005 // If the edit precedes the diff hunk, then adjust the hunk
2006 // to reflect the edit.
2007 else if edit_since.old.end < range.start {
2008 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2009 edits_since.next();
2010 }
2011 // If the edit intersects a diff hunk, then discard that hunk.
2012 else {
2013 return None;
2014 }
2015 }
2016
2017 let start = (range.start as i64 + delta) as usize;
2018 let end = (range.end as i64 + delta) as usize;
2019 Some((start..end, new_text))
2020 });
2021
2022 self.start_transaction();
2023 self.text.set_line_ending(diff.line_ending);
2024 self.edit(adjusted_edits, None, cx);
2025 self.end_transaction(cx)
2026 }
2027
2028 pub fn has_unsaved_edits(&self) -> bool {
2029 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2030
2031 if last_version == self.version {
2032 self.has_unsaved_edits
2033 .set((last_version, has_unsaved_edits));
2034 return has_unsaved_edits;
2035 }
2036
2037 let has_edits = self.has_edits_since(&self.saved_version);
2038 self.has_unsaved_edits
2039 .set((self.version.clone(), has_edits));
2040 has_edits
2041 }
2042
2043 /// Checks if the buffer has unsaved changes.
2044 pub fn is_dirty(&self) -> bool {
2045 if self.capability == Capability::ReadOnly {
2046 return false;
2047 }
2048 if self.has_conflict {
2049 return true;
2050 }
2051 match self.file.as_ref().map(|f| f.disk_state()) {
2052 Some(DiskState::New) | Some(DiskState::Deleted) => {
2053 !self.is_empty() && self.has_unsaved_edits()
2054 }
2055 _ => self.has_unsaved_edits(),
2056 }
2057 }
2058
2059 /// Marks the buffer as having a conflict regardless of current buffer state.
2060 pub fn set_conflict(&mut self) {
2061 self.has_conflict = true;
2062 }
2063
2064 /// Checks if the buffer and its file have both changed since the buffer
2065 /// was last saved or reloaded.
2066 pub fn has_conflict(&self) -> bool {
2067 if self.has_conflict {
2068 return true;
2069 }
2070 let Some(file) = self.file.as_ref() else {
2071 return false;
2072 };
2073 match file.disk_state() {
2074 DiskState::New => false,
2075 DiskState::Present { mtime } => match self.saved_mtime {
2076 Some(saved_mtime) => {
2077 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2078 }
2079 None => true,
2080 },
2081 DiskState::Deleted => false,
2082 }
2083 }
2084
2085 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2086 pub fn subscribe(&mut self) -> Subscription {
2087 self.text.subscribe()
2088 }
2089
2090 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2091 ///
2092 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
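    ///
    /// A minimal sketch of the intended usage (the caller keeps the strong `Rc`;
    /// only a weak reference is registered with the buffer):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    ///
    /// // ...after some edits have been applied...
    /// if changed.take() {
    ///     // The buffer's text changed since this bit was last cleared.
    /// }
    /// ```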
2094 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2095 if let Err(ix) = self
2096 .change_bits
2097 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2098 {
2099 self.change_bits.insert(ix, bit);
2100 }
2101 }
2102
2103 /// Set the change bit for all "listeners".
2104 fn was_changed(&mut self) {
2105 self.change_bits.retain(|change_bit| {
2106 change_bit
2107 .upgrade()
2108 .inspect(|bit| {
2109 _ = bit.replace(true);
2110 })
2111 .is_some()
2112 });
2113 }
2114
2115 /// Starts a transaction, if one is not already in-progress. When undoing or
2116 /// redoing edits, all of the edits performed within a transaction are undone
2117 /// or redone together.
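    ///
    /// A hedged sketch of grouping two edits into a single undo step (assumes
    /// `buffer: &mut Buffer` and `cx: &mut Context<Buffer>`, e.g. inside an
    /// `Entity::update` closure):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    ///
    /// // Both edits are reverted together.
    /// buffer.undo(cx);
    /// ```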
2118 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2119 self.start_transaction_at(Instant::now())
2120 }
2121
2122 /// Starts a transaction, providing the current time. Subsequent transactions
2123 /// that occur within a short period of time will be grouped together. This
2124 /// is controlled by the buffer's undo grouping duration.
2125 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2126 self.transaction_depth += 1;
2127 if self.was_dirty_before_starting_transaction.is_none() {
2128 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2129 }
2130 self.text.start_transaction_at(now)
2131 }
2132
2133 /// Terminates the current transaction, if this is the outermost transaction.
2134 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2135 self.end_transaction_at(Instant::now(), cx)
2136 }
2137
2138 /// Terminates the current transaction, providing the current time. Subsequent transactions
2139 /// that occur within a short period of time will be grouped together. This
2140 /// is controlled by the buffer's undo grouping duration.
2141 pub fn end_transaction_at(
2142 &mut self,
2143 now: Instant,
2144 cx: &mut Context<Self>,
2145 ) -> Option<TransactionId> {
2146 assert!(self.transaction_depth > 0);
2147 self.transaction_depth -= 1;
2148 let was_dirty = if self.transaction_depth == 0 {
2149 self.was_dirty_before_starting_transaction.take().unwrap()
2150 } else {
2151 false
2152 };
2153 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2154 self.did_edit(&start_version, was_dirty, cx);
2155 Some(transaction_id)
2156 } else {
2157 None
2158 }
2159 }
2160
2161 /// Manually add a transaction to the buffer's undo history.
2162 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2163 self.text.push_transaction(transaction, now);
2164 }
2165
2166 /// Differs from `push_transaction` in that it does not clear the redo
2167 /// stack. Intended to be used to create a parent transaction to merge
2168 /// potential child transactions into.
2169 ///
2170 /// The caller is responsible for removing it from the undo history using
2171 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2172 /// are merged into this transaction, the caller is responsible for ensuring
2173 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2174 /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
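    ///
    /// A hedged sketch of that pattern (the edit shown is illustrative):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    ///
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "child edit")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     // Edits made through `start_transaction`/`end_transaction` clear the redo stack.
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     // Nothing was merged in, so drop the placeholder again.
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```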
2177 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2178 self.text.push_empty_transaction(now)
2179 }
2180
    /// Prevents the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2183 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2184 self.text.finalize_last_transaction()
2185 }
2186
2187 /// Manually group all changes since a given transaction.
2188 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2189 self.text.group_until_transaction(transaction_id);
2190 }
2191
    /// Manually removes a transaction from the buffer's undo history.
2193 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2194 self.text.forget_transaction(transaction_id)
2195 }
2196
    /// Retrieves a transaction from the buffer's undo history.
2198 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2199 self.text.get_transaction(transaction_id)
2200 }
2201
2202 /// Manually merge two transactions in the buffer's undo history.
2203 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2204 self.text.merge_transactions(transaction, destination);
2205 }
2206
2207 /// Waits for the buffer to receive operations with the given timestamps.
2208 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2209 &mut self,
2210 edit_ids: It,
2211 ) -> impl Future<Output = Result<()>> + use<It> {
2212 self.text.wait_for_edits(edit_ids)
2213 }
2214
2215 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2216 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2217 &mut self,
2218 anchors: It,
2219 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2220 self.text.wait_for_anchors(anchors)
2221 }
2222
2223 /// Waits for the buffer to receive operations up to the given version.
2224 pub fn wait_for_version(
2225 &mut self,
2226 version: clock::Global,
2227 ) -> impl Future<Output = Result<()>> + use<> {
2228 self.text.wait_for_version(version)
2229 }
2230
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2233 pub fn give_up_waiting(&mut self) {
2234 self.text.give_up_waiting();
2235 }
2236
2237 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2238 let mut rx = None;
2239 if !self.autoindent_requests.is_empty() {
2240 let channel = oneshot::channel();
2241 self.wait_for_autoindent_txs.push(channel.0);
2242 rx = Some(channel.1);
2243 }
2244 rx
2245 }
2246
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2248 pub fn set_active_selections(
2249 &mut self,
2250 selections: Arc<[Selection<Anchor>]>,
2251 line_mode: bool,
2252 cursor_shape: CursorShape,
2253 cx: &mut Context<Self>,
2254 ) {
2255 let lamport_timestamp = self.text.lamport_clock.tick();
2256 self.remote_selections.insert(
2257 self.text.replica_id(),
2258 SelectionSet {
2259 selections: selections.clone(),
2260 lamport_timestamp,
2261 line_mode,
2262 cursor_shape,
2263 },
2264 );
2265 self.send_operation(
2266 Operation::UpdateSelections {
2267 selections,
2268 line_mode,
2269 lamport_timestamp,
2270 cursor_shape,
2271 },
2272 true,
2273 cx,
2274 );
2275 self.non_text_state_update_count += 1;
2276 cx.notify();
2277 }
2278
2279 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2280 /// this replica.
2281 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2282 if self
2283 .remote_selections
2284 .get(&self.text.replica_id())
2285 .is_none_or(|set| !set.selections.is_empty())
2286 {
2287 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2288 }
2289 }
2290
2291 pub fn set_agent_selections(
2292 &mut self,
2293 selections: Arc<[Selection<Anchor>]>,
2294 line_mode: bool,
2295 cursor_shape: CursorShape,
2296 cx: &mut Context<Self>,
2297 ) {
2298 let lamport_timestamp = self.text.lamport_clock.tick();
2299 self.remote_selections.insert(
2300 ReplicaId::AGENT,
2301 SelectionSet {
2302 selections,
2303 lamport_timestamp,
2304 line_mode,
2305 cursor_shape,
2306 },
2307 );
2308 self.non_text_state_update_count += 1;
2309 cx.notify();
2310 }
2311
2312 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2313 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2314 }
2315
2316 /// Replaces the buffer's entire text.
2317 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2318 where
2319 T: Into<Arc<str>>,
2320 {
2321 self.autoindent_requests.clear();
2322 self.edit([(0..self.len(), text)], None, cx)
2323 }
2324
2325 /// Appends the given text to the end of the buffer.
2326 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2327 where
2328 T: Into<Arc<str>>,
2329 {
2330 self.edit([(self.len()..self.len(), text)], None, cx)
2331 }
2332
2333 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2334 /// delete, and a string of text to insert at that location.
2335 ///
2336 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2337 /// request for the edited ranges, which will be processed when the buffer finishes
2338 /// parsing.
2339 ///
    /// Parsing takes place at the end of a transaction, and may run synchronously
    /// or asynchronously, depending on the changes.
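    ///
    /// A hedged sketch (assumes `buffer: &mut Buffer` and `cx: &mut Context<Buffer>`,
    /// e.g. inside an `Entity::update` closure):
    ///
    /// ```ignore
    /// // Replace the first three bytes and append a line, auto-indenting each
    /// // edited line according to the buffer's language.
    /// buffer.edit(
    ///     [
    ///         (0..3, "let"),
    ///         (buffer.len()..buffer.len(), "\nvalue"),
    ///     ],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```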
2342 pub fn edit<I, S, T>(
2343 &mut self,
2344 edits_iter: I,
2345 autoindent_mode: Option<AutoindentMode>,
2346 cx: &mut Context<Self>,
2347 ) -> Option<clock::Lamport>
2348 where
2349 I: IntoIterator<Item = (Range<S>, T)>,
2350 S: ToOffset,
2351 T: Into<Arc<str>>,
2352 {
2353 // Skip invalid edits and coalesce contiguous ones.
2354 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2355
2356 for (range, new_text) in edits_iter {
2357 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2358
2359 if range.start > range.end {
2360 mem::swap(&mut range.start, &mut range.end);
2361 }
2362 let new_text = new_text.into();
2363 if !new_text.is_empty() || !range.is_empty() {
2364 if let Some((prev_range, prev_text)) = edits.last_mut()
2365 && prev_range.end >= range.start
2366 {
2367 prev_range.end = cmp::max(prev_range.end, range.end);
2368 *prev_text = format!("{prev_text}{new_text}").into();
2369 } else {
2370 edits.push((range, new_text));
2371 }
2372 }
2373 }
2374 if edits.is_empty() {
2375 return None;
2376 }
2377
2378 self.start_transaction();
2379 self.pending_autoindent.take();
2380 let autoindent_request = autoindent_mode
2381 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2382
2383 let edit_operation = self.text.edit(edits.iter().cloned());
2384 let edit_id = edit_operation.timestamp();
2385
2386 if let Some((before_edit, mode)) = autoindent_request {
2387 let mut delta = 0isize;
2388 let mut previous_setting = None;
2389 let entries: Vec<_> = edits
2390 .into_iter()
2391 .enumerate()
2392 .zip(&edit_operation.as_edit().unwrap().new_text)
2393 .filter(|((_, (range, _)), _)| {
2394 let language = before_edit.language_at(range.start);
2395 let language_id = language.map(|l| l.id());
2396 if let Some((cached_language_id, auto_indent)) = previous_setting
2397 && cached_language_id == language_id
2398 {
2399 auto_indent
2400 } else {
2401 // The auto-indent setting is not present in editorconfigs, hence
2402 // we can avoid passing the file here.
2403 let auto_indent =
2404 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2405 previous_setting = Some((language_id, auto_indent));
2406 auto_indent
2407 }
2408 })
2409 .map(|((ix, (range, _)), new_text)| {
2410 let new_text_length = new_text.len();
2411 let old_start = range.start.to_point(&before_edit);
2412 let new_start = (delta + range.start as isize) as usize;
2413 let range_len = range.end - range.start;
2414 delta += new_text_length as isize - range_len as isize;
2415
2416 // Decide what range of the insertion to auto-indent, and whether
2417 // the first line of the insertion should be considered a newly-inserted line
2418 // or an edit to an existing line.
2419 let mut range_of_insertion_to_indent = 0..new_text_length;
2420 let mut first_line_is_new = true;
2421
2422 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2423 let old_line_end = before_edit.line_len(old_start.row);
2424
2425 if old_start.column > old_line_start {
2426 first_line_is_new = false;
2427 }
2428
2429 if !new_text.contains('\n')
2430 && (old_start.column + (range_len as u32) < old_line_end
2431 || old_line_end == old_line_start)
2432 {
2433 first_line_is_new = false;
2434 }
2435
2436 // When inserting text starting with a newline, avoid auto-indenting the
2437 // previous line.
2438 if new_text.starts_with('\n') {
2439 range_of_insertion_to_indent.start += 1;
2440 first_line_is_new = true;
2441 }
2442
2443 let mut original_indent_column = None;
2444 if let AutoindentMode::Block {
2445 original_indent_columns,
2446 } = &mode
2447 {
2448 original_indent_column = Some(if new_text.starts_with('\n') {
2449 indent_size_for_text(
2450 new_text[range_of_insertion_to_indent.clone()].chars(),
2451 )
2452 .len
2453 } else {
2454 original_indent_columns
2455 .get(ix)
2456 .copied()
2457 .flatten()
2458 .unwrap_or_else(|| {
2459 indent_size_for_text(
2460 new_text[range_of_insertion_to_indent.clone()].chars(),
2461 )
2462 .len
2463 })
2464 });
2465
2466 // Avoid auto-indenting the line after the edit.
2467 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2468 range_of_insertion_to_indent.end -= 1;
2469 }
2470 }
2471
2472 AutoindentRequestEntry {
2473 first_line_is_new,
2474 original_indent_column,
2475 indent_size: before_edit.language_indent_size_at(range.start, cx),
2476 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2477 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2478 }
2479 })
2480 .collect();
2481
2482 if !entries.is_empty() {
2483 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2484 before_edit,
2485 entries,
2486 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2487 ignore_empty_lines: false,
2488 }));
2489 }
2490 }
2491
2492 self.end_transaction(cx);
2493 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2494 Some(edit_id)
2495 }
2496
2497 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2498 self.was_changed();
2499
2500 if self.edits_since::<usize>(old_version).next().is_none() {
2501 return;
2502 }
2503
2504 self.reparse(cx);
2505 cx.emit(BufferEvent::Edited);
2506 if was_dirty != self.is_dirty() {
2507 cx.emit(BufferEvent::DirtyChanged);
2508 }
2509 cx.notify();
2510 }
2511
2512 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2513 where
2514 I: IntoIterator<Item = Range<T>>,
2515 T: ToOffset + Copy,
2516 {
2517 let before_edit = self.snapshot();
2518 let entries = ranges
2519 .into_iter()
2520 .map(|range| AutoindentRequestEntry {
2521 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2522 first_line_is_new: true,
2523 indent_size: before_edit.language_indent_size_at(range.start, cx),
2524 original_indent_column: None,
2525 })
2526 .collect();
2527 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2528 before_edit,
2529 entries,
2530 is_block_mode: false,
2531 ignore_empty_lines: true,
2532 }));
2533 self.request_autoindent(cx);
2534 }
2535
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2538 pub fn insert_empty_line(
2539 &mut self,
2540 position: impl ToPoint,
2541 space_above: bool,
2542 space_below: bool,
2543 cx: &mut Context<Self>,
2544 ) -> Point {
2545 let mut position = position.to_point(self);
2546
2547 self.start_transaction();
2548
2549 self.edit(
2550 [(position..position, "\n")],
2551 Some(AutoindentMode::EachLine),
2552 cx,
2553 );
2554
2555 if position.column > 0 {
2556 position += Point::new(1, 0);
2557 }
2558
2559 if !self.is_line_blank(position.row) {
2560 self.edit(
2561 [(position..position, "\n")],
2562 Some(AutoindentMode::EachLine),
2563 cx,
2564 );
2565 }
2566
2567 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2568 self.edit(
2569 [(position..position, "\n")],
2570 Some(AutoindentMode::EachLine),
2571 cx,
2572 );
2573 position.row += 1;
2574 }
2575
2576 if space_below
2577 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2578 {
2579 self.edit(
2580 [(position..position, "\n")],
2581 Some(AutoindentMode::EachLine),
2582 cx,
2583 );
2584 }
2585
2586 self.end_transaction(cx);
2587
2588 position
2589 }
2590
2591 /// Applies the given remote operations to the buffer.
2592 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2593 self.pending_autoindent.take();
2594 let was_dirty = self.is_dirty();
2595 let old_version = self.version.clone();
2596 let mut deferred_ops = Vec::new();
2597 let buffer_ops = ops
2598 .into_iter()
2599 .filter_map(|op| match op {
2600 Operation::Buffer(op) => Some(op),
2601 _ => {
2602 if self.can_apply_op(&op) {
2603 self.apply_op(op, cx);
2604 } else {
2605 deferred_ops.push(op);
2606 }
2607 None
2608 }
2609 })
2610 .collect::<Vec<_>>();
2611 for operation in buffer_ops.iter() {
2612 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2613 }
2614 self.text.apply_ops(buffer_ops);
2615 self.deferred_ops.insert(deferred_ops);
2616 self.flush_deferred_ops(cx);
2617 self.did_edit(&old_version, was_dirty, cx);
2618 // Notify independently of whether the buffer was edited as the operations could include a
2619 // selection update.
2620 cx.notify();
2621 }
2622
2623 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2624 let mut deferred_ops = Vec::new();
2625 for op in self.deferred_ops.drain().iter().cloned() {
2626 if self.can_apply_op(&op) {
2627 self.apply_op(op, cx);
2628 } else {
2629 deferred_ops.push(op);
2630 }
2631 }
2632 self.deferred_ops.insert(deferred_ops);
2633 }
2634
2635 pub fn has_deferred_ops(&self) -> bool {
2636 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2637 }
2638
2639 fn can_apply_op(&self, operation: &Operation) -> bool {
2640 match operation {
2641 Operation::Buffer(_) => {
2642 unreachable!("buffer operations should never be applied at this layer")
2643 }
2644 Operation::UpdateDiagnostics {
2645 diagnostics: diagnostic_set,
2646 ..
2647 } => diagnostic_set.iter().all(|diagnostic| {
2648 self.text.can_resolve(&diagnostic.range.start)
2649 && self.text.can_resolve(&diagnostic.range.end)
2650 }),
2651 Operation::UpdateSelections { selections, .. } => selections
2652 .iter()
2653 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2654 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2655 }
2656 }
2657
2658 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2659 match operation {
2660 Operation::Buffer(_) => {
2661 unreachable!("buffer operations should never be applied at this layer")
2662 }
2663 Operation::UpdateDiagnostics {
2664 server_id,
2665 diagnostics: diagnostic_set,
2666 lamport_timestamp,
2667 } => {
2668 let snapshot = self.snapshot();
2669 self.apply_diagnostic_update(
2670 server_id,
2671 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2672 lamport_timestamp,
2673 cx,
2674 );
2675 }
2676 Operation::UpdateSelections {
2677 selections,
2678 lamport_timestamp,
2679 line_mode,
2680 cursor_shape,
2681 } => {
2682 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2683 && set.lamport_timestamp > lamport_timestamp
2684 {
2685 return;
2686 }
2687
2688 self.remote_selections.insert(
2689 lamport_timestamp.replica_id,
2690 SelectionSet {
2691 selections,
2692 lamport_timestamp,
2693 line_mode,
2694 cursor_shape,
2695 },
2696 );
2697 self.text.lamport_clock.observe(lamport_timestamp);
2698 self.non_text_state_update_count += 1;
2699 }
2700 Operation::UpdateCompletionTriggers {
2701 triggers,
2702 lamport_timestamp,
2703 server_id,
2704 } => {
2705 if triggers.is_empty() {
2706 self.completion_triggers_per_language_server
2707 .remove(&server_id);
2708 self.completion_triggers = self
2709 .completion_triggers_per_language_server
2710 .values()
2711 .flat_map(|triggers| triggers.iter().cloned())
2712 .collect();
2713 } else {
2714 self.completion_triggers_per_language_server
2715 .insert(server_id, triggers.iter().cloned().collect());
2716 self.completion_triggers.extend(triggers);
2717 }
2718 self.text.lamport_clock.observe(lamport_timestamp);
2719 }
2720 Operation::UpdateLineEnding {
2721 line_ending,
2722 lamport_timestamp,
2723 } => {
2724 self.text.set_line_ending(line_ending);
2725 self.text.lamport_clock.observe(lamport_timestamp);
2726 }
2727 }
2728 }
2729
2730 fn apply_diagnostic_update(
2731 &mut self,
2732 server_id: LanguageServerId,
2733 diagnostics: DiagnosticSet,
2734 lamport_timestamp: clock::Lamport,
2735 cx: &mut Context<Self>,
2736 ) {
2737 if lamport_timestamp > self.diagnostics_timestamp {
2738 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2739 if diagnostics.is_empty() {
2740 if let Ok(ix) = ix {
2741 self.diagnostics.remove(ix);
2742 }
2743 } else {
2744 match ix {
2745 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2746 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2747 };
2748 }
2749 self.diagnostics_timestamp = lamport_timestamp;
2750 self.non_text_state_update_count += 1;
2751 self.text.lamport_clock.observe(lamport_timestamp);
2752 cx.notify();
2753 cx.emit(BufferEvent::DiagnosticsUpdated);
2754 }
2755 }
2756
2757 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2758 self.was_changed();
2759 cx.emit(BufferEvent::Operation {
2760 operation,
2761 is_local,
2762 });
2763 }
2764
2765 /// Removes the selections for a given peer.
2766 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2767 self.remote_selections.remove(&replica_id);
2768 cx.notify();
2769 }
2770
2771 /// Undoes the most recent transaction.
2772 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2773 let was_dirty = self.is_dirty();
2774 let old_version = self.version.clone();
2775
2776 if let Some((transaction_id, operation)) = self.text.undo() {
2777 self.send_operation(Operation::Buffer(operation), true, cx);
2778 self.did_edit(&old_version, was_dirty, cx);
2779 Some(transaction_id)
2780 } else {
2781 None
2782 }
2783 }
2784
2785 /// Manually undoes a specific transaction in the buffer's undo history.
2786 pub fn undo_transaction(
2787 &mut self,
2788 transaction_id: TransactionId,
2789 cx: &mut Context<Self>,
2790 ) -> bool {
2791 let was_dirty = self.is_dirty();
2792 let old_version = self.version.clone();
2793 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2794 self.send_operation(Operation::Buffer(operation), true, cx);
2795 self.did_edit(&old_version, was_dirty, cx);
2796 true
2797 } else {
2798 false
2799 }
2800 }
2801
2802 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2803 pub fn undo_to_transaction(
2804 &mut self,
2805 transaction_id: TransactionId,
2806 cx: &mut Context<Self>,
2807 ) -> bool {
2808 let was_dirty = self.is_dirty();
2809 let old_version = self.version.clone();
2810
2811 let operations = self.text.undo_to_transaction(transaction_id);
2812 let undone = !operations.is_empty();
2813 for operation in operations {
2814 self.send_operation(Operation::Buffer(operation), true, cx);
2815 }
2816 if undone {
2817 self.did_edit(&old_version, was_dirty, cx)
2818 }
2819 undone
2820 }
2821
2822 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2823 let was_dirty = self.is_dirty();
2824 let operation = self.text.undo_operations(counts);
2825 let old_version = self.version.clone();
2826 self.send_operation(Operation::Buffer(operation), true, cx);
2827 self.did_edit(&old_version, was_dirty, cx);
2828 }
2829
    /// Redoes the most recently undone transaction.
2831 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2832 let was_dirty = self.is_dirty();
2833 let old_version = self.version.clone();
2834
2835 if let Some((transaction_id, operation)) = self.text.redo() {
2836 self.send_operation(Operation::Buffer(operation), true, cx);
2837 self.did_edit(&old_version, was_dirty, cx);
2838 Some(transaction_id)
2839 } else {
2840 None
2841 }
2842 }
2843
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2845 pub fn redo_to_transaction(
2846 &mut self,
2847 transaction_id: TransactionId,
2848 cx: &mut Context<Self>,
2849 ) -> bool {
2850 let was_dirty = self.is_dirty();
2851 let old_version = self.version.clone();
2852
2853 let operations = self.text.redo_to_transaction(transaction_id);
2854 let redone = !operations.is_empty();
2855 for operation in operations {
2856 self.send_operation(Operation::Buffer(operation), true, cx);
2857 }
2858 if redone {
2859 self.did_edit(&old_version, was_dirty, cx)
2860 }
2861 redone
2862 }
2863
    /// Overrides the current completion triggers with the user-provided completion triggers.
2865 pub fn set_completion_triggers(
2866 &mut self,
2867 server_id: LanguageServerId,
2868 triggers: BTreeSet<String>,
2869 cx: &mut Context<Self>,
2870 ) {
2871 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2872 if triggers.is_empty() {
2873 self.completion_triggers_per_language_server
2874 .remove(&server_id);
2875 self.completion_triggers = self
2876 .completion_triggers_per_language_server
2877 .values()
2878 .flat_map(|triggers| triggers.iter().cloned())
2879 .collect();
2880 } else {
2881 self.completion_triggers_per_language_server
2882 .insert(server_id, triggers.clone());
2883 self.completion_triggers.extend(triggers.iter().cloned());
2884 }
2885 self.send_operation(
2886 Operation::UpdateCompletionTriggers {
2887 triggers: triggers.into_iter().collect(),
2888 lamport_timestamp: self.completion_triggers_timestamp,
2889 server_id,
2890 },
2891 true,
2892 cx,
2893 );
2894 cx.notify();
2895 }
2896
    /// Returns the set of strings that trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2899 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2900 &self.completion_triggers
2901 }
2902
2903 /// Call this directly after performing edits to prevent the preview tab
2904 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2905 /// to return false until there are additional edits.
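    ///
    /// An illustrative sketch of the intended call pattern (mirrors how autoindent
    /// edits are applied internally):
    ///
    /// ```ignore
    /// let preserve = buffer.preserve_preview();
    /// buffer.edit(edits, None, cx);
    /// if preserve {
    ///     // Keep the preview tab from being dismissed by this programmatic edit.
    ///     buffer.refresh_preview();
    /// }
    /// ```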
2906 pub fn refresh_preview(&mut self) {
2907 self.preview_version = self.version.clone();
2908 }
2909
2910 /// Whether we should preserve the preview status of a tab containing this buffer.
2911 pub fn preserve_preview(&self) -> bool {
2912 !self.has_edits_since(&self.preview_version)
2913 }
2914}
2915
2916#[doc(hidden)]
2917#[cfg(any(test, feature = "test-support"))]
2918impl Buffer {
2919 pub fn edit_via_marked_text(
2920 &mut self,
2921 marked_string: &str,
2922 autoindent_mode: Option<AutoindentMode>,
2923 cx: &mut Context<Self>,
2924 ) {
2925 let edits = self.edits_for_marked_text(marked_string);
2926 self.edit(edits, autoindent_mode, cx);
2927 }
2928
2929 pub fn set_group_interval(&mut self, group_interval: Duration) {
2930 self.text.set_group_interval(group_interval);
2931 }
2932
2933 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2934 where
2935 T: rand::Rng,
2936 {
2937 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2938 let mut last_end = None;
2939 for _ in 0..old_range_count {
2940 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2941 break;
2942 }
2943
2944 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2945 let mut range = self.random_byte_range(new_start, rng);
2946 if rng.random_bool(0.2) {
2947 mem::swap(&mut range.start, &mut range.end);
2948 }
2949 last_end = Some(range.end);
2950
2951 let new_text_len = rng.random_range(0..10);
2952 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2953 new_text = new_text.to_uppercase();
2954
2955 edits.push((range, new_text));
2956 }
2957 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2958 self.edit(edits, None, cx);
2959 }
2960
2961 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2962 let was_dirty = self.is_dirty();
2963 let old_version = self.version.clone();
2964
2965 let ops = self.text.randomly_undo_redo(rng);
2966 if !ops.is_empty() {
2967 for op in ops {
2968 self.send_operation(Operation::Buffer(op), true, cx);
2969 self.did_edit(&old_version, was_dirty, cx);
2970 }
2971 }
2972 }
2973}
2974
2975impl EventEmitter<BufferEvent> for Buffer {}
2976
2977impl Deref for Buffer {
2978 type Target = TextBuffer;
2979
2980 fn deref(&self) -> &Self::Target {
2981 &self.text
2982 }
2983}
2984
2985impl BufferSnapshot {
2986 /// Returns [`IndentSize`] for a given line that respects user settings and
2987 /// language preferences.
2988 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2989 indent_size_for_line(self, row)
2990 }
2991
2992 /// Returns [`IndentSize`] for a given position that respects user settings
2993 /// and language preferences.
2994 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2995 let settings = language_settings(
2996 self.language_at(position).map(|l| l.name()),
2997 self.file(),
2998 cx,
2999 );
3000 if settings.hard_tabs {
3001 IndentSize::tab()
3002 } else {
3003 IndentSize::spaces(settings.tab_size.get())
3004 }
3005 }
3006
3007 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3008 /// is passed in as `single_indent_size`.
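    ///
    /// A hedged sketch (assumes a `BufferSnapshot` bound to `snapshot`):
    ///
    /// ```ignore
    /// // Suggest indentation for rows 1 through 3, using a 4-space indent unit.
    /// let suggestions = snapshot.suggested_indents(1..=3, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent to {} columns", indent.len);
    /// }
    /// ```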
3009 pub fn suggested_indents(
3010 &self,
3011 rows: impl Iterator<Item = u32>,
3012 single_indent_size: IndentSize,
3013 ) -> BTreeMap<u32, IndentSize> {
3014 let mut result = BTreeMap::new();
3015
3016 for row_range in contiguous_ranges(rows, 10) {
3017 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3018 Some(suggestions) => suggestions,
3019 _ => break,
3020 };
3021
3022 for (row, suggestion) in row_range.zip(suggestions) {
3023 let indent_size = if let Some(suggestion) = suggestion {
3024 result
3025 .get(&suggestion.basis_row)
3026 .copied()
3027 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3028 .with_delta(suggestion.delta, single_indent_size)
3029 } else {
3030 self.indent_size_for_line(row)
3031 };
3032
3033 result.insert(row, indent_size);
3034 }
3035 }
3036
3037 result
3038 }
3039
3040 fn suggest_autoindents(
3041 &self,
3042 row_range: Range<u32>,
3043 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3044 let config = &self.language.as_ref()?.config;
3045 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3046
3047 #[derive(Debug, Clone)]
3048 struct StartPosition {
3049 start: Point,
3050 suffix: SharedString,
3051 }
3052
3053 // Find the suggested indentation ranges based on the syntax tree.
3054 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3055 let end = Point::new(row_range.end, 0);
3056 let range = (start..end).to_offset(&self.text);
3057 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3058 Some(&grammar.indents_config.as_ref()?.query)
3059 });
3060 let indent_configs = matches
3061 .grammars()
3062 .iter()
3063 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3064 .collect::<Vec<_>>();
3065
3066 let mut indent_ranges = Vec::<Range<Point>>::new();
3067 let mut start_positions = Vec::<StartPosition>::new();
3068 let mut outdent_positions = Vec::<Point>::new();
3069 while let Some(mat) = matches.peek() {
3070 let mut start: Option<Point> = None;
3071 let mut end: Option<Point> = None;
3072
3073 let config = indent_configs[mat.grammar_index];
3074 for capture in mat.captures {
3075 if capture.index == config.indent_capture_ix {
3076 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3077 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3078 } else if Some(capture.index) == config.start_capture_ix {
3079 start = Some(Point::from_ts_point(capture.node.end_position()));
3080 } else if Some(capture.index) == config.end_capture_ix {
3081 end = Some(Point::from_ts_point(capture.node.start_position()));
3082 } else if Some(capture.index) == config.outdent_capture_ix {
3083 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3084 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3085 start_positions.push(StartPosition {
3086 start: Point::from_ts_point(capture.node.start_position()),
3087 suffix: suffix.clone(),
3088 });
3089 }
3090 }
3091
3092 matches.advance();
3093 if let Some((start, end)) = start.zip(end) {
3094 if start.row == end.row {
3095 continue;
3096 }
3097 let range = start..end;
3098 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3099 Err(ix) => indent_ranges.insert(ix, range),
3100 Ok(ix) => {
3101 let prev_range = &mut indent_ranges[ix];
3102 prev_range.end = prev_range.end.max(range.end);
3103 }
3104 }
3105 }
3106 }
3107
3108 let mut error_ranges = Vec::<Range<Point>>::new();
3109 let mut matches = self
3110 .syntax
3111 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3112 while let Some(mat) = matches.peek() {
3113 let node = mat.captures[0].node;
3114 let start = Point::from_ts_point(node.start_position());
3115 let end = Point::from_ts_point(node.end_position());
3116 let range = start..end;
3117 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3118 Ok(ix) | Err(ix) => ix,
3119 };
3120 let mut end_ix = ix;
3121 while let Some(existing_range) = error_ranges.get(end_ix) {
3122 if existing_range.end < end {
3123 end_ix += 1;
3124 } else {
3125 break;
3126 }
3127 }
3128 error_ranges.splice(ix..end_ix, [range]);
3129 matches.advance();
3130 }
3131
3132 outdent_positions.sort();
3133 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and set its end to the outdent position.
3136 if let Some(range_to_truncate) = indent_ranges
3137 .iter_mut()
3138 .filter(|indent_range| indent_range.contains(&outdent_position))
3139 .next_back()
3140 {
3141 range_to_truncate.end = outdent_position;
3142 }
3143 }
3144
3145 start_positions.sort_by_key(|b| b.start);
3146
        // Find the suggested indentation increases and decreases based on regexes.
3148 let mut regex_outdent_map = HashMap::default();
3149 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3150 let mut start_positions_iter = start_positions.iter().peekable();
3151
3152 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3153 self.for_each_line(
3154 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3155 ..Point::new(row_range.end, 0),
3156 |row, line| {
3157 if config
3158 .decrease_indent_pattern
3159 .as_ref()
3160 .is_some_and(|regex| regex.is_match(line))
3161 {
3162 indent_change_rows.push((row, Ordering::Less));
3163 }
3164 if config
3165 .increase_indent_pattern
3166 .as_ref()
3167 .is_some_and(|regex| regex.is_match(line))
3168 {
3169 indent_change_rows.push((row + 1, Ordering::Greater));
3170 }
3171 while let Some(pos) = start_positions_iter.peek() {
3172 if pos.start.row < row {
3173 let pos = start_positions_iter.next().unwrap();
3174 last_seen_suffix
3175 .entry(pos.suffix.to_string())
3176 .or_default()
3177 .push(pos.start);
3178 } else {
3179 break;
3180 }
3181 }
3182 for rule in &config.decrease_indent_patterns {
3183 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3184 let row_start_column = self.indent_size_for_line(row).len;
3185 let basis_row = rule
3186 .valid_after
3187 .iter()
3188 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3189 .flatten()
3190 .filter(|start_point| start_point.column <= row_start_column)
3191 .max_by_key(|start_point| start_point.row);
3192 if let Some(outdent_to_row) = basis_row {
3193 regex_outdent_map.insert(row, outdent_to_row.row);
3194 }
3195 break;
3196 }
3197 }
3198 },
3199 );
3200
3201 let mut indent_changes = indent_change_rows.into_iter().peekable();
3202 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3203 prev_non_blank_row.unwrap_or(0)
3204 } else {
3205 row_range.start.saturating_sub(1)
3206 };
3207
3208 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3209 Some(row_range.map(move |row| {
3210 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3211
3212 let mut indent_from_prev_row = false;
3213 let mut outdent_from_prev_row = false;
3214 let mut outdent_to_row = u32::MAX;
3215 let mut from_regex = false;
3216
3217 while let Some((indent_row, delta)) = indent_changes.peek() {
3218 match indent_row.cmp(&row) {
3219 Ordering::Equal => match delta {
3220 Ordering::Less => {
3221 from_regex = true;
3222 outdent_from_prev_row = true
3223 }
3224 Ordering::Greater => {
3225 indent_from_prev_row = true;
3226 from_regex = true
3227 }
3228 _ => {}
3229 },
3230
3231 Ordering::Greater => break,
3232 Ordering::Less => {}
3233 }
3234
3235 indent_changes.next();
3236 }
3237
3238 for range in &indent_ranges {
3239 if range.start.row >= row {
3240 break;
3241 }
3242 if range.start.row == prev_row && range.end > row_start {
3243 indent_from_prev_row = true;
3244 }
3245 if range.end > prev_row_start && range.end <= row_start {
3246 outdent_to_row = outdent_to_row.min(range.start.row);
3247 }
3248 }
3249
3250 if let Some(basis_row) = regex_outdent_map.get(&row) {
3251 indent_from_prev_row = false;
3252 outdent_to_row = *basis_row;
3253 from_regex = true;
3254 }
3255
3256 let within_error = error_ranges
3257 .iter()
3258 .any(|e| e.start.row < row && e.end > row_start);
3259
3260 let suggestion = if outdent_to_row == prev_row
3261 || (outdent_from_prev_row && indent_from_prev_row)
3262 {
3263 Some(IndentSuggestion {
3264 basis_row: prev_row,
3265 delta: Ordering::Equal,
3266 within_error: within_error && !from_regex,
3267 })
3268 } else if indent_from_prev_row {
3269 Some(IndentSuggestion {
3270 basis_row: prev_row,
3271 delta: Ordering::Greater,
3272 within_error: within_error && !from_regex,
3273 })
3274 } else if outdent_to_row < prev_row {
3275 Some(IndentSuggestion {
3276 basis_row: outdent_to_row,
3277 delta: Ordering::Equal,
3278 within_error: within_error && !from_regex,
3279 })
3280 } else if outdent_from_prev_row {
3281 Some(IndentSuggestion {
3282 basis_row: prev_row,
3283 delta: Ordering::Less,
3284 within_error: within_error && !from_regex,
3285 })
3286 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3287 {
3288 Some(IndentSuggestion {
3289 basis_row: prev_row,
3290 delta: Ordering::Equal,
3291 within_error: within_error && !from_regex,
3292 })
3293 } else {
3294 None
3295 };
3296
3297 prev_row = row;
3298 prev_row_start = row_start;
3299 suggestion
3300 }))
3301 }
3302
3303 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3304 while row > 0 {
3305 row -= 1;
3306 if !self.is_line_blank(row) {
3307 return Some(row);
3308 }
3309 }
3310 None
3311 }
3312
3313 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3314 let captures = self.syntax.captures(range, &self.text, |grammar| {
3315 grammar
3316 .highlights_config
3317 .as_ref()
3318 .map(|config| &config.query)
3319 });
3320 let highlight_maps = captures
3321 .grammars()
3322 .iter()
3323 .map(|grammar| grammar.highlight_map())
3324 .collect();
3325 (captures, highlight_maps)
3326 }
3327
3328 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3329 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3330 /// returned in chunks where each chunk has a single syntax highlighting style and
3331 /// diagnostic status.
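    ///
    /// A hedged sketch of reassembling the text while inspecting highlights (field
    /// names follow `Chunk`; `snapshot` is assumed to be a `BufferSnapshot`):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // `chunk.syntax_highlight_id` carries the highlight for this chunk, if any.
    ///     text.push_str(chunk.text);
    /// }
    /// assert_eq!(text, snapshot.text());
    /// ```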
3332 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3333 let range = range.start.to_offset(self)..range.end.to_offset(self);
3334
3335 let mut syntax = None;
3336 if language_aware {
3337 syntax = Some(self.get_highlights(range.clone()));
3338 }
3339 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3340 let diagnostics = language_aware;
3341 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3342 }
3343
3344 pub fn highlighted_text_for_range<T: ToOffset>(
3345 &self,
3346 range: Range<T>,
3347 override_style: Option<HighlightStyle>,
3348 syntax_theme: &SyntaxTheme,
3349 ) -> HighlightedText {
3350 HighlightedText::from_buffer_range(
3351 range,
3352 &self.text,
3353 &self.syntax,
3354 override_style,
3355 syntax_theme,
3356 )
3357 }
3358
3359 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3361 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3362 let mut line = String::new();
3363 let mut row = range.start.row;
3364 for chunk in self
3365 .as_rope()
3366 .chunks_in_range(range.to_offset(self))
3367 .chain(["\n"])
3368 {
3369 for (newline_ix, text) in chunk.split('\n').enumerate() {
3370 if newline_ix > 0 {
3371 callback(row, &line);
3372 row += 1;
3373 line.clear();
3374 }
3375 line.push_str(text);
3376 }
3377 }
3378 }
3379
3380 /// Iterates over every [`SyntaxLayer`] in the buffer.
3381 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3382 self.syntax_layers_for_range(0..self.len(), true)
3383 }
3384
3385 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3386 let offset = position.to_offset(self);
3387 self.syntax_layers_for_range(offset..offset, false)
3388 .filter(|l| {
3389 if let Some(ranges) = l.included_sub_ranges {
3390 ranges.iter().any(|range| {
3391 let start = range.start.to_offset(self);
3392 start <= offset && {
3393 let end = range.end.to_offset(self);
3394 offset < end
3395 }
3396 })
3397 } else {
3398 l.node().start_byte() <= offset && l.node().end_byte() > offset
3399 }
3400 })
3401 .last()
3402 }
3403
3404 pub fn syntax_layers_for_range<D: ToOffset>(
3405 &self,
3406 range: Range<D>,
3407 include_hidden: bool,
3408 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3409 self.syntax
3410 .layers_for_range(range, &self.text, include_hidden)
3411 }
3412
3413 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3414 &self,
3415 range: Range<D>,
3416 ) -> Option<SyntaxLayer<'_>> {
3417 let range = range.to_offset(self);
3418 self.syntax
3419 .layers_for_range(range, &self.text, false)
3420 .max_by(|a, b| {
3421 if a.depth != b.depth {
3422 a.depth.cmp(&b.depth)
3423 } else if a.offset.0 != b.offset.0 {
3424 a.offset.0.cmp(&b.offset.0)
3425 } else {
3426 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3427 }
3428 })
3429 }
3430
3431 /// Returns the main [`Language`].
3432 pub fn language(&self) -> Option<&Arc<Language>> {
3433 self.language.as_ref()
3434 }
3435
3436 /// Returns the [`Language`] at the given location.
3437 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3438 self.syntax_layer_at(position)
3439 .map(|info| info.language)
3440 .or(self.language.as_ref())
3441 }
3442
3443 /// Returns the settings for the language at the given location.
3444 pub fn settings_at<'a, D: ToOffset>(
3445 &'a self,
3446 position: D,
3447 cx: &'a App,
3448 ) -> Cow<'a, LanguageSettings> {
3449 language_settings(
3450 self.language_at(position).map(|l| l.name()),
3451 self.file.as_ref(),
3452 cx,
3453 )
3454 }
3455
3456 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3457 CharClassifier::new(self.language_scope_at(point))
3458 }
3459
3460 /// Returns the [`LanguageScope`] at the given location.
3461 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3462 let offset = position.to_offset(self);
3463 let mut scope = None;
3464 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3465
3466 // Use the layer that has the smallest node intersecting the given point.
3467 for layer in self
3468 .syntax
3469 .layers_for_range(offset..offset, &self.text, false)
3470 {
3471 let mut cursor = layer.node().walk();
3472
3473 let mut range = None;
3474 loop {
3475 let child_range = cursor.node().byte_range();
3476 if !child_range.contains(&offset) {
3477 break;
3478 }
3479
3480 range = Some(child_range);
3481 if cursor.goto_first_child_for_byte(offset).is_none() {
3482 break;
3483 }
3484 }
3485
3486 if let Some(range) = range
3487 && smallest_range_and_depth.as_ref().is_none_or(
3488 |(smallest_range, smallest_range_depth)| {
3489 if layer.depth > *smallest_range_depth {
3490 true
3491 } else if layer.depth == *smallest_range_depth {
3492 range.len() < smallest_range.len()
3493 } else {
3494 false
3495 }
3496 },
3497 )
3498 {
3499 smallest_range_and_depth = Some((range, layer.depth));
3500 scope = Some(LanguageScope {
3501 language: layer.language.clone(),
3502 override_id: layer.override_id(offset, &self.text),
3503 });
3504 }
3505 }
3506
3507 scope.or_else(|| {
3508 self.language.clone().map(|language| LanguageScope {
3509 language,
3510 override_id: None,
3511 })
3512 })
3513 }
3514
3515 /// Returns a tuple of the range and character kind of the word
3516 /// surrounding the given position.
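    ///
    /// A minimal sketch (assumes `snapshot` is a `BufferSnapshot` and `cursor` is a
    /// valid offset):
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(cursor, None);
    /// if kind == Some(CharKind::Word) {
    ///     let word: String = snapshot.text_for_range(range).collect();
    /// }
    /// ```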
3517 pub fn surrounding_word<T: ToOffset>(
3518 &self,
3519 start: T,
3520 scope_context: Option<CharScopeContext>,
3521 ) -> (Range<usize>, Option<CharKind>) {
3522 let mut start = start.to_offset(self);
3523 let mut end = start;
3524 let mut next_chars = self.chars_at(start).take(128).peekable();
3525 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3526
3527 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3528 let word_kind = cmp::max(
3529 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3530 next_chars.peek().copied().map(|c| classifier.kind(c)),
3531 );
3532
3533 for ch in prev_chars {
3534 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3535 start -= ch.len_utf8();
3536 } else {
3537 break;
3538 }
3539 }
3540
3541 for ch in next_chars {
3542 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3543 end += ch.len_utf8();
3544 } else {
3545 break;
3546 }
3547 }
3548
3549 (start..end, word_kind)
3550 }
3551
3552 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3553 /// range. When `require_larger` is true, the node found must be larger than the query range.
3554 ///
3555 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3556 /// be moved to the root of the tree.
3557 fn goto_node_enclosing_range(
3558 cursor: &mut tree_sitter::TreeCursor,
3559 query_range: &Range<usize>,
3560 require_larger: bool,
3561 ) -> bool {
3562 let mut ascending = false;
3563 loop {
3564 let mut range = cursor.node().byte_range();
3565 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3568 if range.start > query_range.start {
3569 cursor.goto_previous_sibling();
3570 range = cursor.node().byte_range();
3571 }
3572 } else {
3573 // When the query range is non-empty and the current node ends exactly at the start,
3574 // move to the next sibling to find a node that extends beyond the start.
3575 if range.end == query_range.start {
3576 cursor.goto_next_sibling();
3577 range = cursor.node().byte_range();
3578 }
3579 }
3580
3581 let encloses = range.contains_inclusive(query_range)
3582 && (!require_larger || range.len() > query_range.len());
3583 if !encloses {
3584 ascending = true;
3585 if !cursor.goto_parent() {
3586 return false;
3587 }
3588 continue;
3589 } else if ascending {
3590 return true;
3591 }
3592
3593 // Descend into the current node.
3594 if cursor
3595 .goto_first_child_for_byte(query_range.start)
3596 .is_none()
3597 {
3598 return true;
3599 }
3600 }
3601 }
3602
3603 pub fn syntax_ancestor<'a, T: ToOffset>(
3604 &'a self,
3605 range: Range<T>,
3606 ) -> Option<tree_sitter::Node<'a>> {
3607 let range = range.start.to_offset(self)..range.end.to_offset(self);
3608 let mut result: Option<tree_sitter::Node<'a>> = None;
3609 for layer in self
3610 .syntax
3611 .layers_for_range(range.clone(), &self.text, true)
3612 {
3613 let mut cursor = layer.node().walk();
3614
3615 // Find the node that both contains the range and is larger than it.
3616 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3617 continue;
3618 }
3619
3620 let left_node = cursor.node();
3621 let mut layer_result = left_node;
3622
3623 // For an empty range, try to find another node immediately to the right of the range.
3624 if left_node.end_byte() == range.start {
3625 let mut right_node = None;
3626 while !cursor.goto_next_sibling() {
3627 if !cursor.goto_parent() {
3628 break;
3629 }
3630 }
3631
3632 while cursor.node().start_byte() == range.start {
3633 right_node = Some(cursor.node());
3634 if !cursor.goto_first_child() {
3635 break;
3636 }
3637 }
3638
3639 // If there is a candidate node on both sides of the (empty) range, then
3640 // decide between the two by favoring a named node over an anonymous token.
3641 // If both nodes are the same in that regard, favor the right one.
3642 if let Some(right_node) = right_node
3643 && (right_node.is_named() || !left_node.is_named())
3644 {
3645 layer_result = right_node;
3646 }
3647 }
3648
3649 if let Some(previous_result) = &result
3650 && previous_result.byte_range().len() < layer_result.byte_range().len()
3651 {
3652 continue;
3653 }
3654 result = Some(layer_result);
3655 }
3656
3657 result
3658 }
3659
3660 /// Find the previous sibling syntax node at the given range.
3661 ///
3662 /// This function locates the syntax node that precedes the node containing
3663 /// the given range. It searches hierarchically by:
3664 /// 1. Finding the node that contains the given range
3665 /// 2. Looking for the previous sibling at the same tree level
3666 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3667 ///
3668 /// Returns `None` if there is no previous sibling at any ancestor level.
3669 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3670 &'a self,
3671 range: Range<T>,
3672 ) -> Option<tree_sitter::Node<'a>> {
3673 let range = range.start.to_offset(self)..range.end.to_offset(self);
3674 let mut result: Option<tree_sitter::Node<'a>> = None;
3675
3676 for layer in self
3677 .syntax
3678 .layers_for_range(range.clone(), &self.text, true)
3679 {
3680 let mut cursor = layer.node().walk();
3681
3682 // Find the node that contains the range
3683 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3684 continue;
3685 }
3686
3687 // Look for the previous sibling, moving up ancestor levels if needed
3688 loop {
3689 if cursor.goto_previous_sibling() {
3690 let layer_result = cursor.node();
3691
3692 if let Some(previous_result) = &result {
3693 if previous_result.byte_range().end < layer_result.byte_range().end {
3694 continue;
3695 }
3696 }
3697 result = Some(layer_result);
3698 break;
3699 }
3700
3701 // No sibling found at this level, try moving up to parent
3702 if !cursor.goto_parent() {
3703 break;
3704 }
3705 }
3706 }
3707
3708 result
3709 }
3710
3711 /// Find the next sibling syntax node at the given range.
3712 ///
3713 /// This function locates the syntax node that follows the node containing
3714 /// the given range. It searches hierarchically by:
3715 /// 1. Finding the node that contains the given range
3716 /// 2. Looking for the next sibling at the same tree level
3717 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3718 ///
3719 /// Returns `None` if there is no next sibling at any ancestor level.
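    ///
    /// A minimal sketch (assumes `snapshot` is a parsed `BufferSnapshot` and `selection`
    /// is a `Range<usize>`):
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_next_sibling(selection.clone()) {
    ///     let jump_target = node.start_byte();
    /// }
    /// ```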
3720 pub fn syntax_next_sibling<'a, T: ToOffset>(
3721 &'a self,
3722 range: Range<T>,
3723 ) -> Option<tree_sitter::Node<'a>> {
3724 let range = range.start.to_offset(self)..range.end.to_offset(self);
3725 let mut result: Option<tree_sitter::Node<'a>> = None;
3726
3727 for layer in self
3728 .syntax
3729 .layers_for_range(range.clone(), &self.text, true)
3730 {
3731 let mut cursor = layer.node().walk();
3732
3733 // Find the node that contains the range
3734 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3735 continue;
3736 }
3737
3738 // Look for the next sibling, moving up ancestor levels if needed
3739 loop {
3740 if cursor.goto_next_sibling() {
3741 let layer_result = cursor.node();
3742
3743 if let Some(previous_result) = &result {
3744 if previous_result.byte_range().start > layer_result.byte_range().start {
3745 continue;
3746 }
3747 }
3748 result = Some(layer_result);
3749 break;
3750 }
3751
3752 // No sibling found at this level, try moving up to parent
3753 if !cursor.goto_parent() {
3754 break;
3755 }
3756 }
3757 }
3758
3759 result
3760 }
3761
    /// Returns the root syntax node within the given row.
3763 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3764 let start_offset = position.to_offset(self);
3765
3766 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3767
3768 let layer = self
3769 .syntax
3770 .layers_for_range(start_offset..start_offset, &self.text, true)
3771 .next()?;
3772
3773 let mut cursor = layer.node().walk();
3774
        // Descend to the deepest node that touches the given position.
3776 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3777 if cursor.node().end_byte() == start_offset {
3778 cursor.goto_next_sibling();
3779 }
3780 }
3781
3782 // Ascend to the root node within the same row.
3783 while cursor.goto_parent() {
3784 if cursor.node().start_position().row != row {
3785 break;
3786 }
3787 }
3788
3789 Some(cursor.node())
3790 }
3791
3792 /// Returns the outline for the buffer.
3793 ///
3794 /// This method allows passing an optional [`SyntaxTheme`] to
3795 /// syntax-highlight the returned symbols.
3796 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3797 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3798 }
3799
3800 /// Returns all the symbols that contain the given position.
3801 ///
3802 /// This method allows passing an optional [`SyntaxTheme`] to
3803 /// syntax-highlight the returned symbols.
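    ///
    /// A minimal sketch (assumes `snapshot` is a parsed `BufferSnapshot` and `cursor`
    /// is a valid offset):
    ///
    /// ```ignore
    /// for item in snapshot.symbols_containing(cursor, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```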
3804 pub fn symbols_containing<T: ToOffset>(
3805 &self,
3806 position: T,
3807 theme: Option<&SyntaxTheme>,
3808 ) -> Vec<OutlineItem<Anchor>> {
3809 let position = position.to_offset(self);
3810 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3811 let end = self.clip_offset(position + 1, Bias::Right);
3812 let mut items = self.outline_items_containing(start..end, false, theme);
3813 let mut prev_depth = None;
3814 items.retain(|item| {
3815 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3816 prev_depth = Some(item.depth);
3817 result
3818 });
3819 items
3820 }
3821
3822 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3823 let range = range.to_offset(self);
3824 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3825 grammar.outline_config.as_ref().map(|c| &c.query)
3826 });
3827 let configs = matches
3828 .grammars()
3829 .iter()
3830 .map(|g| g.outline_config.as_ref().unwrap())
3831 .collect::<Vec<_>>();
3832
3833 while let Some(mat) = matches.peek() {
3834 let config = &configs[mat.grammar_index];
3835 let containing_item_node = maybe!({
3836 let item_node = mat.captures.iter().find_map(|cap| {
3837 if cap.index == config.item_capture_ix {
3838 Some(cap.node)
3839 } else {
3840 None
3841 }
3842 })?;
3843
3844 let item_byte_range = item_node.byte_range();
3845 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3846 None
3847 } else {
3848 Some(item_node)
3849 }
3850 });
3851
3852 if let Some(item_node) = containing_item_node {
3853 return Some(
3854 Point::from_ts_point(item_node.start_position())
3855 ..Point::from_ts_point(item_node.end_position()),
3856 );
3857 }
3858
3859 matches.advance();
3860 }
3861 None
3862 }
3863
3864 pub fn outline_items_containing<T: ToOffset>(
3865 &self,
3866 range: Range<T>,
3867 include_extra_context: bool,
3868 theme: Option<&SyntaxTheme>,
3869 ) -> Vec<OutlineItem<Anchor>> {
3870 self.outline_items_containing_internal(
3871 range,
3872 include_extra_context,
3873 theme,
3874 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3875 )
3876 }
3877
3878 pub fn outline_items_as_points_containing<T: ToOffset>(
3879 &self,
3880 range: Range<T>,
3881 include_extra_context: bool,
3882 theme: Option<&SyntaxTheme>,
3883 ) -> Vec<OutlineItem<Point>> {
3884 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3885 range
3886 })
3887 }
3888
3889 fn outline_items_containing_internal<T: ToOffset, U>(
3890 &self,
3891 range: Range<T>,
3892 include_extra_context: bool,
3893 theme: Option<&SyntaxTheme>,
3894 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3895 ) -> Vec<OutlineItem<U>> {
3896 let range = range.to_offset(self);
3897 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3898 grammar.outline_config.as_ref().map(|c| &c.query)
3899 });
3900
3901 let mut items = Vec::new();
3902 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3903 while let Some(mat) = matches.peek() {
3904 let config = matches.grammars()[mat.grammar_index]
3905 .outline_config
3906 .as_ref()
3907 .unwrap();
3908 if let Some(item) =
3909 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3910 {
3911 items.push(item);
3912 } else if let Some(capture) = mat
3913 .captures
3914 .iter()
3915 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3916 {
3917 let capture_range = capture.node.start_position()..capture.node.end_position();
3918 let mut capture_row_range =
3919 capture_range.start.row as u32..capture_range.end.row as u32;
3920 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3921 {
3922 capture_row_range.end -= 1;
3923 }
3924 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3925 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3926 last_row_range.end = capture_row_range.end;
3927 } else {
3928 annotation_row_ranges.push(capture_row_range);
3929 }
3930 } else {
3931 annotation_row_ranges.push(capture_row_range);
3932 }
3933 }
3934 matches.advance();
3935 }
3936
3937 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3938
3939 // Assign depths based on containment relationships and convert to anchors.
3940 let mut item_ends_stack = Vec::<Point>::new();
3941 let mut anchor_items = Vec::new();
3942 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3943 for item in items {
3944 while let Some(last_end) = item_ends_stack.last().copied() {
3945 if last_end < item.range.end {
3946 item_ends_stack.pop();
3947 } else {
3948 break;
3949 }
3950 }
3951
3952 let mut annotation_row_range = None;
3953 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3954 let row_preceding_item = item.range.start.row.saturating_sub(1);
3955 if next_annotation_row_range.end < row_preceding_item {
3956 annotation_row_ranges.next();
3957 } else {
3958 if next_annotation_row_range.end == row_preceding_item {
3959 annotation_row_range = Some(next_annotation_row_range.clone());
3960 annotation_row_ranges.next();
3961 }
3962 break;
3963 }
3964 }
3965
3966 anchor_items.push(OutlineItem {
3967 depth: item_ends_stack.len(),
3968 range: range_callback(self, item.range.clone()),
3969 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3970 text: item.text,
3971 highlight_ranges: item.highlight_ranges,
3972 name_ranges: item.name_ranges,
3973 body_range: item.body_range.map(|r| range_callback(self, r)),
3974 annotation_range: annotation_row_range.map(|annotation_range| {
3975 let point_range = Point::new(annotation_range.start, 0)
3976 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3977 range_callback(self, point_range)
3978 }),
3979 });
3980 item_ends_stack.push(item.range.end);
3981 }
3982
3983 anchor_items
3984 }
3985
3986 fn next_outline_item(
3987 &self,
3988 config: &OutlineConfig,
3989 mat: &SyntaxMapMatch,
3990 range: &Range<usize>,
3991 include_extra_context: bool,
3992 theme: Option<&SyntaxTheme>,
3993 ) -> Option<OutlineItem<Point>> {
3994 let item_node = mat.captures.iter().find_map(|cap| {
3995 if cap.index == config.item_capture_ix {
3996 Some(cap.node)
3997 } else {
3998 None
3999 }
4000 })?;
4001
4002 let item_byte_range = item_node.byte_range();
4003 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4004 return None;
4005 }
4006 let item_point_range = Point::from_ts_point(item_node.start_position())
4007 ..Point::from_ts_point(item_node.end_position());
4008
4009 let mut open_point = None;
4010 let mut close_point = None;
4011
4012 let mut buffer_ranges = Vec::new();
4013 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4014 let mut range = node.start_byte()..node.end_byte();
4015 let start = node.start_position();
4016 if node.end_position().row > start.row {
4017 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4018 }
4019
4020 if !range.is_empty() {
4021 buffer_ranges.push((range, node_is_name));
4022 }
4023 };
4024
4025 for capture in mat.captures {
4026 if capture.index == config.name_capture_ix {
4027 add_to_buffer_ranges(capture.node, true);
4028 } else if Some(capture.index) == config.context_capture_ix
4029 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4030 {
4031 add_to_buffer_ranges(capture.node, false);
4032 } else {
4033 if Some(capture.index) == config.open_capture_ix {
4034 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4035 } else if Some(capture.index) == config.close_capture_ix {
4036 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4037 }
4038 }
4039 }
4040
4041 if buffer_ranges.is_empty() {
4042 return None;
4043 }
4044 let source_range_for_text =
4045 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4046
4047 let mut text = String::new();
4048 let mut highlight_ranges = Vec::new();
4049 let mut name_ranges = Vec::new();
4050 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4051 let mut last_buffer_range_end = 0;
4052 for (buffer_range, is_name) in buffer_ranges {
4053 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4054 if space_added {
4055 text.push(' ');
4056 }
4057 let before_append_len = text.len();
4058 let mut offset = buffer_range.start;
4059 chunks.seek(buffer_range.clone());
4060 for mut chunk in chunks.by_ref() {
4061 if chunk.text.len() > buffer_range.end - offset {
4062 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4063 offset = buffer_range.end;
4064 } else {
4065 offset += chunk.text.len();
4066 }
4067 let style = chunk
4068 .syntax_highlight_id
4069 .zip(theme)
4070 .and_then(|(highlight, theme)| highlight.style(theme));
4071 if let Some(style) = style {
4072 let start = text.len();
4073 let end = start + chunk.text.len();
4074 highlight_ranges.push((start..end, style));
4075 }
4076 text.push_str(chunk.text);
4077 if offset >= buffer_range.end {
4078 break;
4079 }
4080 }
4081 if is_name {
4082 let after_append_len = text.len();
4083 let start = if space_added && !name_ranges.is_empty() {
4084 before_append_len - 1
4085 } else {
4086 before_append_len
4087 };
4088 name_ranges.push(start..after_append_len);
4089 }
4090 last_buffer_range_end = buffer_range.end;
4091 }
4092
4093 Some(OutlineItem {
4094 depth: 0, // We'll calculate the depth later
4095 range: item_point_range,
4096 source_range_for_text: source_range_for_text.to_point(self),
4097 text,
4098 highlight_ranges,
4099 name_ranges,
4100 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4101 annotation_range: None,
4102 })
4103 }
4104
4105 pub fn function_body_fold_ranges<T: ToOffset>(
4106 &self,
4107 within: Range<T>,
4108 ) -> impl Iterator<Item = Range<usize>> + '_ {
4109 self.text_object_ranges(within, TreeSitterOptions::default())
4110 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4111 }
4112
4113 /// For each grammar in the language, runs the provided
4114 /// [`tree_sitter::Query`] against the given range.
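    ///
    /// A minimal sketch that runs each grammar's outline query over the whole buffer
    /// (assumes `snapshot` is a parsed `BufferSnapshot`):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|c| &c.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here.
    ///     matches.advance();
    /// }
    /// ```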
4115 pub fn matches(
4116 &self,
4117 range: Range<usize>,
4118 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4119 ) -> SyntaxMapMatches<'_> {
4120 self.syntax.matches(range, self, query)
4121 }
4122
4123 pub fn all_bracket_ranges(
4124 &self,
4125 range: Range<usize>,
4126 ) -> impl Iterator<Item = BracketMatch> + '_ {
4127 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4128 grammar.brackets_config.as_ref().map(|c| &c.query)
4129 });
4130 let configs = matches
4131 .grammars()
4132 .iter()
4133 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4134 .collect::<Vec<_>>();
4135
4136 iter::from_fn(move || {
4137 while let Some(mat) = matches.peek() {
4138 let mut open = None;
4139 let mut close = None;
4140 let depth = mat.depth;
4141 let config = &configs[mat.grammar_index];
4142 let pattern = &config.patterns[mat.pattern_index];
4143 for capture in mat.captures {
4144 if capture.index == config.open_capture_ix {
4145 open = Some(capture.node.byte_range());
4146 } else if capture.index == config.close_capture_ix {
4147 close = Some(capture.node.byte_range());
4148 }
4149 }
4150
4151 matches.advance();
4152
4153 let Some((open_range, close_range)) = open.zip(close) else {
4154 continue;
4155 };
4156
4157 let bracket_range = open_range.start..=close_range.end;
4158 if !bracket_range.overlaps(&range) {
4159 continue;
4160 }
4161
4162 return Some(BracketMatch {
4163 open_range,
4164 close_range,
4165 newline_only: pattern.newline_only,
4166 depth,
4167 });
4168 }
4169 None
4170 })
4171 }
4172
    /// Returns bracket range pairs overlapping or adjacent to `range`.
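    ///
    /// A minimal sketch (assumes `snapshot` is a parsed `BufferSnapshot` and `selection`
    /// is a `Range<usize>`):
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(selection.clone()) {
    ///     // `pair.open_range` and `pair.close_range` are the byte ranges of the
    ///     // opening and closing brackets.
    /// }
    /// ```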
4174 pub fn bracket_ranges<T: ToOffset>(
4175 &self,
4176 range: Range<T>,
4177 ) -> impl Iterator<Item = BracketMatch> + '_ {
4178 // Find bracket pairs that *inclusively* contain the given range.
4179 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4180 self.all_bracket_ranges(range)
4181 .filter(|pair| !pair.newline_only)
4182 }
4183
4184 pub fn debug_variables_query<T: ToOffset>(
4185 &self,
4186 range: Range<T>,
4187 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4188 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4189
4190 let mut matches = self.syntax.matches_with_options(
4191 range.clone(),
4192 &self.text,
4193 TreeSitterOptions::default(),
4194 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4195 );
4196
4197 let configs = matches
4198 .grammars()
4199 .iter()
4200 .map(|grammar| grammar.debug_variables_config.as_ref())
4201 .collect::<Vec<_>>();
4202
4203 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4204
4205 iter::from_fn(move || {
4206 loop {
4207 while let Some(capture) = captures.pop() {
4208 if capture.0.overlaps(&range) {
4209 return Some(capture);
4210 }
4211 }
4212
4213 let mat = matches.peek()?;
4214
4215 let Some(config) = configs[mat.grammar_index].as_ref() else {
4216 matches.advance();
4217 continue;
4218 };
4219
4220 for capture in mat.captures {
4221 let Some(ix) = config
4222 .objects_by_capture_ix
4223 .binary_search_by_key(&capture.index, |e| e.0)
4224 .ok()
4225 else {
4226 continue;
4227 };
4228 let text_object = config.objects_by_capture_ix[ix].1;
4229 let byte_range = capture.node.byte_range();
4230
4231 let mut found = false;
4232 for (range, existing) in captures.iter_mut() {
4233 if existing == &text_object {
4234 range.start = range.start.min(byte_range.start);
4235 range.end = range.end.max(byte_range.end);
4236 found = true;
4237 break;
4238 }
4239 }
4240
4241 if !found {
4242 captures.push((byte_range, text_object));
4243 }
4244 }
4245
4246 matches.advance();
4247 }
4248 })
4249 }
4250
4251 pub fn text_object_ranges<T: ToOffset>(
4252 &self,
4253 range: Range<T>,
4254 options: TreeSitterOptions,
4255 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4256 let range =
4257 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4258
4259 let mut matches =
4260 self.syntax
4261 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4262 grammar.text_object_config.as_ref().map(|c| &c.query)
4263 });
4264
4265 let configs = matches
4266 .grammars()
4267 .iter()
4268 .map(|grammar| grammar.text_object_config.as_ref())
4269 .collect::<Vec<_>>();
4270
4271 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4272
4273 iter::from_fn(move || {
4274 loop {
4275 while let Some(capture) = captures.pop() {
4276 if capture.0.overlaps(&range) {
4277 return Some(capture);
4278 }
4279 }
4280
4281 let mat = matches.peek()?;
4282
4283 let Some(config) = configs[mat.grammar_index].as_ref() else {
4284 matches.advance();
4285 continue;
4286 };
4287
4288 for capture in mat.captures {
4289 let Some(ix) = config
4290 .text_objects_by_capture_ix
4291 .binary_search_by_key(&capture.index, |e| e.0)
4292 .ok()
4293 else {
4294 continue;
4295 };
4296 let text_object = config.text_objects_by_capture_ix[ix].1;
4297 let byte_range = capture.node.byte_range();
4298
4299 let mut found = false;
4300 for (range, existing) in captures.iter_mut() {
4301 if existing == &text_object {
4302 range.start = range.start.min(byte_range.start);
4303 range.end = range.end.max(byte_range.end);
4304 found = true;
4305 break;
4306 }
4307 }
4308
4309 if !found {
4310 captures.push((byte_range, text_object));
4311 }
4312 }
4313
4314 matches.advance();
4315 }
4316 })
4317 }
4318
    /// Returns enclosing bracket ranges containing the given range.
4320 pub fn enclosing_bracket_ranges<T: ToOffset>(
4321 &self,
4322 range: Range<T>,
4323 ) -> impl Iterator<Item = BracketMatch> + '_ {
4324 let range = range.start.to_offset(self)..range.end.to_offset(self);
4325
4326 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4327 let max_depth = result.iter().map(|mat| mat.depth).max().unwrap_or(0);
4328 result.into_iter().filter(move |pair| {
4329 pair.open_range.start <= range.start
4330 && pair.close_range.end >= range.end
4331 && pair.depth == max_depth
4332 })
4333 }
4334
    /// Returns the smallest pair of enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// An optional `range_filter` can be passed to restrict which bracket pairs are considered.
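    ///
    /// A minimal sketch (assumes `snapshot` is a parsed `BufferSnapshot` and `cursor` is a
    /// valid offset; the filter shown is a hypothetical predicate):
    ///
    /// ```ignore
    /// let pair = snapshot.innermost_enclosing_bracket_ranges(
    ///     cursor..cursor,
    ///     Some(&|open, _close| open.len() == 1),
    /// );
    /// ```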
4338 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4339 &self,
4340 range: Range<T>,
4341 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4342 ) -> Option<(Range<usize>, Range<usize>)> {
4343 let range = range.start.to_offset(self)..range.end.to_offset(self);
4344
4345 // Get the ranges of the innermost pair of brackets.
4346 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4347
4348 for pair in self.enclosing_bracket_ranges(range) {
4349 if let Some(range_filter) = range_filter
4350 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4351 {
4352 continue;
4353 }
4354
4355 let len = pair.close_range.end - pair.open_range.start;
4356
4357 if let Some((existing_open, existing_close)) = &result {
4358 let existing_len = existing_close.end - existing_open.start;
4359 if len > existing_len {
4360 continue;
4361 }
4362 }
4363
4364 result = Some((pair.open_range, pair.close_range));
4365 }
4366
4367 result
4368 }
4369
    /// Returns the byte ranges of any matches of the redaction query.
4371 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4372 /// will be run on the relevant section of the buffer.
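    ///
    /// A minimal sketch (assumes `snapshot` is a parsed `BufferSnapshot`):
    ///
    /// ```ignore
    /// let redactions: Vec<Range<usize>> = snapshot
    ///     .redacted_ranges(0..snapshot.len())
    ///     .collect();
    /// ```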
4373 pub fn redacted_ranges<T: ToOffset>(
4374 &self,
4375 range: Range<T>,
4376 ) -> impl Iterator<Item = Range<usize>> + '_ {
4377 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4378 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4379 grammar
4380 .redactions_config
4381 .as_ref()
4382 .map(|config| &config.query)
4383 });
4384
4385 let configs = syntax_matches
4386 .grammars()
4387 .iter()
4388 .map(|grammar| grammar.redactions_config.as_ref())
4389 .collect::<Vec<_>>();
4390
4391 iter::from_fn(move || {
4392 let redacted_range = syntax_matches
4393 .peek()
4394 .and_then(|mat| {
4395 configs[mat.grammar_index].and_then(|config| {
4396 mat.captures
4397 .iter()
4398 .find(|capture| capture.index == config.redaction_capture_ix)
4399 })
4400 })
4401 .map(|mat| mat.node.byte_range());
4402 syntax_matches.advance();
4403 redacted_range
4404 })
4405 }
4406
4407 pub fn injections_intersecting_range<T: ToOffset>(
4408 &self,
4409 range: Range<T>,
4410 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4411 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4412
4413 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4414 grammar
4415 .injection_config
4416 .as_ref()
4417 .map(|config| &config.query)
4418 });
4419
4420 let configs = syntax_matches
4421 .grammars()
4422 .iter()
4423 .map(|grammar| grammar.injection_config.as_ref())
4424 .collect::<Vec<_>>();
4425
4426 iter::from_fn(move || {
4427 let ranges = syntax_matches.peek().and_then(|mat| {
4428 let config = &configs[mat.grammar_index]?;
4429 let content_capture_range = mat.captures.iter().find_map(|capture| {
4430 if capture.index == config.content_capture_ix {
4431 Some(capture.node.byte_range())
4432 } else {
4433 None
4434 }
4435 })?;
4436 let language = self.language_at(content_capture_range.start)?;
4437 Some((content_capture_range, language))
4438 });
4439 syntax_matches.advance();
4440 ranges
4441 })
4442 }
4443
4444 pub fn runnable_ranges(
4445 &self,
4446 offset_range: Range<usize>,
4447 ) -> impl Iterator<Item = RunnableRange> + '_ {
4448 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4449 grammar.runnable_config.as_ref().map(|config| &config.query)
4450 });
4451
4452 let test_configs = syntax_matches
4453 .grammars()
4454 .iter()
4455 .map(|grammar| grammar.runnable_config.as_ref())
4456 .collect::<Vec<_>>();
4457
4458 iter::from_fn(move || {
4459 loop {
4460 let mat = syntax_matches.peek()?;
4461
4462 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4463 let mut run_range = None;
4464 let full_range = mat.captures.iter().fold(
4465 Range {
4466 start: usize::MAX,
4467 end: 0,
4468 },
4469 |mut acc, next| {
4470 let byte_range = next.node.byte_range();
4471 if acc.start > byte_range.start {
4472 acc.start = byte_range.start;
4473 }
4474 if acc.end < byte_range.end {
4475 acc.end = byte_range.end;
4476 }
4477 acc
4478 },
4479 );
4480 if full_range.start > full_range.end {
4481 // We did not find a full spanning range of this match.
4482 return None;
4483 }
4484 let extra_captures: SmallVec<[_; 1]> =
4485 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4486 test_configs
4487 .extra_captures
4488 .get(capture.index as usize)
4489 .cloned()
4490 .and_then(|tag_name| match tag_name {
4491 RunnableCapture::Named(name) => {
4492 Some((capture.node.byte_range(), name))
4493 }
4494 RunnableCapture::Run => {
4495 let _ = run_range.insert(capture.node.byte_range());
4496 None
4497 }
4498 })
4499 }));
4500 let run_range = run_range?;
4501 let tags = test_configs
4502 .query
4503 .property_settings(mat.pattern_index)
4504 .iter()
4505 .filter_map(|property| {
4506 if *property.key == *"tag" {
4507 property
4508 .value
4509 .as_ref()
4510 .map(|value| RunnableTag(value.to_string().into()))
4511 } else {
4512 None
4513 }
4514 })
4515 .collect();
4516 let extra_captures = extra_captures
4517 .into_iter()
4518 .map(|(range, name)| {
4519 (
4520 name.to_string(),
4521 self.text_for_range(range).collect::<String>(),
4522 )
4523 })
4524 .collect();
4525 // All tags should have the same range.
4526 Some(RunnableRange {
4527 run_range,
4528 full_range,
4529 runnable: Runnable {
4530 tags,
4531 language: mat.language,
4532 buffer: self.remote_id(),
4533 },
4534 extra_captures,
4535 buffer_id: self.remote_id(),
4536 })
4537 });
4538
4539 syntax_matches.advance();
4540 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. If a match did not
                    // contain a run marker, we don't return `None` from this iterator; we just loop
                    // around and try the next match.
4543 return test_range;
4544 }
4545 }
4546 })
4547 }
4548
    /// Returns the selections of all replicas that intersect the given range. When
    /// `include_local` is `false`, the local replica's selections are omitted.
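    ///
    /// A minimal sketch (assumes `snapshot` is a `BufferSnapshot`):
    ///
    /// ```ignore
    /// let whole_buffer = snapshot.anchor_before(0)..snapshot.anchor_after(snapshot.len());
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(whole_buffer, false)
    /// {
    ///     // `selections` yields `&Selection<Anchor>` values for this replica.
    /// }
    /// ```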
4550 #[allow(clippy::type_complexity)]
4551 pub fn selections_in_range(
4552 &self,
4553 range: Range<Anchor>,
4554 include_local: bool,
4555 ) -> impl Iterator<
4556 Item = (
4557 ReplicaId,
4558 bool,
4559 CursorShape,
4560 impl Iterator<Item = &Selection<Anchor>> + '_,
4561 ),
4562 > + '_ {
4563 self.remote_selections
4564 .iter()
4565 .filter(move |(replica_id, set)| {
4566 (include_local || **replica_id != self.text.replica_id())
4567 && !set.selections.is_empty()
4568 })
4569 .map(move |(replica_id, set)| {
4570 let start_ix = match set.selections.binary_search_by(|probe| {
4571 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4572 }) {
4573 Ok(ix) | Err(ix) => ix,
4574 };
4575 let end_ix = match set.selections.binary_search_by(|probe| {
4576 probe.start.cmp(&range.end, self).then(Ordering::Less)
4577 }) {
4578 Ok(ix) | Err(ix) => ix,
4579 };
4580
4581 (
4582 *replica_id,
4583 set.line_mode,
4584 set.cursor_shape,
4585 set.selections[start_ix..end_ix].iter(),
4586 )
4587 })
4588 }
4589
    /// Returns whether the buffer contains any diagnostics.
4591 pub fn has_diagnostics(&self) -> bool {
4592 !self.diagnostics.is_empty()
4593 }
4594
4595 /// Returns all the diagnostics intersecting the given range.
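    ///
    /// A minimal sketch that logs every diagnostic in the buffer as a byte range
    /// (assumes `snapshot` is a `BufferSnapshot`):
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```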
4596 pub fn diagnostics_in_range<'a, T, O>(
4597 &'a self,
4598 search_range: Range<T>,
4599 reversed: bool,
4600 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4601 where
4602 T: 'a + Clone + ToOffset,
4603 O: 'a + FromAnchor,
4604 {
4605 let mut iterators: Vec<_> = self
4606 .diagnostics
4607 .iter()
4608 .map(|(_, collection)| {
4609 collection
4610 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4611 .peekable()
4612 })
4613 .collect();
4614
4615 std::iter::from_fn(move || {
4616 let (next_ix, _) = iterators
4617 .iter_mut()
4618 .enumerate()
4619 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4620 .min_by(|(_, a), (_, b)| {
4621 let cmp = a
4622 .range
4623 .start
4624 .cmp(&b.range.start, self)
4625 // when range is equal, sort by diagnostic severity
4626 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4627 // and stabilize order with group_id
4628 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4629 if reversed { cmp.reverse() } else { cmp }
4630 })?;
4631 iterators[next_ix]
4632 .next()
4633 .map(
4634 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4635 diagnostic,
4636 range: FromAnchor::from_anchor(&range.start, self)
4637 ..FromAnchor::from_anchor(&range.end, self),
4638 },
4639 )
4640 })
4641 }
4642
4643 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4644 /// should be used instead.
4645 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4646 &self.diagnostics
4647 }
4648
4649 /// Returns all the diagnostic groups associated with the given
4650 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
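    ///
    /// A minimal sketch (assumes `snapshot` is a `BufferSnapshot`):
    ///
    /// ```ignore
    /// let group_count = snapshot.diagnostic_groups(None).len();
    /// ```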
4652 pub fn diagnostic_groups(
4653 &self,
4654 language_server_id: Option<LanguageServerId>,
4655 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4656 let mut groups = Vec::new();
4657
4658 if let Some(language_server_id) = language_server_id {
4659 if let Ok(ix) = self
4660 .diagnostics
4661 .binary_search_by_key(&language_server_id, |e| e.0)
4662 {
4663 self.diagnostics[ix]
4664 .1
4665 .groups(language_server_id, &mut groups, self);
4666 }
4667 } else {
4668 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4669 diagnostics.groups(*language_server_id, &mut groups, self);
4670 }
4671 }
4672
4673 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4674 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4675 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4676 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4677 });
4678
4679 groups
4680 }
4681
4682 /// Returns an iterator over the diagnostics for the given group.
4683 pub fn diagnostic_group<O>(
4684 &self,
4685 group_id: usize,
4686 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4687 where
4688 O: FromAnchor + 'static,
4689 {
4690 self.diagnostics
4691 .iter()
4692 .flat_map(move |(_, set)| set.group(group_id, self))
4693 }
4694
4695 /// An integer version number that accounts for all updates besides
4696 /// the buffer's text itself (which is versioned via a version vector).
4697 pub fn non_text_state_update_count(&self) -> usize {
4698 self.non_text_state_update_count
4699 }
4700
4701 /// An integer version that changes when the buffer's syntax changes.
4702 pub fn syntax_update_count(&self) -> usize {
4703 self.syntax.update_count()
4704 }
4705
    /// Returns a snapshot of the underlying file, if any.
4707 pub fn file(&self) -> Option<&Arc<dyn File>> {
4708 self.file.as_ref()
4709 }
4710
4711 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4712 if let Some(file) = self.file() {
4713 if file.path().file_name().is_none() || include_root {
4714 Some(file.full_path(cx).to_string_lossy().into_owned())
4715 } else {
4716 Some(file.path().display(file.path_style(cx)).to_string())
4717 }
4718 } else {
4719 None
4720 }
4721 }
4722
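    /// Returns the distinct words found in the given range, each mapped to an anchor
    /// range where it occurs.
    ///
    /// A minimal usage sketch (assumes `snapshot` is a `BufferSnapshot`):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```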
4723 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4724 let query_str = query.fuzzy_contents;
4725 if query_str.is_some_and(|query| query.is_empty()) {
4726 return BTreeMap::default();
4727 }
4728
4729 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4730 language,
4731 override_id: None,
4732 }));
4733
4734 let mut query_ix = 0;
4735 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4736 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4737
4738 let mut words = BTreeMap::default();
4739 let mut current_word_start_ix = None;
4740 let mut chunk_ix = query.range.start;
4741 for chunk in self.chunks(query.range, false) {
4742 for (i, c) in chunk.text.char_indices() {
4743 let ix = chunk_ix + i;
4744 if classifier.is_word(c) {
4745 if current_word_start_ix.is_none() {
4746 current_word_start_ix = Some(ix);
4747 }
4748
4749 if let Some(query_chars) = &query_chars
4750 && query_ix < query_len
4751 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4752 {
4753 query_ix += 1;
4754 }
4755 continue;
4756 } else if let Some(word_start) = current_word_start_ix.take()
4757 && query_ix == query_len
4758 {
4759 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4760 let mut word_text = self.text_for_range(word_start..ix).peekable();
4761 let first_char = word_text
4762 .peek()
4763 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip "words" that start with a digit.
4765 if !query.skip_digits
4766 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4767 {
4768 words.insert(word_text.collect(), word_range);
4769 }
4770 }
4771 query_ix = 0;
4772 }
4773 chunk_ix += chunk.text.len();
4774 }
4775
4776 words
4777 }
4778}
4779
4780pub struct WordsQuery<'a> {
    /// Only returns words that contain every character of the fuzzy string, in order
    /// (matched case-insensitively).
4782 pub fuzzy_contents: Option<&'a str>,
4783 /// Skips words that start with a digit.
4784 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4786 pub range: Range<usize>,
4787}
4788
4789fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4790 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4791}
4792
4793fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4794 let mut result = IndentSize::spaces(0);
4795 for c in text {
4796 let kind = match c {
4797 ' ' => IndentKind::Space,
4798 '\t' => IndentKind::Tab,
4799 _ => break,
4800 };
4801 if result.len == 0 {
4802 result.kind = kind;
4803 }
4804 result.len += 1;
4805 }
4806 result
4807}
4808
4809impl Clone for BufferSnapshot {
4810 fn clone(&self) -> Self {
4811 Self {
4812 text: self.text.clone(),
4813 syntax: self.syntax.clone(),
4814 file: self.file.clone(),
4815 remote_selections: self.remote_selections.clone(),
4816 diagnostics: self.diagnostics.clone(),
4817 language: self.language.clone(),
4818 non_text_state_update_count: self.non_text_state_update_count,
4819 }
4820 }
4821}
4822
4823impl Deref for BufferSnapshot {
4824 type Target = text::BufferSnapshot;
4825
4826 fn deref(&self) -> &Self::Target {
4827 &self.text
4828 }
4829}
4830
4831unsafe impl Send for BufferChunks<'_> {}
4832
4833impl<'a> BufferChunks<'a> {
4834 pub(crate) fn new(
4835 text: &'a Rope,
4836 range: Range<usize>,
4837 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4838 diagnostics: bool,
4839 buffer_snapshot: Option<&'a BufferSnapshot>,
4840 ) -> Self {
4841 let mut highlights = None;
4842 if let Some((captures, highlight_maps)) = syntax {
4843 highlights = Some(BufferChunkHighlights {
4844 captures,
4845 next_capture: None,
4846 stack: Default::default(),
4847 highlight_maps,
4848 })
4849 }
4850
4851 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4852 let chunks = text.chunks_in_range(range.clone());
4853
4854 let mut this = BufferChunks {
4855 range,
4856 buffer_snapshot,
4857 chunks,
4858 diagnostic_endpoints,
4859 error_depth: 0,
4860 warning_depth: 0,
4861 information_depth: 0,
4862 hint_depth: 0,
4863 unnecessary_depth: 0,
4864 underline: true,
4865 highlights,
4866 };
4867 this.initialize_diagnostic_endpoints();
4868 this
4869 }
4870
    /// Seeks to the given byte range in the buffer.
4872 pub fn seek(&mut self, range: Range<usize>) {
4873 let old_range = std::mem::replace(&mut self.range, range.clone());
4874 self.chunks.set_range(self.range.clone());
4875 if let Some(highlights) = self.highlights.as_mut() {
4876 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4877 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4878 highlights
4879 .stack
4880 .retain(|(end_offset, _)| *end_offset > range.start);
4881 if let Some(capture) = &highlights.next_capture
4882 && range.start >= capture.node.start_byte()
4883 {
4884 let next_capture_end = capture.node.end_byte();
4885 if range.start < next_capture_end {
4886 highlights.stack.push((
4887 next_capture_end,
4888 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4889 ));
4890 }
4891 highlights.next_capture.take();
4892 }
4893 } else if let Some(snapshot) = self.buffer_snapshot {
4894 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4895 *highlights = BufferChunkHighlights {
4896 captures,
4897 next_capture: None,
4898 stack: Default::default(),
4899 highlight_maps,
4900 };
4901 } else {
4902 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4903 // Seeking such BufferChunks is not supported.
4904 debug_assert!(
4905 false,
4906 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4907 );
4908 }
4909
4910 highlights.captures.set_byte_range(self.range.clone());
4911 self.initialize_diagnostic_endpoints();
4912 }
4913 }
4914
4915 fn initialize_diagnostic_endpoints(&mut self) {
4916 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4917 && let Some(buffer) = self.buffer_snapshot
4918 {
4919 let mut diagnostic_endpoints = Vec::new();
4920 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4921 diagnostic_endpoints.push(DiagnosticEndpoint {
4922 offset: entry.range.start,
4923 is_start: true,
4924 severity: entry.diagnostic.severity,
4925 is_unnecessary: entry.diagnostic.is_unnecessary,
4926 underline: entry.diagnostic.underline,
4927 });
4928 diagnostic_endpoints.push(DiagnosticEndpoint {
4929 offset: entry.range.end,
4930 is_start: false,
4931 severity: entry.diagnostic.severity,
4932 is_unnecessary: entry.diagnostic.is_unnecessary,
4933 underline: entry.diagnostic.underline,
4934 });
4935 }
4936 diagnostic_endpoints
4937 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4938 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4939 self.hint_depth = 0;
4940 self.error_depth = 0;
4941 self.warning_depth = 0;
4942 self.information_depth = 0;
4943 }
4944 }
4945
4946 /// The current byte offset in the buffer.
4947 pub fn offset(&self) -> usize {
4948 self.range.start
4949 }
4950
4951 pub fn range(&self) -> Range<usize> {
4952 self.range.clone()
4953 }
4954
4955 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4956 let depth = match endpoint.severity {
4957 DiagnosticSeverity::ERROR => &mut self.error_depth,
4958 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4959 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4960 DiagnosticSeverity::HINT => &mut self.hint_depth,
4961 _ => return,
4962 };
4963 if endpoint.is_start {
4964 *depth += 1;
4965 } else {
4966 *depth -= 1;
4967 }
4968
4969 if endpoint.is_unnecessary {
4970 if endpoint.is_start {
4971 self.unnecessary_depth += 1;
4972 } else {
4973 self.unnecessary_depth -= 1;
4974 }
4975 }
4976 }
4977
4978 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4979 if self.error_depth > 0 {
4980 Some(DiagnosticSeverity::ERROR)
4981 } else if self.warning_depth > 0 {
4982 Some(DiagnosticSeverity::WARNING)
4983 } else if self.information_depth > 0 {
4984 Some(DiagnosticSeverity::INFORMATION)
4985 } else if self.hint_depth > 0 {
4986 Some(DiagnosticSeverity::HINT)
4987 } else {
4988 None
4989 }
4990 }
4991
4992 fn current_code_is_unnecessary(&self) -> bool {
4993 self.unnecessary_depth > 0
4994 }
4995}
4996
4997impl<'a> Iterator for BufferChunks<'a> {
4998 type Item = Chunk<'a>;
4999
5000 fn next(&mut self) -> Option<Self::Item> {
5001 let mut next_capture_start = usize::MAX;
5002 let mut next_diagnostic_endpoint = usize::MAX;
5003
5004 if let Some(highlights) = self.highlights.as_mut() {
5005 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5006 if *parent_capture_end <= self.range.start {
5007 highlights.stack.pop();
5008 } else {
5009 break;
5010 }
5011 }
5012
5013 if highlights.next_capture.is_none() {
5014 highlights.next_capture = highlights.captures.next();
5015 }
5016
5017 while let Some(capture) = highlights.next_capture.as_ref() {
5018 if self.range.start < capture.node.start_byte() {
5019 next_capture_start = capture.node.start_byte();
5020 break;
5021 } else {
5022 let highlight_id =
5023 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5024 highlights
5025 .stack
5026 .push((capture.node.end_byte(), highlight_id));
5027 highlights.next_capture = highlights.captures.next();
5028 }
5029 }
5030 }
5031
5032 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5033 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5034 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5035 if endpoint.offset <= self.range.start {
5036 self.update_diagnostic_depths(endpoint);
5037 diagnostic_endpoints.next();
5038 self.underline = endpoint.underline;
5039 } else {
5040 next_diagnostic_endpoint = endpoint.offset;
5041 break;
5042 }
5043 }
5044 }
5045 self.diagnostic_endpoints = diagnostic_endpoints;
5046
5047 if let Some(ChunkBitmaps {
5048 text: chunk,
5049 chars: chars_map,
5050 tabs,
5051 }) = self.chunks.peek_with_bitmaps()
5052 {
5053 let chunk_start = self.range.start;
5054 let mut chunk_end = (self.chunks.offset() + chunk.len())
5055 .min(next_capture_start)
5056 .min(next_diagnostic_endpoint);
5057 let mut highlight_id = None;
5058 if let Some(highlights) = self.highlights.as_ref()
5059 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5060 {
5061 chunk_end = chunk_end.min(*parent_capture_end);
5062 highlight_id = Some(*parent_highlight_id);
5063 }
5064 let bit_start = chunk_start - self.chunks.offset();
5065 let bit_end = chunk_end - self.chunks.offset();
5066
5067 let slice = &chunk[bit_start..bit_end];
5068
5069 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5070 let tabs = (tabs >> bit_start) & mask;
5071 let chars = (chars_map >> bit_start) & mask;
5072
5073 self.range.start = chunk_end;
5074 if self.range.start == self.chunks.offset() + chunk.len() {
5075 self.chunks.next().unwrap();
5076 }
5077
5078 Some(Chunk {
5079 text: slice,
5080 syntax_highlight_id: highlight_id,
5081 underline: self.underline,
5082 diagnostic_severity: self.current_diagnostic_severity(),
5083 is_unnecessary: self.current_code_is_unnecessary(),
5084 tabs,
5085 chars,
5086 ..Chunk::default()
5087 })
5088 } else {
5089 None
5090 }
5091 }
5092}
5093
5094impl operation_queue::Operation for Operation {
5095 fn lamport_timestamp(&self) -> clock::Lamport {
5096 match self {
5097 Operation::Buffer(_) => {
5098 unreachable!("buffer operations should never be deferred at this layer")
5099 }
5100 Operation::UpdateDiagnostics {
5101 lamport_timestamp, ..
5102 }
5103 | Operation::UpdateSelections {
5104 lamport_timestamp, ..
5105 }
5106 | Operation::UpdateCompletionTriggers {
5107 lamport_timestamp, ..
5108 }
5109 | Operation::UpdateLineEnding {
5110 lamport_timestamp, ..
5111 } => *lamport_timestamp,
5112 }
5113 }
5114}
5115
5116impl Default for Diagnostic {
5117 fn default() -> Self {
5118 Self {
5119 source: Default::default(),
5120 source_kind: DiagnosticSourceKind::Other,
5121 code: None,
5122 code_description: None,
5123 severity: DiagnosticSeverity::ERROR,
5124 message: Default::default(),
5125 markdown: None,
5126 group_id: 0,
5127 is_primary: false,
5128 is_disk_based: false,
5129 is_unnecessary: false,
5130 underline: true,
5131 data: None,
5132 }
5133 }
5134}
5135
5136impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5138 pub fn spaces(len: u32) -> Self {
5139 Self {
5140 len,
5141 kind: IndentKind::Space,
5142 }
5143 }
5144
5145 /// Returns an [`IndentSize`] representing a tab.
5146 pub fn tab() -> Self {
5147 Self {
5148 len: 1,
5149 kind: IndentKind::Tab,
5150 }
5151 }
5152
5153 /// An iterator over the characters represented by this [`IndentSize`].
5154 pub fn chars(&self) -> impl Iterator<Item = char> {
5155 iter::repeat(self.char()).take(self.len as usize)
5156 }
5157
5158 /// The character representation of this [`IndentSize`].
5159 pub fn char(&self) -> char {
5160 match self.kind {
5161 IndentKind::Space => ' ',
5162 IndentKind::Tab => '\t',
5163 }
5164 }
5165
5166 /// Consumes the current [`IndentSize`] and returns a new one that has
5167 /// been shrunk or enlarged by the given size along the given direction.
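    ///
    /// A minimal sketch (the `len` field is assumed to be publicly readable here):
    ///
    /// ```ignore
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 8);
    /// ```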
5168 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5169 match direction {
5170 Ordering::Less => {
5171 if self.kind == size.kind && self.len >= size.len {
5172 self.len -= size.len;
5173 }
5174 }
5175 Ordering::Equal => {}
5176 Ordering::Greater => {
5177 if self.len == 0 {
5178 self = size;
5179 } else if self.kind == size.kind {
5180 self.len += size.len;
5181 }
5182 }
5183 }
5184 self
5185 }
5186
5187 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5188 match self.kind {
5189 IndentKind::Space => self.len as usize,
5190 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5191 }
5192 }
5193}
5194
5195#[cfg(any(test, feature = "test-support"))]
5196pub struct TestFile {
5197 pub path: Arc<RelPath>,
5198 pub root_name: String,
5199 pub local_root: Option<PathBuf>,
5200}
5201
5202#[cfg(any(test, feature = "test-support"))]
5203impl File for TestFile {
5204 fn path(&self) -> &Arc<RelPath> {
5205 &self.path
5206 }
5207
5208 fn full_path(&self, _: &gpui::App) -> PathBuf {
5209 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5210 }
5211
5212 fn as_local(&self) -> Option<&dyn LocalFile> {
5213 if self.local_root.is_some() {
5214 Some(self)
5215 } else {
5216 None
5217 }
5218 }
5219
5220 fn disk_state(&self) -> DiskState {
5221 unimplemented!()
5222 }
5223
5224 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5225 self.path().file_name().unwrap_or(self.root_name.as_ref())
5226 }
5227
5228 fn worktree_id(&self, _: &App) -> WorktreeId {
5229 WorktreeId::from_usize(0)
5230 }
5231
5232 fn to_proto(&self, _: &App) -> rpc::proto::File {
5233 unimplemented!()
5234 }
5235
5236 fn is_private(&self) -> bool {
5237 false
5238 }
5239
5240 fn path_style(&self, _cx: &App) -> PathStyle {
5241 PathStyle::local()
5242 }
5243}
5244
5245#[cfg(any(test, feature = "test-support"))]
5246impl LocalFile for TestFile {
5247 fn abs_path(&self, _cx: &App) -> PathBuf {
5248 PathBuf::from(self.local_root.as_ref().unwrap())
5249 .join(&self.root_name)
5250 .join(self.path.as_std_path())
5251 }
5252
5253 fn load(&self, _cx: &App) -> Task<Result<String>> {
5254 unimplemented!()
5255 }
5256
5257 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5258 unimplemented!()
5259 }
5260}
5261
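/// Groups an ascending sequence of values into contiguous ranges, starting a new
/// range whenever a gap appears or a range reaches `max_len` values.
///
/// A minimal sketch of the behavior:
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
/// ```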
5262pub(crate) fn contiguous_ranges(
5263 values: impl Iterator<Item = u32>,
5264 max_len: usize,
5265) -> impl Iterator<Item = Range<u32>> {
5266 let mut values = values;
5267 let mut current_range: Option<Range<u32>> = None;
5268 std::iter::from_fn(move || {
5269 loop {
5270 if let Some(value) = values.next() {
5271 if let Some(range) = &mut current_range
5272 && value == range.end
5273 && range.len() < max_len
5274 {
5275 range.end += 1;
5276 continue;
5277 }
5278
5279 let prev_range = current_range.clone();
5280 current_range = Some(value..(value + 1));
5281 if prev_range.is_some() {
5282 return prev_range;
5283 }
5284 } else {
5285 return current_range.take();
5286 }
5287 }
5288 })
5289}
5290
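/// Classifies characters as word, whitespace, or punctuation characters, taking any
/// language-specific word characters from the current [`LanguageScope`] into account.
///
/// A minimal sketch (with no language scope attached):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('_'), CharKind::Word);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
/// ```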
5291#[derive(Default, Debug)]
5292pub struct CharClassifier {
5293 scope: Option<LanguageScope>,
5294 scope_context: Option<CharScopeContext>,
5295 ignore_punctuation: bool,
5296}
5297
5298impl CharClassifier {
5299 pub fn new(scope: Option<LanguageScope>) -> Self {
5300 Self {
5301 scope,
5302 scope_context: None,
5303 ignore_punctuation: false,
5304 }
5305 }
5306
5307 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5308 Self {
5309 scope_context,
5310 ..self
5311 }
5312 }
5313
5314 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5315 Self {
5316 ignore_punctuation,
5317 ..self
5318 }
5319 }
5320
5321 pub fn is_whitespace(&self, c: char) -> bool {
5322 self.kind(c) == CharKind::Whitespace
5323 }
5324
5325 pub fn is_word(&self, c: char) -> bool {
5326 self.kind(c) == CharKind::Word
5327 }
5328
5329 pub fn is_punctuation(&self, c: char) -> bool {
5330 self.kind(c) == CharKind::Punctuation
5331 }
5332
5333 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5334 if c.is_alphanumeric() || c == '_' {
5335 return CharKind::Word;
5336 }
5337
5338 if let Some(scope) = &self.scope {
5339 let characters = match self.scope_context {
5340 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5341 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5342 None => scope.word_characters(),
5343 };
5344 if let Some(characters) = characters
5345 && characters.contains(&c)
5346 {
5347 return CharKind::Word;
5348 }
5349 }
5350
5351 if c.is_whitespace() {
5352 return CharKind::Whitespace;
5353 }
5354
5355 if ignore_punctuation {
5356 CharKind::Word
5357 } else {
5358 CharKind::Punctuation
5359 }
5360 }
5361
5362 pub fn kind(&self, c: char) -> CharKind {
5363 self.kind_with(c, self.ignore_punctuation)
5364 }
5365}
5366
5367/// Find all of the ranges of whitespace that occur at the ends of lines
5368/// in the given rope.
5369///
5370/// This could also be done with a regex search, but this implementation
5371/// avoids copying text.
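///
/// A minimal sketch of the expected output:
///
/// ```ignore
/// // Covers the three spaces after `{` and the two spaces on the final line.
/// let ranges = trailing_whitespace_ranges(&Rope::from("fn main() {   \n}  "));
/// assert_eq!(ranges.len(), 2);
/// ```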
5372pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5373 let mut ranges = Vec::new();
5374
5375 let mut offset = 0;
5376 let mut prev_chunk_trailing_whitespace_range = 0..0;
5377 for chunk in rope.chunks() {
5378 let mut prev_line_trailing_whitespace_range = 0..0;
5379 for (i, line) in chunk.split('\n').enumerate() {
5380 let line_end_offset = offset + line.len();
5381 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5382 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5383
5384 if i == 0 && trimmed_line_len == 0 {
5385 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5386 }
5387 if !prev_line_trailing_whitespace_range.is_empty() {
5388 ranges.push(prev_line_trailing_whitespace_range);
5389 }
5390
5391 offset = line_end_offset + 1;
5392 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5393 }
5394
5395 offset -= 1;
5396 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5397 }
5398
5399 if !prev_chunk_trailing_whitespace_range.is_empty() {
5400 ranges.push(prev_chunk_trailing_whitespace_range);
5401 }
5402
5403 ranges
5404}