1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use encoding_rs::Encoding;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle,
29 SharedString, StyledText, Task, TaskLabel, TextStyle,
30};
31
32use lsp::{LanguageServerId, NumberOrString};
33use parking_lot::Mutex;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::PathBuf,
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
79 /// Indicates whether a [`Buffer`] has permission to be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
125 /// Memoizes calls to `has_changes_since(saved_version)`.
126 /// The cell's contents are `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130 pub encoding: &'static Encoding,
131}
132
133#[derive(Copy, Clone, Debug, PartialEq, Eq)]
134pub enum ParseStatus {
135 Idle,
136 Parsing,
137}
138
139struct BufferBranchState {
140 base_buffer: Entity<Buffer>,
141 merged_operations: Vec<Lamport>,
142}
143
144/// An immutable, cheaply cloneable representation of a fixed
145/// state of a buffer.
146pub struct BufferSnapshot {
147 pub text: text::BufferSnapshot,
148 pub syntax: SyntaxSnapshot,
149 file: Option<Arc<dyn File>>,
150 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
151 remote_selections: TreeMap<ReplicaId, SelectionSet>,
152 language: Option<Arc<Language>>,
153 non_text_state_update_count: usize,
154}
155
156/// The kind and amount of indentation in a particular line. For now,
157/// assumes that indentation is all the same character.
158#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
159pub struct IndentSize {
160 /// The number of bytes that comprise the indentation.
161 pub len: u32,
162 /// The kind of whitespace used for indentation.
163 pub kind: IndentKind,
164}
165
166/// A whitespace character that's used for indentation.
167#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
168pub enum IndentKind {
169 /// An ASCII space character.
170 #[default]
171 Space,
172 /// An ASCII tab character.
173 Tab,
174}
175
176/// The shape of a selection cursor.
177#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190impl From<settings::CursorShape> for CursorShape {
191 fn from(shape: settings::CursorShape) -> Self {
192 match shape {
193 settings::CursorShape::Bar => CursorShape::Bar,
194 settings::CursorShape::Block => CursorShape::Block,
195 settings::CursorShape::Underline => CursorShape::Underline,
196 settings::CursorShape::Hollow => CursorShape::Hollow,
197 }
198 }
199}
200
201#[derive(Clone, Debug)]
202struct SelectionSet {
203 line_mode: bool,
204 cursor_shape: CursorShape,
205 selections: Arc<[Selection<Anchor>]>,
206 lamport_timestamp: clock::Lamport,
207}
208
209/// A diagnostic associated with a certain range of a buffer.
210#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
211pub struct Diagnostic {
212 /// The name of the service that produced this diagnostic.
213 pub source: Option<String>,
214 /// A machine-readable code that identifies this diagnostic.
215 pub code: Option<NumberOrString>,
216 pub code_description: Option<lsp::Uri>,
217 /// Whether this diagnostic is a hint, warning, or error.
218 pub severity: DiagnosticSeverity,
219 /// The human-readable message associated with this diagnostic.
220 pub message: String,
221 /// The human-readable message in Markdown format, if available.
222 pub markdown: Option<String>,
223 /// An id that identifies the group to which this diagnostic belongs.
224 ///
225 /// When a language server produces a diagnostic with
226 /// one or more associated diagnostics, those diagnostics are all
227 /// assigned a single group ID.
228 pub group_id: usize,
229 /// Whether this diagnostic is the primary diagnostic for its group.
230 ///
231 /// In a given group, the primary diagnostic is the top-level diagnostic
232 /// returned by the language server. The non-primary diagnostics are the
233 /// associated diagnostics.
234 pub is_primary: bool,
235 /// Whether this diagnostic is considered to originate from an analysis of
236 /// files on disk, as opposed to any unsaved buffer contents. This is a
237 /// property of a given diagnostic source, and is configured for a given
238 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
239 /// for the language server.
240 pub is_disk_based: bool,
241 /// Whether this diagnostic marks unnecessary code.
242 pub is_unnecessary: bool,
243 /// Allows quickly separating diagnostic groups by their source.
244 pub source_kind: DiagnosticSourceKind,
245 /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for this diagnostic.
246 pub data: Option<Value>,
247 /// Whether to underline the corresponding text range in the editor.
248 pub underline: bool,
249}
250
251#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
252pub enum DiagnosticSourceKind {
253 Pulled,
254 Pushed,
255 Other,
256}
257
258/// An operation used to synchronize this buffer with its other replicas.
259#[derive(Clone, Debug, PartialEq)]
260pub enum Operation {
261 /// A text operation.
262 Buffer(text::Operation),
263
264 /// An update to the buffer's diagnostics.
265 UpdateDiagnostics {
266 /// The id of the language server that produced the new diagnostics.
267 server_id: LanguageServerId,
268 /// The diagnostics.
269 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
270 /// The buffer's lamport timestamp.
271 lamport_timestamp: clock::Lamport,
272 },
273
274 /// An update to the most recent selections in this buffer.
275 UpdateSelections {
276 /// The selections.
277 selections: Arc<[Selection<Anchor>]>,
278 /// The buffer's lamport timestamp.
279 lamport_timestamp: clock::Lamport,
280 /// Whether the selections are in 'line mode'.
281 line_mode: bool,
282 /// The [`CursorShape`] associated with these selections.
283 cursor_shape: CursorShape,
284 },
285
286 /// An update to the characters that should trigger autocompletion
287 /// for this buffer.
288 UpdateCompletionTriggers {
289 /// The characters that trigger autocompletion.
290 triggers: Vec<String>,
291 /// The buffer's lamport timestamp.
292 lamport_timestamp: clock::Lamport,
293 /// The language server ID.
294 server_id: LanguageServerId,
295 },
296
297 /// An update to the line ending type of this buffer.
298 UpdateLineEnding {
299 /// The line ending type.
300 line_ending: LineEnding,
301 /// The buffer's lamport timestamp.
302 lamport_timestamp: clock::Lamport,
303 },
304}
305
306/// An event that occurs in a buffer.
307#[derive(Clone, Debug, PartialEq)]
308pub enum BufferEvent {
309 /// The buffer was changed in a way that must be
310 /// propagated to its other replicas.
311 Operation {
312 operation: Operation,
313 is_local: bool,
314 },
315 /// The buffer was edited.
316 Edited,
317 /// The buffer's `dirty` bit changed.
318 DirtyChanged,
319 /// The buffer was saved.
320 Saved,
321 /// The buffer's file was changed on disk.
322 FileHandleChanged,
323 /// The buffer was reloaded.
324 Reloaded,
325 /// The buffer needs to be reloaded.
326 ReloadNeeded,
327 /// The buffer's language was changed.
328 LanguageChanged,
329 /// The buffer's syntax trees were updated.
330 Reparsed,
331 /// The buffer's diagnostics were updated.
332 DiagnosticsUpdated,
333 /// The buffer gained or lost editing capabilities.
334 CapabilityChanged,
335}
336
337/// The file associated with a buffer.
338pub trait File: Send + Sync + Any {
339 /// Returns the [`LocalFile`] associated with this file, if the
340 /// file is local.
341 fn as_local(&self) -> Option<&dyn LocalFile>;
342
343 /// Returns whether this file is local.
344 fn is_local(&self) -> bool {
345 self.as_local().is_some()
346 }
347
348 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
349 /// only available in some states, such as modification time.
350 fn disk_state(&self) -> DiskState;
351
352 /// Returns the path of this file relative to the worktree's root directory.
353 fn path(&self) -> &Arc<RelPath>;
354
355 /// Returns the path of this file relative to the worktree's parent directory (this means it
356 /// includes the name of the worktree's root folder).
357 fn full_path(&self, cx: &App) -> PathBuf;
358
359 /// Returns the path style of this file.
360 fn path_style(&self, cx: &App) -> PathStyle;
361
362 /// Returns the last component of this handle's absolute path. If this handle refers to the root
363 /// of its worktree, then this method will return the name of the worktree itself.
364 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
365
366 /// Returns the id of the worktree to which this file belongs.
367 ///
368 /// This is needed for looking up project-specific settings.
369 fn worktree_id(&self, cx: &App) -> WorktreeId;
370
371 /// Converts this file into a protobuf message.
372 fn to_proto(&self, cx: &App) -> rpc::proto::File;
373
374 /// Returns whether Zed considers this to be a private file.
375 fn is_private(&self) -> bool;
376}
377
378/// The file's storage status - whether it's stored (`Present`), and if so when it was last
379/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
380/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
381/// indicator for new files.
382#[derive(Copy, Clone, Debug, PartialEq)]
383pub enum DiskState {
384 /// File created in Zed that has not been saved.
385 New,
386 /// File present on the filesystem.
387 Present { mtime: MTime },
388 /// Deleted file that was previously present.
389 Deleted,
390}
391
392impl DiskState {
393 /// Returns the file's last known modification time on disk.
394 pub fn mtime(self) -> Option<MTime> {
395 match self {
396 DiskState::New => None,
397 DiskState::Present { mtime } => Some(mtime),
398 DiskState::Deleted => None,
399 }
400 }
401
402 pub fn exists(&self) -> bool {
403 match self {
404 DiskState::New => false,
405 DiskState::Present { .. } => true,
406 DiskState::Deleted => false,
407 }
408 }
409}
410
411/// The file associated with a buffer, in the case where the file is on the local disk.
412pub trait LocalFile: File {
413 /// Returns the absolute path of this file.
414 fn abs_path(&self, cx: &App) -> PathBuf;
415
416 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
417 fn load(&self, cx: &App) -> Task<Result<String>>;
418
419 /// Loads the file's contents from disk.
420 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
421
422 /// Loads the file contents from disk, decoding them with the given encoding.
423 fn load_with_encoding(&self, cx: &App, encoding: &'static Encoding) -> Task<Result<String>>;
424}
425
426/// The auto-indent behavior associated with an editing operation.
427/// For some editing operations, each affected line of text has its
428/// indentation recomputed. For other operations, the entire block
429/// of edited text is adjusted uniformly.
430#[derive(Clone, Debug)]
431pub enum AutoindentMode {
432 /// Indent each line of inserted text.
433 EachLine,
434 /// Apply the same indentation adjustment to all of the lines
435 /// in a given insertion.
436 Block {
437 /// The original indentation column of the first line of each
438 /// insertion, if it has been copied.
439 ///
440 /// Knowing this makes it possible to preserve the relative indentation
441 /// of every line in the insertion from when it was copied.
442 ///
443 /// If the original indent column is `a`, and the first line of the insertion
444 /// is auto-indented to column `b`, then every other line of the insertion
445 /// will have its indentation adjusted by `b - a` columns.
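        ///
        /// For example (illustrative numbers only): if a block was copied from
        /// column 4 (`a = 4`) and its first line is auto-indented to column 8
        /// (`b = 8`), the remaining lines each have their indentation shifted by
        /// `b - a = 4` columns, preserving the block's internal relative
        /// indentation.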
446 original_indent_columns: Vec<Option<u32>>,
447 },
448}
449
450#[derive(Clone)]
451struct AutoindentRequest {
452 before_edit: BufferSnapshot,
453 entries: Vec<AutoindentRequestEntry>,
454 is_block_mode: bool,
455 ignore_empty_lines: bool,
456}
457
458#[derive(Debug, Clone)]
459struct AutoindentRequestEntry {
460 /// A range of the buffer whose indentation should be adjusted.
461 range: Range<Anchor>,
462 /// Whether or not these lines should be considered brand new, for the
463 /// purpose of auto-indent. When text is not new, its indentation will
464 /// only be adjusted if the suggested indentation level has *changed*
465 /// since the edit was made.
466 first_line_is_new: bool,
467 indent_size: IndentSize,
468 original_indent_column: Option<u32>,
469}
470
471#[derive(Debug)]
472struct IndentSuggestion {
473 basis_row: u32,
474 delta: Ordering,
475 within_error: bool,
476}
477
478struct BufferChunkHighlights<'a> {
479 captures: SyntaxMapCaptures<'a>,
480 next_capture: Option<SyntaxMapCapture<'a>>,
481 stack: Vec<(usize, HighlightId)>,
482 highlight_maps: Vec<HighlightMap>,
483}
484
485/// An iterator that yields chunks of a buffer's text, along with their
486/// syntax highlights and diagnostic status.
487pub struct BufferChunks<'a> {
488 buffer_snapshot: Option<&'a BufferSnapshot>,
489 range: Range<usize>,
490 chunks: text::Chunks<'a>,
491 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
492 error_depth: usize,
493 warning_depth: usize,
494 information_depth: usize,
495 hint_depth: usize,
496 unnecessary_depth: usize,
497 underline: bool,
498 highlights: Option<BufferChunkHighlights<'a>>,
499}
500
501/// A chunk of a buffer's text, along with its syntax highlight and
502/// diagnostic status.
503#[derive(Clone, Debug, Default)]
504pub struct Chunk<'a> {
505 /// The text of the chunk.
506 pub text: &'a str,
507 /// The syntax highlighting style of the chunk.
508 pub syntax_highlight_id: Option<HighlightId>,
509 /// The highlight style that has been applied to this chunk in
510 /// the editor.
511 pub highlight_style: Option<HighlightStyle>,
512 /// The severity of diagnostic associated with this chunk, if any.
513 pub diagnostic_severity: Option<DiagnosticSeverity>,
514 /// A bitset of which characters are tabs in this string.
515 pub tabs: u128,
516 /// A bitmap of character indices in this chunk.
517 pub chars: u128,
518 /// Whether this chunk of text is marked as unnecessary.
519 pub is_unnecessary: bool,
520 /// Whether this chunk of text was originally a tab character.
521 pub is_tab: bool,
522 /// Whether this chunk of text was originally an inlay.
523 pub is_inlay: bool,
524 /// Whether to underline the corresponding text range in the editor.
525 pub underline: bool,
526}
527
528/// A set of edits to a given version of a buffer, computed asynchronously.
529#[derive(Debug)]
530pub struct Diff {
531 pub base_version: clock::Global,
532 pub line_ending: LineEnding,
533 pub edits: Vec<(Range<usize>, Arc<str>)>,
534}
535
536#[derive(Debug, Clone, Copy)]
537pub(crate) struct DiagnosticEndpoint {
538 offset: usize,
539 is_start: bool,
540 underline: bool,
541 severity: DiagnosticSeverity,
542 is_unnecessary: bool,
543}
544
545/// A class of characters, used for characterizing a run of text.
546#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
547pub enum CharKind {
548 /// Whitespace.
549 Whitespace,
550 /// Punctuation.
551 Punctuation,
552 /// Word.
553 Word,
554}
555
556/// Context for character classification within a specific scope.
557#[derive(Copy, Clone, Eq, PartialEq, Debug)]
558pub enum CharScopeContext {
559 /// Character classification for completion queries.
560 ///
561 /// This context treats certain characters as word constituents that would
562 /// normally be considered punctuation, such as '-' in Tailwind classes
563 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
564 Completion,
565 /// Character classification for linked edits.
566 ///
567 /// This context handles characters that should be treated as part of
568 /// identifiers during linked editing operations, such as '.' in JSX
569 /// component names like `<Animated.View>`.
570 LinkedEdit,
571}
572
573 /// A runnable is a set of data about a region that could be resolved into a task.
574pub struct Runnable {
575 pub tags: SmallVec<[RunnableTag; 1]>,
576 pub language: Arc<Language>,
577 pub buffer: BufferId,
578}
579
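/// A run of text together with resolved highlight styles, ready for display.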
580#[derive(Default, Clone, Debug)]
581pub struct HighlightedText {
582 pub text: SharedString,
583 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
584}
585
586#[derive(Default, Debug)]
587struct HighlightedTextBuilder {
588 pub text: String,
589 highlights: Vec<(Range<usize>, HighlightStyle)>,
590}
591
592impl HighlightedText {
593 pub fn from_buffer_range<T: ToOffset>(
594 range: Range<T>,
595 snapshot: &text::BufferSnapshot,
596 syntax_snapshot: &SyntaxSnapshot,
597 override_style: Option<HighlightStyle>,
598 syntax_theme: &SyntaxTheme,
599 ) -> Self {
600 let mut highlighted_text = HighlightedTextBuilder::default();
601 highlighted_text.add_text_from_buffer_range(
602 range,
603 snapshot,
604 syntax_snapshot,
605 override_style,
606 syntax_theme,
607 );
608 highlighted_text.build()
609 }
610
611 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
612 gpui::StyledText::new(self.text.clone())
613 .with_default_highlights(default_style, self.highlights.iter().cloned())
614 }
615
616 /// Returns the first line, with leading whitespace trimmed unless a highlight
617 /// starts within that whitespace, and a boolean indicating whether more lines follow.
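    ///
    /// A minimal illustrative sketch (not compiled as a doc-test):
    /// ```ignore
    /// let highlighted = HighlightedText {
    ///     text: "    let x = 1;\nlet y = 2;".into(),
    ///     highlights: Vec::new(),
    /// };
    /// let (preview, has_more) = highlighted.first_line_preview();
    /// // Leading whitespace is trimmed because no highlight starts within it.
    /// assert_eq!(&*preview.text, "let x = 1;");
    /// assert!(has_more);
    /// ```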
618 pub fn first_line_preview(self) -> (Self, bool) {
619 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
620 let first_line = &self.text[..newline_ix];
621
622 // Trim leading whitespace, unless an edit starts prior to it.
623 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
624 if let Some((first_highlight_range, _)) = self.highlights.first() {
625 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
626 }
627
628 let preview_text = &first_line[preview_start_ix..];
629 let preview_highlights = self
630 .highlights
631 .into_iter()
632 .skip_while(|(range, _)| range.end <= preview_start_ix)
633 .take_while(|(range, _)| range.start < newline_ix)
634 .filter_map(|(mut range, highlight)| {
635 range.start = range.start.saturating_sub(preview_start_ix);
636 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
637 if range.is_empty() {
638 None
639 } else {
640 Some((range, highlight))
641 }
642 });
643
644 let preview = Self {
645 text: SharedString::new(preview_text),
646 highlights: preview_highlights.collect(),
647 };
648
649 (preview, self.text.len() > newline_ix)
650 }
651}
652
653impl HighlightedTextBuilder {
654 pub fn build(self) -> HighlightedText {
655 HighlightedText {
656 text: self.text.into(),
657 highlights: self.highlights,
658 }
659 }
660
661 pub fn add_text_from_buffer_range<T: ToOffset>(
662 &mut self,
663 range: Range<T>,
664 snapshot: &text::BufferSnapshot,
665 syntax_snapshot: &SyntaxSnapshot,
666 override_style: Option<HighlightStyle>,
667 syntax_theme: &SyntaxTheme,
668 ) {
669 let range = range.to_offset(snapshot);
670 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
671 let start = self.text.len();
672 self.text.push_str(chunk.text);
673 let end = self.text.len();
674
675 if let Some(highlight_style) = chunk
676 .syntax_highlight_id
677 .and_then(|id| id.style(syntax_theme))
678 {
679 let highlight_style = override_style.map_or(highlight_style, |override_style| {
680 highlight_style.highlight(override_style)
681 });
682 self.highlights.push((start..end, highlight_style));
683 } else if let Some(override_style) = override_style {
684 self.highlights.push((start..end, override_style));
685 }
686 }
687 }
688
689 fn highlighted_chunks<'a>(
690 range: Range<usize>,
691 snapshot: &'a text::BufferSnapshot,
692 syntax_snapshot: &'a SyntaxSnapshot,
693 ) -> BufferChunks<'a> {
694 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
695 grammar
696 .highlights_config
697 .as_ref()
698 .map(|config| &config.query)
699 });
700
701 let highlight_maps = captures
702 .grammars()
703 .iter()
704 .map(|grammar| grammar.highlight_map())
705 .collect();
706
707 BufferChunks::new(
708 snapshot.as_rope(),
709 range,
710 Some((captures, highlight_maps)),
711 false,
712 None,
713 )
714 }
715}
716
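/// A pair of text snapshots, before and after applying a set of proposed edits,
/// used to render a highlighted preview of those edits. Produced by
/// [`Buffer::preview_edits`].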
717#[derive(Clone)]
718pub struct EditPreview {
719 old_snapshot: text::BufferSnapshot,
720 applied_edits_snapshot: text::BufferSnapshot,
721 syntax_snapshot: SyntaxSnapshot,
722}
723
724impl EditPreview {
725 pub fn highlight_edits(
726 &self,
727 current_snapshot: &BufferSnapshot,
728 edits: &[(Range<Anchor>, String)],
729 include_deletions: bool,
730 cx: &App,
731 ) -> HighlightedText {
732 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
733 return HighlightedText::default();
734 };
735
736 let mut highlighted_text = HighlightedTextBuilder::default();
737
738 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
739
740 let insertion_highlight_style = HighlightStyle {
741 background_color: Some(cx.theme().status().created_background),
742 ..Default::default()
743 };
744 let deletion_highlight_style = HighlightStyle {
745 background_color: Some(cx.theme().status().deleted_background),
746 ..Default::default()
747 };
748 let syntax_theme = cx.theme().syntax();
749
750 for (range, edit_text) in edits {
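            // The edit's end anchor is resolved in the preview snapshot, where the new
            // text is already present, so subtracting the new text's length yields the
            // start of the inserted text in that snapshot.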
751 let edit_new_end_in_preview_snapshot = range
752 .end
753 .bias_right(&self.old_snapshot)
754 .to_offset(&self.applied_edits_snapshot);
755 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
756
757 let unchanged_range_in_preview_snapshot =
758 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
759 if !unchanged_range_in_preview_snapshot.is_empty() {
760 highlighted_text.add_text_from_buffer_range(
761 unchanged_range_in_preview_snapshot,
762 &self.applied_edits_snapshot,
763 &self.syntax_snapshot,
764 None,
765 syntax_theme,
766 );
767 }
768
769 let range_in_current_snapshot = range.to_offset(current_snapshot);
770 if include_deletions && !range_in_current_snapshot.is_empty() {
771 highlighted_text.add_text_from_buffer_range(
772 range_in_current_snapshot,
773 &current_snapshot.text,
774 &current_snapshot.syntax,
775 Some(deletion_highlight_style),
776 syntax_theme,
777 );
778 }
779
780 if !edit_text.is_empty() {
781 highlighted_text.add_text_from_buffer_range(
782 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
783 &self.applied_edits_snapshot,
784 &self.syntax_snapshot,
785 Some(insertion_highlight_style),
786 syntax_theme,
787 );
788 }
789
790 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
791 }
792
793 highlighted_text.add_text_from_buffer_range(
794 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
795 &self.applied_edits_snapshot,
796 &self.syntax_snapshot,
797 None,
798 syntax_theme,
799 );
800
801 highlighted_text.build()
802 }
803
804 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
805 let (first, _) = edits.first()?;
806 let (last, _) = edits.last()?;
807
808 let start = first
809 .start
810 .bias_left(&self.old_snapshot)
811 .to_point(&self.applied_edits_snapshot);
812 let end = last
813 .end
814 .bias_right(&self.old_snapshot)
815 .to_point(&self.applied_edits_snapshot);
816
817 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
818 let range = Point::new(start.row, 0)
819 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
820
821 Some(range.to_offset(&self.applied_edits_snapshot))
822 }
823}
824
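/// A pair of matching brackets, given as the byte ranges of the opening and
/// closing brackets.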
825#[derive(Clone, Debug, PartialEq, Eq)]
826pub struct BracketMatch {
827 pub open_range: Range<usize>,
828 pub close_range: Range<usize>,
829 pub newline_only: bool,
830}
831
832impl Buffer {
833 /// Create a new buffer with the given base text.
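    ///
    /// A minimal usage sketch (assumes a gpui context is in scope):
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```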
834 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
835 Self::build(
836 TextBuffer::new(
837 ReplicaId::LOCAL,
838 cx.entity_id().as_non_zero_u64().into(),
839 base_text.into(),
840 &cx.background_executor(),
841 ),
842 None,
843 Capability::ReadWrite,
844 )
845 }
846
847 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
848 pub fn local_normalized(
849 base_text_normalized: Rope,
850 line_ending: LineEnding,
851 cx: &Context<Self>,
852 ) -> Self {
853 Self::build(
854 TextBuffer::new_normalized(
855 ReplicaId::LOCAL,
856 cx.entity_id().as_non_zero_u64().into(),
857 line_ending,
858 base_text_normalized,
859 ),
860 None,
861 Capability::ReadWrite,
862 )
863 }
864
865 /// Create a new buffer that is a replica of a remote buffer.
866 pub fn remote(
867 remote_id: BufferId,
868 replica_id: ReplicaId,
869 capability: Capability,
870 base_text: impl Into<String>,
871 cx: &BackgroundExecutor,
872 ) -> Self {
873 Self::build(
874 TextBuffer::new(replica_id, remote_id, base_text.into(), cx),
875 None,
876 capability,
877 )
878 }
879
880 /// Create a new buffer that is a replica of a remote buffer, populating its
881 /// state from the given protobuf message.
882 pub fn from_proto(
883 replica_id: ReplicaId,
884 capability: Capability,
885 message: proto::BufferState,
886 file: Option<Arc<dyn File>>,
887 cx: &BackgroundExecutor,
888 ) -> Result<Self> {
889 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
890 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx);
891 let mut this = Self::build(buffer, file, capability);
892 this.text.set_line_ending(proto::deserialize_line_ending(
893 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
894 ));
895 this.saved_version = proto::deserialize_version(&message.saved_version);
896 this.saved_mtime = message.saved_mtime.map(|time| time.into());
897 Ok(this)
898 }
899
900 /// Serialize the buffer's state to a protobuf message.
901 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
902 proto::BufferState {
903 id: self.remote_id().into(),
904 file: self.file.as_ref().map(|f| f.to_proto(cx)),
905 base_text: self.base_text().to_string(),
906 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
907 saved_version: proto::serialize_version(&self.saved_version),
908 saved_mtime: self.saved_mtime.map(|time| time.into()),
909 }
910 }
911
912 /// Serializes all of the changes made to this buffer since the given version as protobuf operations.
913 pub fn serialize_ops(
914 &self,
915 since: Option<clock::Global>,
916 cx: &App,
917 ) -> Task<Vec<proto::Operation>> {
918 let mut operations = Vec::new();
919 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
920
921 operations.extend(self.remote_selections.iter().map(|(_, set)| {
922 proto::serialize_operation(&Operation::UpdateSelections {
923 selections: set.selections.clone(),
924 lamport_timestamp: set.lamport_timestamp,
925 line_mode: set.line_mode,
926 cursor_shape: set.cursor_shape,
927 })
928 }));
929
930 for (server_id, diagnostics) in &self.diagnostics {
931 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
932 lamport_timestamp: self.diagnostics_timestamp,
933 server_id: *server_id,
934 diagnostics: diagnostics.iter().cloned().collect(),
935 }));
936 }
937
938 for (server_id, completions) in &self.completion_triggers_per_language_server {
939 operations.push(proto::serialize_operation(
940 &Operation::UpdateCompletionTriggers {
941 triggers: completions.iter().cloned().collect(),
942 lamport_timestamp: self.completion_triggers_timestamp,
943 server_id: *server_id,
944 },
945 ));
946 }
947
948 let text_operations = self.text.operations().clone();
949 cx.background_spawn(async move {
950 let since = since.unwrap_or_default();
951 operations.extend(
952 text_operations
953 .iter()
954 .filter(|(_, op)| !since.observed(op.timestamp()))
955 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
956 );
957 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
958 operations
959 })
960 }
961
962 /// Assign a language to the buffer, returning the buffer.
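    ///
    /// A usage sketch (the language value here is assumed to come from a
    /// [`LanguageRegistry`] or test fixture):
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(language, cx));
    /// ```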
963 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
964 self.set_language(Some(language), cx);
965 self
966 }
967
968 /// Returns the [`Capability`] of this buffer.
969 pub fn capability(&self) -> Capability {
970 self.capability
971 }
972
973 /// Whether this buffer can only be read.
974 pub fn read_only(&self) -> bool {
975 self.capability == Capability::ReadOnly
976 }
977
978 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
979 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
980 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
981 let snapshot = buffer.snapshot();
982 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
983 Self {
984 saved_mtime,
985 saved_version: buffer.version(),
986 preview_version: buffer.version(),
987 reload_task: None,
988 transaction_depth: 0,
989 was_dirty_before_starting_transaction: None,
990 has_unsaved_edits: Cell::new((buffer.version(), false)),
991 text: buffer,
992 branch_state: None,
993 file,
994 capability,
995 syntax_map,
996 reparse: None,
997 non_text_state_update_count: 0,
998 sync_parse_timeout: Duration::from_millis(1),
999 parse_status: watch::channel(ParseStatus::Idle),
1000 autoindent_requests: Default::default(),
1001 wait_for_autoindent_txs: Default::default(),
1002 pending_autoindent: Default::default(),
1003 language: None,
1004 remote_selections: Default::default(),
1005 diagnostics: Default::default(),
1006 diagnostics_timestamp: Lamport::MIN,
1007 completion_triggers: Default::default(),
1008 completion_triggers_per_language_server: Default::default(),
1009 completion_triggers_timestamp: Lamport::MIN,
1010 deferred_ops: OperationQueue::new(),
1011 has_conflict: false,
1012 change_bits: Default::default(),
1013 _subscriptions: Vec::new(),
1014 encoding: encoding_rs::UTF_8,
1015 }
1016 }
1017
1018 pub fn build_snapshot(
1019 text: Rope,
1020 language: Option<Arc<Language>>,
1021 language_registry: Option<Arc<LanguageRegistry>>,
1022 cx: &mut App,
1023 ) -> impl Future<Output = BufferSnapshot> + use<> {
1024 let entity_id = cx.reserve_entity::<Self>().entity_id();
1025 let buffer_id = entity_id.as_non_zero_u64().into();
1026 async move {
1027 let text =
1028 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1029 .snapshot();
1030 let mut syntax = SyntaxMap::new(&text).snapshot();
1031 if let Some(language) = language.clone() {
1032 let language_registry = language_registry.clone();
1033 syntax.reparse(&text, language_registry, language);
1034 }
1035 BufferSnapshot {
1036 text,
1037 syntax,
1038 file: None,
1039 diagnostics: Default::default(),
1040 remote_selections: Default::default(),
1041 language,
1042 non_text_state_update_count: 0,
1043 }
1044 }
1045 }
1046
1047 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1048 let entity_id = cx.reserve_entity::<Self>().entity_id();
1049 let buffer_id = entity_id.as_non_zero_u64().into();
1050 let text = TextBuffer::new_normalized(
1051 ReplicaId::LOCAL,
1052 buffer_id,
1053 Default::default(),
1054 Rope::new(),
1055 )
1056 .snapshot();
1057 let syntax = SyntaxMap::new(&text).snapshot();
1058 BufferSnapshot {
1059 text,
1060 syntax,
1061 file: None,
1062 diagnostics: Default::default(),
1063 remote_selections: Default::default(),
1064 language: None,
1065 non_text_state_update_count: 0,
1066 }
1067 }
1068
1069 #[cfg(any(test, feature = "test-support"))]
1070 pub fn build_snapshot_sync(
1071 text: Rope,
1072 language: Option<Arc<Language>>,
1073 language_registry: Option<Arc<LanguageRegistry>>,
1074 cx: &mut App,
1075 ) -> BufferSnapshot {
1076 let entity_id = cx.reserve_entity::<Self>().entity_id();
1077 let buffer_id = entity_id.as_non_zero_u64().into();
1078 let text =
1079 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1080 .snapshot();
1081 let mut syntax = SyntaxMap::new(&text).snapshot();
1082 if let Some(language) = language.clone() {
1083 syntax.reparse(&text, language_registry, language);
1084 }
1085 BufferSnapshot {
1086 text,
1087 syntax,
1088 file: None,
1089 diagnostics: Default::default(),
1090 remote_selections: Default::default(),
1091 language,
1092 non_text_state_update_count: 0,
1093 }
1094 }
1095
1096 /// Retrieve a snapshot of the buffer's current state. This is computationally
1097 /// cheap, and allows reading from the buffer on a background thread.
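    ///
    /// A sketch of reading buffer contents off the main thread (setup assumed):
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     let line_count = snapshot.max_point().row + 1;
    ///     // ... read from `snapshot` without blocking the main thread ...
    /// })
    /// .detach();
    /// ```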
1098 pub fn snapshot(&self) -> BufferSnapshot {
1099 let text = self.text.snapshot();
1100 let mut syntax_map = self.syntax_map.lock();
1101 syntax_map.interpolate(&text);
1102 let syntax = syntax_map.snapshot();
1103
1104 BufferSnapshot {
1105 text,
1106 syntax,
1107 file: self.file.clone(),
1108 remote_selections: self.remote_selections.clone(),
1109 diagnostics: self.diagnostics.clone(),
1110 language: self.language.clone(),
1111 non_text_state_update_count: self.non_text_state_update_count,
1112 }
1113 }
1114
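    /// Creates a branch of this buffer that starts with the same contents but
    /// tracks subsequent edits independently. Edits made in the branch can later
    /// be applied to the base buffer via [`Buffer::merge_into_base`].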
1115 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1116 let this = cx.entity();
1117 cx.new(|cx| {
1118 let mut branch = Self {
1119 branch_state: Some(BufferBranchState {
1120 base_buffer: this.clone(),
1121 merged_operations: Default::default(),
1122 }),
1123 language: self.language.clone(),
1124 has_conflict: self.has_conflict,
1125 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1126 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1127 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1128 };
1129 if let Some(language_registry) = self.language_registry() {
1130 branch.set_language_registry(language_registry);
1131 }
1132
1133 // Reparse the branch buffer so that we get syntax highlighting immediately.
1134 branch.reparse(cx);
1135
1136 branch
1137 })
1138 }
1139
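    /// Computes an [`EditPreview`] for the given edits on a background thread,
    /// allowing the edits to be rendered as a highlighted preview without
    /// modifying this buffer.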
1140 pub fn preview_edits(
1141 &self,
1142 edits: Arc<[(Range<Anchor>, String)]>,
1143 cx: &App,
1144 ) -> Task<EditPreview> {
1145 let registry = self.language_registry();
1146 let language = self.language().cloned();
1147 let old_snapshot = self.text.snapshot();
1148 let mut branch_buffer = self.text.branch();
1149 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1150 let executor = cx.background_executor().clone();
1151 cx.background_spawn(async move {
1152 if !edits.is_empty() {
1153 if let Some(language) = language.clone() {
1154 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1155 }
1156
1157 branch_buffer.edit(edits.iter().cloned(), &executor);
1158 let snapshot = branch_buffer.snapshot();
1159 syntax_snapshot.interpolate(&snapshot);
1160
1161 if let Some(language) = language {
1162 syntax_snapshot.reparse(&snapshot, registry, language);
1163 }
1164 }
1165 EditPreview {
1166 old_snapshot,
1167 applied_edits_snapshot: branch_buffer.snapshot(),
1168 syntax_snapshot,
1169 }
1170 })
1171 }
1172
1173 /// Applies all of the changes in this buffer that intersect any of the
1174 /// given `ranges` to its base buffer.
1175 ///
1176 /// If `ranges` is empty, then all changes will be applied. This buffer must
1177 /// be a branch buffer to call this method.
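    ///
    /// A usage sketch (assumes `branch` was created with [`Buffer::branch`]):
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     // Apply every change from the branch back into the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```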
1178 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1179 let Some(base_buffer) = self.base_buffer() else {
1180 debug_panic!("not a branch buffer");
1181 return;
1182 };
1183
1184 let mut ranges = if ranges.is_empty() {
1185 &[0..usize::MAX]
1186 } else {
1187 ranges.as_slice()
1188 }
1189 .iter()
1190 .peekable();
1191
1192 let mut edits = Vec::new();
1193 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1194 let mut is_included = false;
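            // Skip ranges that end before this edit; the edit is included if any
            // remaining range starts at or before the edit's end.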
1195 while let Some(range) = ranges.peek() {
1196 if range.end < edit.new.start {
1197 ranges.next().unwrap();
1198 } else {
1199 if range.start <= edit.new.end {
1200 is_included = true;
1201 }
1202 break;
1203 }
1204 }
1205
1206 if is_included {
1207 edits.push((
1208 edit.old.clone(),
1209 self.text_for_range(edit.new.clone()).collect::<String>(),
1210 ));
1211 }
1212 }
1213
1214 let operation = base_buffer.update(cx, |base_buffer, cx| {
1215 // cx.emit(BufferEvent::DiffBaseChanged);
1216 base_buffer.edit(edits, None, cx)
1217 });
1218
1219 if let Some(operation) = operation
1220 && let Some(BufferBranchState {
1221 merged_operations, ..
1222 }) = &mut self.branch_state
1223 {
1224 merged_operations.push(operation);
1225 }
1226 }
1227
1228 fn on_base_buffer_event(
1229 &mut self,
1230 _: Entity<Buffer>,
1231 event: &BufferEvent,
1232 cx: &mut Context<Self>,
1233 ) {
1234 let BufferEvent::Operation { operation, .. } = event else {
1235 return;
1236 };
1237 let Some(BufferBranchState {
1238 merged_operations, ..
1239 }) = &mut self.branch_state
1240 else {
1241 return;
1242 };
1243
1244 let mut operation_to_undo = None;
1245 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1246 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1247 {
1248 merged_operations.remove(ix);
1249 operation_to_undo = Some(operation.timestamp);
1250 }
1251
1252 self.apply_ops([operation.clone()], cx);
1253
1254 if let Some(timestamp) = operation_to_undo {
1255 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1256 self.undo_operations(counts, cx);
1257 }
1258 }
1259
1260 #[cfg(test)]
1261 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1262 &self.text
1263 }
1264
1265 /// Retrieve a snapshot of the buffer's raw text, without any
1266 /// language-related state like the syntax tree or diagnostics.
1267 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1268 self.text.snapshot()
1269 }
1270
1271 /// The file associated with the buffer, if any.
1272 pub fn file(&self) -> Option<&Arc<dyn File>> {
1273 self.file.as_ref()
1274 }
1275
1276 /// The version of the buffer that was last saved or reloaded from disk.
1277 pub fn saved_version(&self) -> &clock::Global {
1278 &self.saved_version
1279 }
1280
1281 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1282 pub fn saved_mtime(&self) -> Option<MTime> {
1283 self.saved_mtime
1284 }
1285
1286 /// Assign a language to the buffer.
1287 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1288 self.non_text_state_update_count += 1;
1289 self.syntax_map.lock().clear(&self.text);
1290 self.language = language;
1291 self.was_changed();
1292 self.reparse(cx);
1293 cx.emit(BufferEvent::LanguageChanged);
1294 }
1295
1296 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1297 /// other languages if parts of the buffer are written in different languages.
1298 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1299 self.syntax_map
1300 .lock()
1301 .set_language_registry(language_registry);
1302 }
1303
1304 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1305 self.syntax_map.lock().language_registry()
1306 }
1307
1308 /// Assign the line ending type to the buffer.
1309 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1310 self.text.set_line_ending(line_ending);
1311
1312 let lamport_timestamp = self.text.lamport_clock.tick();
1313 self.send_operation(
1314 Operation::UpdateLineEnding {
1315 line_ending,
1316 lamport_timestamp,
1317 },
1318 true,
1319 cx,
1320 );
1321 }
1322
1323 /// Assign the buffer a new [`Capability`].
1324 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1325 if self.capability != capability {
1326 self.capability = capability;
1327 cx.emit(BufferEvent::CapabilityChanged)
1328 }
1329 }
1330
1331 /// This method is called to signal that the buffer has been saved.
1332 pub fn did_save(
1333 &mut self,
1334 version: clock::Global,
1335 mtime: Option<MTime>,
1336 cx: &mut Context<Self>,
1337 ) {
1338 self.saved_version = version.clone();
1339 self.has_unsaved_edits.set((version, false));
1340 self.has_conflict = false;
1341 self.saved_mtime = mtime;
1342 self.was_changed();
1343 cx.emit(BufferEvent::Saved);
1344 cx.notify();
1345 }
1346
1347 /// Reloads the contents of the buffer from disk.
1348 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1349 let (tx, rx) = futures::channel::oneshot::channel();
1350 let encoding = self.encoding;
1351 let prev_version = self.text.version();
1352 self.reload_task = Some(cx.spawn(async move |this, cx| {
1353 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1354 let file = this.file.as_ref()?.as_local()?;
1355 Some((
1356 file.disk_state().mtime(),
1357 file.load_with_encoding(cx, encoding),
1358 ))
1359 })?
1360 else {
1361 return Ok(());
1362 };
1363
1364 let new_text = new_text.await?;
1365 let diff = this
1366 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1367 .await;
1368 this.update(cx, |this, cx| {
1369 if this.version() == diff.base_version {
1370 this.finalize_last_transaction();
1371 this.apply_diff(diff, cx);
1372 tx.send(this.finalize_last_transaction().cloned()).ok();
1373 this.has_conflict = false;
1374 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1375 } else {
1376 if !diff.edits.is_empty()
1377 || this
1378 .edits_since::<usize>(&diff.base_version)
1379 .next()
1380 .is_some()
1381 {
1382 this.has_conflict = true;
1383 }
1384
1385 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1386 }
1387
1388 this.reload_task.take();
1389 })
1390 }));
1391 rx
1392 }
1393
1394 /// This method is called to signal that the buffer has been reloaded.
1395 pub fn did_reload(
1396 &mut self,
1397 version: clock::Global,
1398 line_ending: LineEnding,
1399 mtime: Option<MTime>,
1400 cx: &mut Context<Self>,
1401 ) {
1402 self.saved_version = version;
1403 self.has_unsaved_edits
1404 .set((self.saved_version.clone(), false));
1405 self.text.set_line_ending(line_ending);
1406 self.saved_mtime = mtime;
1407 cx.emit(BufferEvent::Reloaded);
1408 cx.notify();
1409 }
1410
1411 /// Updates the [`File`] backing this buffer. This should be called when
1412 /// the file has changed or has been deleted.
1413 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1414 let was_dirty = self.is_dirty();
1415 let mut file_changed = false;
1416
1417 if let Some(old_file) = self.file.as_ref() {
1418 if new_file.path() != old_file.path() {
1419 file_changed = true;
1420 }
1421
1422 let old_state = old_file.disk_state();
1423 let new_state = new_file.disk_state();
1424 if old_state != new_state {
1425 file_changed = true;
1426 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1427 cx.emit(BufferEvent::ReloadNeeded)
1428 }
1429 }
1430 } else {
1431 file_changed = true;
1432 };
1433
1434 self.file = Some(new_file);
1435 if file_changed {
1436 self.was_changed();
1437 self.non_text_state_update_count += 1;
1438 if was_dirty != self.is_dirty() {
1439 cx.emit(BufferEvent::DirtyChanged);
1440 }
1441 cx.emit(BufferEvent::FileHandleChanged);
1442 cx.notify();
1443 }
1444 }
1445
1446 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1447 Some(self.branch_state.as_ref()?.base_buffer.clone())
1448 }
1449
1450 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1451 pub fn language(&self) -> Option<&Arc<Language>> {
1452 self.language.as_ref()
1453 }
1454
1455 /// Returns the [`Language`] at the given location.
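    ///
    /// When the buffer contains injected languages (for example, a code fence in
    /// a Markdown buffer), this typically returns the language of the most
    /// specific syntax layer containing the position, falling back to the
    /// buffer's primary language.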
1456 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1457 let offset = position.to_offset(self);
1458 let mut is_first = true;
1459 let start_anchor = self.anchor_before(offset);
1460 let end_anchor = self.anchor_after(offset);
1461 self.syntax_map
1462 .lock()
1463 .layers_for_range(offset..offset, &self.text, false)
1464 .filter(|layer| {
1465 if is_first {
1466 is_first = false;
1467 return true;
1468 }
1469
1470 layer
1471 .included_sub_ranges
1472 .map(|sub_ranges| {
1473 sub_ranges.iter().any(|sub_range| {
1474 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1475 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1476 !is_before_start && !is_after_end
1477 })
1478 })
1479 .unwrap_or(true)
1480 })
1481 .last()
1482 .map(|info| info.language.clone())
1483 .or_else(|| self.language.clone())
1484 }
1485
1486 /// Returns each [`Language`] for the active syntax layers at the given location.
1487 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1488 let offset = position.to_offset(self);
1489 let mut languages: Vec<Arc<Language>> = self
1490 .syntax_map
1491 .lock()
1492 .layers_for_range(offset..offset, &self.text, false)
1493 .map(|info| info.language.clone())
1494 .collect();
1495
1496 if languages.is_empty()
1497 && let Some(buffer_language) = self.language()
1498 {
1499 languages.push(buffer_language.clone());
1500 }
1501
1502 languages
1503 }
1504
1505 /// An integer version number that accounts for all updates besides
1506 /// the buffer's text itself (which is versioned via a version vector).
1507 pub fn non_text_state_update_count(&self) -> usize {
1508 self.non_text_state_update_count
1509 }
1510
1511 /// Whether the buffer is being parsed in the background.
1512 #[cfg(any(test, feature = "test-support"))]
1513 pub fn is_parsing(&self) -> bool {
1514 self.reparse.is_some()
1515 }
1516
1517 /// Indicates whether the buffer contains any regions that may be
1518 /// written in a language that hasn't been loaded yet.
1519 pub fn contains_unknown_injections(&self) -> bool {
1520 self.syntax_map.lock().contains_unknown_injections()
1521 }
1522
1523 #[cfg(any(test, feature = "test-support"))]
1524 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1525 self.sync_parse_timeout = timeout;
1526 }
1527
1528 /// Called after an edit to synchronize the buffer's main parse tree with
1529 /// the buffer's new underlying state.
1530 ///
1531 /// Locks the syntax map and interpolates the edits since the last reparse
1532 /// into the foreground syntax tree.
1533 ///
1534 /// Then takes a stable snapshot of the syntax map before unlocking it.
1535 /// The snapshot with the interpolated edits is sent to a background thread,
1536 /// where we ask Tree-sitter to perform an incremental parse.
1537 ///
1538 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1539 /// waiting on the parse to complete. As soon as it completes, we proceed
1540 /// synchronously, unless a 1ms timeout elapses.
1541 ///
1542 /// If we time out waiting on the parse, we spawn a second task that waits
1543 /// for the parse to complete, and we return with the interpolated tree still
1544 /// in the foreground. When the background parse completes, it calls back into
1545 /// the main thread and assigns the newly parsed state.
1546 ///
1547 /// If the buffer or grammar changed since the start of the background parse,
1548 /// initiate an additional reparse recursively. To avoid concurrent parses
1549 /// for the same buffer, we only initiate a new parse if we are not already
1550 /// parsing in the background.
1551 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1552 if self.reparse.is_some() {
1553 return;
1554 }
1555 let language = if let Some(language) = self.language.clone() {
1556 language
1557 } else {
1558 return;
1559 };
1560
1561 let text = self.text_snapshot();
1562 let parsed_version = self.version();
1563
1564 let mut syntax_map = self.syntax_map.lock();
1565 syntax_map.interpolate(&text);
1566 let language_registry = syntax_map.language_registry();
1567 let mut syntax_snapshot = syntax_map.snapshot();
1568 drop(syntax_map);
1569
1570 let parse_task = cx.background_spawn({
1571 let language = language.clone();
1572 let language_registry = language_registry.clone();
1573 async move {
1574 syntax_snapshot.reparse(&text, language_registry, language);
1575 syntax_snapshot
1576 }
1577 });
1578
1579 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1580 match cx
1581 .background_executor()
1582 .block_with_timeout(self.sync_parse_timeout, parse_task)
1583 {
1584 Ok(new_syntax_snapshot) => {
1585 self.did_finish_parsing(new_syntax_snapshot, cx);
1586 self.reparse = None;
1587 }
1588 Err(parse_task) => {
1589 // todo(lw): hot foreground spawn
1590 self.reparse = Some(cx.spawn(async move |this, cx| {
1591 let new_syntax_map = cx.background_spawn(parse_task).await;
1592 this.update(cx, move |this, cx| {
1593 let grammar_changed = || {
1594 this.language.as_ref().is_none_or(|current_language| {
1595 !Arc::ptr_eq(&language, current_language)
1596 })
1597 };
1598 let language_registry_changed = || {
1599 new_syntax_map.contains_unknown_injections()
1600 && language_registry.is_some_and(|registry| {
1601 registry.version() != new_syntax_map.language_registry_version()
1602 })
1603 };
1604 let parse_again = this.version.changed_since(&parsed_version)
1605 || language_registry_changed()
1606 || grammar_changed();
1607 this.did_finish_parsing(new_syntax_map, cx);
1608 this.reparse = None;
1609 if parse_again {
1610 this.reparse(cx);
1611 }
1612 })
1613 .ok();
1614 }));
1615 }
1616 }
1617 }
1618
1619 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1620 self.was_changed();
1621 self.non_text_state_update_count += 1;
1622 self.syntax_map.lock().did_parse(syntax_snapshot);
1623 self.request_autoindent(cx);
1624 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1625 cx.emit(BufferEvent::Reparsed);
1626 cx.notify();
1627 }
1628
1629 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1630 self.parse_status.1.clone()
1631 }
1632
1633 /// Assign to the buffer a set of diagnostics created by a given language server.
1634 pub fn update_diagnostics(
1635 &mut self,
1636 server_id: LanguageServerId,
1637 diagnostics: DiagnosticSet,
1638 cx: &mut Context<Self>,
1639 ) {
1640 let lamport_timestamp = self.text.lamport_clock.tick();
1641 let op = Operation::UpdateDiagnostics {
1642 server_id,
1643 diagnostics: diagnostics.iter().cloned().collect(),
1644 lamport_timestamp,
1645 };
1646
1647 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1648 self.send_operation(op, true, cx);
1649 }
1650
1651 pub fn buffer_diagnostics(
1652 &self,
1653 for_server: Option<LanguageServerId>,
1654 ) -> Vec<&DiagnosticEntry<Anchor>> {
1655 match for_server {
1656 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1657 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1658 Err(_) => Vec::new(),
1659 },
1660 None => self
1661 .diagnostics
1662 .iter()
1663 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1664 .collect(),
1665 }
1666 }
1667
1668 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1669 if let Some(indent_sizes) = self.compute_autoindents() {
1670 let indent_sizes = cx.background_spawn(indent_sizes);
1671 match cx
1672 .background_executor()
1673 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1674 {
1675 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1676 Err(indent_sizes) => {
1677 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1678 let indent_sizes = indent_sizes.await;
1679 this.update(cx, |this, cx| {
1680 this.apply_autoindents(indent_sizes, cx);
1681 })
1682 .ok();
1683 }));
1684 }
1685 }
1686 } else {
1687 self.autoindent_requests.clear();
1688 for tx in self.wait_for_autoindent_txs.drain(..) {
1689 tx.send(()).ok();
1690 }
1691 }
1692 }
1693
1694 fn compute_autoindents(
1695 &self,
1696 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1697 let max_rows_between_yields = 100;
1698 let snapshot = self.snapshot();
1699 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1700 return None;
1701 }
1702
1703 let autoindent_requests = self.autoindent_requests.clone();
1704 Some(async move {
1705 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1706 for request in autoindent_requests {
1707 // Resolve each edited range to its row in the current buffer and in the
1708 // buffer before this batch of edits.
1709 let mut row_ranges = Vec::new();
1710 let mut old_to_new_rows = BTreeMap::new();
1711 let mut language_indent_sizes_by_new_row = Vec::new();
1712 for entry in &request.entries {
1713 let position = entry.range.start;
1714 let new_row = position.to_point(&snapshot).row;
1715 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1716 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1717
1718 if !entry.first_line_is_new {
1719 let old_row = position.to_point(&request.before_edit).row;
1720 old_to_new_rows.insert(old_row, new_row);
1721 }
1722 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1723 }
1724
1725 // Build a map containing the suggested indentation for each of the edited lines
1726 // with respect to the state of the buffer before these edits. This map is keyed
1727 // by the rows for these lines in the current state of the buffer.
1728 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1729 let old_edited_ranges =
1730 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1731 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1732 let mut language_indent_size = IndentSize::default();
1733 for old_edited_range in old_edited_ranges {
1734 let suggestions = request
1735 .before_edit
1736 .suggest_autoindents(old_edited_range.clone())
1737 .into_iter()
1738 .flatten();
1739 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1740 if let Some(suggestion) = suggestion {
1741 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1742
1743 // Find the indent size based on the language for this row.
1744 while let Some((row, size)) = language_indent_sizes.peek() {
1745 if *row > new_row {
1746 break;
1747 }
1748 language_indent_size = *size;
1749 language_indent_sizes.next();
1750 }
1751
1752 let suggested_indent = old_to_new_rows
1753 .get(&suggestion.basis_row)
1754 .and_then(|from_row| {
1755 Some(old_suggestions.get(from_row).copied()?.0)
1756 })
1757 .unwrap_or_else(|| {
1758 request
1759 .before_edit
1760 .indent_size_for_line(suggestion.basis_row)
1761 })
1762 .with_delta(suggestion.delta, language_indent_size);
1763 old_suggestions
1764 .insert(new_row, (suggested_indent, suggestion.within_error));
1765 }
1766 }
1767 yield_now().await;
1768 }
1769
1770 // Compute new suggestions for each line, but only include them in the result
1771 // if they differ from the old suggestion for that line.
1772 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1773 let mut language_indent_size = IndentSize::default();
1774 for (row_range, original_indent_column) in row_ranges {
1775 let new_edited_row_range = if request.is_block_mode {
1776 row_range.start..row_range.start + 1
1777 } else {
1778 row_range.clone()
1779 };
1780
1781 let suggestions = snapshot
1782 .suggest_autoindents(new_edited_row_range.clone())
1783 .into_iter()
1784 .flatten();
1785 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1786 if let Some(suggestion) = suggestion {
1787 // Find the indent size based on the language for this row.
1788 while let Some((row, size)) = language_indent_sizes.peek() {
1789 if *row > new_row {
1790 break;
1791 }
1792 language_indent_size = *size;
1793 language_indent_sizes.next();
1794 }
1795
1796 let suggested_indent = indent_sizes
1797 .get(&suggestion.basis_row)
1798 .copied()
1799 .map(|e| e.0)
1800 .unwrap_or_else(|| {
1801 snapshot.indent_size_for_line(suggestion.basis_row)
1802 })
1803 .with_delta(suggestion.delta, language_indent_size);
1804
1805 if old_suggestions.get(&new_row).is_none_or(
1806 |(old_indentation, was_within_error)| {
1807 suggested_indent != *old_indentation
1808 && (!suggestion.within_error || *was_within_error)
1809 },
1810 ) {
1811 indent_sizes.insert(
1812 new_row,
1813 (suggested_indent, request.ignore_empty_lines),
1814 );
1815 }
1816 }
1817 }
1818
1819 if let (true, Some(original_indent_column)) =
1820 (request.is_block_mode, original_indent_column)
1821 {
1822 let new_indent =
1823 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1824 *indent
1825 } else {
1826 snapshot.indent_size_for_line(row_range.start)
1827 };
1828 let delta = new_indent.len as i64 - original_indent_column as i64;
1829 if delta != 0 {
1830 for row in row_range.skip(1) {
1831 indent_sizes.entry(row).or_insert_with(|| {
1832 let mut size = snapshot.indent_size_for_line(row);
1833 if size.kind == new_indent.kind {
1834 match delta.cmp(&0) {
1835 Ordering::Greater => size.len += delta as u32,
1836 Ordering::Less => {
1837 size.len = size.len.saturating_sub(-delta as u32)
1838 }
1839 Ordering::Equal => {}
1840 }
1841 }
1842 (size, request.ignore_empty_lines)
1843 });
1844 }
1845 }
1846 }
1847
1848 yield_now().await;
1849 }
1850 }
1851
1852 indent_sizes
1853 .into_iter()
1854 .filter_map(|(row, (indent, ignore_empty_lines))| {
1855 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1856 None
1857 } else {
1858 Some((row, indent))
1859 }
1860 })
1861 .collect()
1862 })
1863 }
1864
1865 fn apply_autoindents(
1866 &mut self,
1867 indent_sizes: BTreeMap<u32, IndentSize>,
1868 cx: &mut Context<Self>,
1869 ) {
1870 self.autoindent_requests.clear();
1871 for tx in self.wait_for_autoindent_txs.drain(..) {
1872 tx.send(()).ok();
1873 }
1874
1875 let edits: Vec<_> = indent_sizes
1876 .into_iter()
1877 .filter_map(|(row, indent_size)| {
1878 let current_size = indent_size_for_line(self, row);
1879 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1880 })
1881 .collect();
1882
1883 let preserve_preview = self.preserve_preview();
1884 self.edit(edits, None, cx);
1885 if preserve_preview {
1886 self.refresh_preview();
1887 }
1888 }
1889
1890 /// Create a minimal edit that will cause the given row to be indented
1891 /// with the given size. After applying this edit, the length of the line
1892 /// will always be at least `new_size.len`.
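    ///
    /// A minimal sketch of the resulting edit, assuming a spaces-based indent:
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces on row 3:
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// // `edit` is `Some((Point::new(3, 0)..Point::new(3, 0), "  "))`, i.e. two
    /// // spaces inserted at the start of the row.
    /// ```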
1893 pub fn edit_for_indent_size_adjustment(
1894 row: u32,
1895 current_size: IndentSize,
1896 new_size: IndentSize,
1897 ) -> Option<(Range<Point>, String)> {
1898 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1900 Ordering::Greater => {
1901 let point = Point::new(row, 0);
1902 Some((
1903 point..point,
1904 iter::repeat(new_size.char())
1905 .take((new_size.len - current_size.len) as usize)
1906 .collect::<String>(),
1907 ))
1908 }
1909
1910 Ordering::Less => Some((
1911 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1912 String::new(),
1913 )),
1914
1915 Ordering::Equal => None,
1916 }
1917 } else {
1918 Some((
1919 Point::new(row, 0)..Point::new(row, current_size.len),
1920 iter::repeat(new_size.char())
1921 .take(new_size.len as usize)
1922 .collect::<String>(),
1923 ))
1924 }
1925 }
1926
1927 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1928 /// and the given new text.
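    ///
    /// A minimal usage sketch (hypothetical call site, assuming an entity context):
    ///
    /// ```ignore
    /// let diff_task = buffer.diff(new_text, cx);
    /// cx.spawn(async move |buffer, cx| {
    ///     let diff = diff_task.await;
    ///     buffer
    ///         .update(cx, |buffer, cx| buffer.apply_diff(diff, cx))
    ///         .ok();
    /// })
    /// .detach();
    /// ```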
1929 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1930 let old_text = self.as_rope().clone();
1931 let base_version = self.version();
1932 cx.background_executor()
1933 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1934 let old_text = old_text.to_string();
1935 let line_ending = LineEnding::detect(&new_text);
1936 LineEnding::normalize(&mut new_text);
1937 let edits = text_diff(&old_text, &new_text);
1938 Diff {
1939 base_version,
1940 line_ending,
1941 edits,
1942 }
1943 })
1944 }
1945
1946 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1948 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1949 let old_text = self.as_rope().clone();
1950 let line_ending = self.line_ending();
1951 let base_version = self.version();
1952 cx.background_spawn(async move {
1953 let ranges = trailing_whitespace_ranges(&old_text);
1954 let empty = Arc::<str>::from("");
1955 Diff {
1956 base_version,
1957 line_ending,
1958 edits: ranges
1959 .into_iter()
1960 .map(|range| (range, empty.clone()))
1961 .collect(),
1962 }
1963 })
1964 }
1965
1966 /// Ensures that the buffer ends with a single newline character, and
    /// no other whitespace. Does nothing if the buffer is empty.
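    ///
    /// For example, a buffer containing `"hello  \n\n"` will end up containing `"hello\n"`.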
1968 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1969 let len = self.len();
1970 if len == 0 {
1971 return;
1972 }
1973 let mut offset = len;
1974 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1975 let non_whitespace_len = chunk
1976 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1977 .len();
1978 offset -= chunk.len();
1979 offset += non_whitespace_len;
1980 if non_whitespace_len != 0 {
1981 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1982 return;
1983 }
1984 break;
1985 }
1986 }
1987 self.edit([(offset..len, "\n")], None, cx);
1988 }
1989
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of the
    /// diff that conflict with those changes are discarded.
1993 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1994 let snapshot = self.snapshot();
1995 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1996 let mut delta = 0;
1997 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1998 while let Some(edit_since) = edits_since.peek() {
1999 // If the edit occurs after a diff hunk, then it does not
2000 // affect that hunk.
2001 if edit_since.old.start > range.end {
2002 break;
2003 }
2004 // If the edit precedes the diff hunk, then adjust the hunk
2005 // to reflect the edit.
2006 else if edit_since.old.end < range.start {
2007 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2008 edits_since.next();
2009 }
2010 // If the edit intersects a diff hunk, then discard that hunk.
2011 else {
2012 return None;
2013 }
2014 }
2015
2016 let start = (range.start as i64 + delta) as usize;
2017 let end = (range.end as i64 + delta) as usize;
2018 Some((start..end, new_text))
2019 });
2020
2021 self.start_transaction();
2022 self.text.set_line_ending(diff.line_ending);
2023 self.edit(adjusted_edits, None, cx);
2024 self.end_transaction(cx)
2025 }
2026
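    /// Returns whether the buffer contains edits that have not yet been saved, caching
    /// the answer for the buffer's current version.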
2027 pub fn has_unsaved_edits(&self) -> bool {
2028 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2029
2030 if last_version == self.version {
2031 self.has_unsaved_edits
2032 .set((last_version, has_unsaved_edits));
2033 return has_unsaved_edits;
2034 }
2035
2036 let has_edits = self.has_edits_since(&self.saved_version);
2037 self.has_unsaved_edits
2038 .set((self.version.clone(), has_edits));
2039 has_edits
2040 }
2041
2042 /// Checks if the buffer has unsaved changes.
2043 pub fn is_dirty(&self) -> bool {
2044 if self.capability == Capability::ReadOnly {
2045 return false;
2046 }
2047 if self.has_conflict {
2048 return true;
2049 }
2050 match self.file.as_ref().map(|f| f.disk_state()) {
2051 Some(DiskState::New) | Some(DiskState::Deleted) => {
2052 !self.is_empty() && self.has_unsaved_edits()
2053 }
2054 _ => self.has_unsaved_edits(),
2055 }
2056 }
2057
2058 /// Checks if the buffer and its file have both changed since the buffer
2059 /// was last saved or reloaded.
2060 pub fn has_conflict(&self) -> bool {
2061 if self.has_conflict {
2062 return true;
2063 }
2064 let Some(file) = self.file.as_ref() else {
2065 return false;
2066 };
2067 match file.disk_state() {
2068 DiskState::New => false,
2069 DiskState::Present { mtime } => match self.saved_mtime {
2070 Some(saved_mtime) => {
2071 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2072 }
2073 None => true,
2074 },
2075 DiskState::Deleted => false,
2076 }
2077 }
2078
2079 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2080 pub fn subscribe(&mut self) -> Subscription {
2081 self.text.subscribe()
2082 }
2083
2084 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2085 ///
2086 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
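    ///
    /// A minimal sketch, assuming the caller owns an `Rc<Cell<bool>>`:
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // After the buffer's text changes, the bit is set:
    /// // changed.get() == true
    /// ```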
2088 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2089 if let Err(ix) = self
2090 .change_bits
2091 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2092 {
2093 self.change_bits.insert(ix, bit);
2094 }
2095 }
2096
2097 /// Set the change bit for all "listeners".
2098 fn was_changed(&mut self) {
2099 self.change_bits.retain(|change_bit| {
2100 change_bit
2101 .upgrade()
2102 .inspect(|bit| {
2103 _ = bit.replace(true);
2104 })
2105 .is_some()
2106 });
2107 }
2108
2109 /// Starts a transaction, if one is not already in-progress. When undoing or
2110 /// redoing edits, all of the edits performed within a transaction are undone
2111 /// or redone together.
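    ///
    /// A minimal sketch of grouping two edits into a single undo step:
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(12..12, "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// // A single undo now reverts both edits.
    /// buffer.undo(cx);
    /// ```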
2112 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2113 self.start_transaction_at(Instant::now())
2114 }
2115
2116 /// Starts a transaction, providing the current time. Subsequent transactions
2117 /// that occur within a short period of time will be grouped together. This
2118 /// is controlled by the buffer's undo grouping duration.
2119 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2120 self.transaction_depth += 1;
2121 if self.was_dirty_before_starting_transaction.is_none() {
2122 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2123 }
2124 self.text.start_transaction_at(now)
2125 }
2126
2127 /// Terminates the current transaction, if this is the outermost transaction.
2128 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2129 self.end_transaction_at(Instant::now(), cx)
2130 }
2131
2132 /// Terminates the current transaction, providing the current time. Subsequent transactions
2133 /// that occur within a short period of time will be grouped together. This
2134 /// is controlled by the buffer's undo grouping duration.
2135 pub fn end_transaction_at(
2136 &mut self,
2137 now: Instant,
2138 cx: &mut Context<Self>,
2139 ) -> Option<TransactionId> {
2140 assert!(self.transaction_depth > 0);
2141 self.transaction_depth -= 1;
2142 let was_dirty = if self.transaction_depth == 0 {
2143 self.was_dirty_before_starting_transaction.take().unwrap()
2144 } else {
2145 false
2146 };
2147 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2148 self.did_edit(&start_version, was_dirty, cx);
2149 Some(transaction_id)
2150 } else {
2151 None
2152 }
2153 }
2154
2155 /// Manually add a transaction to the buffer's undo history.
2156 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2157 self.text.push_transaction(transaction, now);
2158 }
2159
2160 /// Differs from `push_transaction` in that it does not clear the redo
2161 /// stack. Intended to be used to create a parent transaction to merge
2162 /// potential child transactions into.
2163 ///
2164 /// The caller is responsible for removing it from the undo history using
2165 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2166 /// are merged into this transaction, the caller is responsible for ensuring
2167 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2168 /// cleared is to create transactions with the usual `start_transaction` and
2169 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
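    ///
    /// A minimal sketch (`child` is a hypothetical transaction id created elsewhere):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// // ...create `child` via start_transaction/end_transaction, then:
    /// buffer.merge_transactions(child, parent);
    /// // If nothing was merged into it, remove the placeholder instead:
    /// // buffer.forget_transaction(parent);
    /// ```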
2171 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2172 self.text.push_empty_transaction(now)
2173 }
2174
2175 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2177 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2178 self.text.finalize_last_transaction()
2179 }
2180
2181 /// Manually group all changes since a given transaction.
2182 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2183 self.text.group_until_transaction(transaction_id);
2184 }
2185
2186 /// Manually remove a transaction from the buffer's undo history
2187 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2188 self.text.forget_transaction(transaction_id)
2189 }
2190
2191 /// Retrieve a transaction from the buffer's undo history
2192 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2193 self.text.get_transaction(transaction_id)
2194 }
2195
2196 /// Manually merge two transactions in the buffer's undo history.
2197 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2198 self.text.merge_transactions(transaction, destination);
2199 }
2200
2201 /// Waits for the buffer to receive operations with the given timestamps.
2202 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2203 &mut self,
2204 edit_ids: It,
2205 ) -> impl Future<Output = Result<()>> + use<It> {
2206 self.text.wait_for_edits(edit_ids)
2207 }
2208
2209 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2210 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2211 &mut self,
2212 anchors: It,
2213 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2214 self.text.wait_for_anchors(anchors)
2215 }
2216
2217 /// Waits for the buffer to receive operations up to the given version.
2218 pub fn wait_for_version(
2219 &mut self,
2220 version: clock::Global,
2221 ) -> impl Future<Output = Result<()>> + use<> {
2222 self.text.wait_for_version(version)
2223 }
2224
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2227 pub fn give_up_waiting(&mut self) {
2228 self.text.give_up_waiting();
2229 }
2230
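    /// Returns a receiver that resolves once any pending auto-indent requests have been
    /// applied, or `None` if no auto-indent is currently pending.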
2231 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2232 let mut rx = None;
2233 if !self.autoindent_requests.is_empty() {
2234 let channel = oneshot::channel();
2235 self.wait_for_autoindent_txs.push(channel.0);
2236 rx = Some(channel.1);
2237 }
2238 rx
2239 }
2240
2241 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2242 pub fn set_active_selections(
2243 &mut self,
2244 selections: Arc<[Selection<Anchor>]>,
2245 line_mode: bool,
2246 cursor_shape: CursorShape,
2247 cx: &mut Context<Self>,
2248 ) {
2249 let lamport_timestamp = self.text.lamport_clock.tick();
2250 self.remote_selections.insert(
2251 self.text.replica_id(),
2252 SelectionSet {
2253 selections: selections.clone(),
2254 lamport_timestamp,
2255 line_mode,
2256 cursor_shape,
2257 },
2258 );
2259 self.send_operation(
2260 Operation::UpdateSelections {
2261 selections,
2262 line_mode,
2263 lamport_timestamp,
2264 cursor_shape,
2265 },
2266 true,
2267 cx,
2268 );
2269 self.non_text_state_update_count += 1;
2270 cx.notify();
2271 }
2272
2273 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2274 /// this replica.
2275 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2276 if self
2277 .remote_selections
2278 .get(&self.text.replica_id())
2279 .is_none_or(|set| !set.selections.is_empty())
2280 {
2281 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2282 }
2283 }
2284
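    /// Stores a set of selections attributed to the agent ([`ReplicaId::AGENT`]), without
    /// broadcasting them to other replicas.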
2285 pub fn set_agent_selections(
2286 &mut self,
2287 selections: Arc<[Selection<Anchor>]>,
2288 line_mode: bool,
2289 cursor_shape: CursorShape,
2290 cx: &mut Context<Self>,
2291 ) {
2292 let lamport_timestamp = self.text.lamport_clock.tick();
2293 self.remote_selections.insert(
2294 ReplicaId::AGENT,
2295 SelectionSet {
2296 selections,
2297 lamport_timestamp,
2298 line_mode,
2299 cursor_shape,
2300 },
2301 );
2302 self.non_text_state_update_count += 1;
2303 cx.notify();
2304 }
2305
2306 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2307 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2308 }
2309
2310 /// Replaces the buffer's entire text.
2311 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2312 where
2313 T: Into<Arc<str>>,
2314 {
2315 self.autoindent_requests.clear();
2316 self.edit([(0..self.len(), text)], None, cx)
2317 }
2318
2319 /// Appends the given text to the end of the buffer.
2320 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2321 where
2322 T: Into<Arc<str>>,
2323 {
2324 self.edit([(self.len()..self.len(), text)], None, cx)
2325 }
2326
2327 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2328 /// delete, and a string of text to insert at that location.
2329 ///
2330 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2331 /// request for the edited ranges, which will be processed when the buffer finishes
2332 /// parsing.
2333 ///
    /// Parsing takes place at the end of a transaction, and may be performed synchronously
    /// or asynchronously, depending on the changes.
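    ///
    /// A minimal sketch of a multi-range edit with auto-indentation:
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [(0..2, "if"), (10..10, " {\n    body\n}")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```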
2336 pub fn edit<I, S, T>(
2337 &mut self,
2338 edits_iter: I,
2339 autoindent_mode: Option<AutoindentMode>,
2340 cx: &mut Context<Self>,
2341 ) -> Option<clock::Lamport>
2342 where
2343 I: IntoIterator<Item = (Range<S>, T)>,
2344 S: ToOffset,
2345 T: Into<Arc<str>>,
2346 {
2347 // Skip invalid edits and coalesce contiguous ones.
2348 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2349
2350 for (range, new_text) in edits_iter {
2351 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2352
2353 if range.start > range.end {
2354 mem::swap(&mut range.start, &mut range.end);
2355 }
2356 let new_text = new_text.into();
2357 if !new_text.is_empty() || !range.is_empty() {
2358 if let Some((prev_range, prev_text)) = edits.last_mut()
2359 && prev_range.end >= range.start
2360 {
2361 prev_range.end = cmp::max(prev_range.end, range.end);
2362 *prev_text = format!("{prev_text}{new_text}").into();
2363 } else {
2364 edits.push((range, new_text));
2365 }
2366 }
2367 }
2368 if edits.is_empty() {
2369 return None;
2370 }
2371
2372 self.start_transaction();
2373 self.pending_autoindent.take();
2374 let autoindent_request = autoindent_mode
2375 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2376
2377 let edit_operation = self
2378 .text
2379 .edit(edits.iter().cloned(), cx.background_executor());
2380 let edit_id = edit_operation.timestamp();
2381
2382 if let Some((before_edit, mode)) = autoindent_request {
2383 let mut delta = 0isize;
2384 let mut previous_setting = None;
2385 let entries: Vec<_> = edits
2386 .into_iter()
2387 .enumerate()
2388 .zip(&edit_operation.as_edit().unwrap().new_text)
2389 .filter(|((_, (range, _)), _)| {
2390 let language = before_edit.language_at(range.start);
2391 let language_id = language.map(|l| l.id());
2392 if let Some((cached_language_id, auto_indent)) = previous_setting
2393 && cached_language_id == language_id
2394 {
2395 auto_indent
2396 } else {
2397 // The auto-indent setting is not present in editorconfigs, hence
2398 // we can avoid passing the file here.
2399 let auto_indent =
2400 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2401 previous_setting = Some((language_id, auto_indent));
2402 auto_indent
2403 }
2404 })
2405 .map(|((ix, (range, _)), new_text)| {
2406 let new_text_length = new_text.len();
2407 let old_start = range.start.to_point(&before_edit);
2408 let new_start = (delta + range.start as isize) as usize;
2409 let range_len = range.end - range.start;
2410 delta += new_text_length as isize - range_len as isize;
2411
2412 // Decide what range of the insertion to auto-indent, and whether
2413 // the first line of the insertion should be considered a newly-inserted line
2414 // or an edit to an existing line.
2415 let mut range_of_insertion_to_indent = 0..new_text_length;
2416 let mut first_line_is_new = true;
2417
2418 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2419 let old_line_end = before_edit.line_len(old_start.row);
2420
2421 if old_start.column > old_line_start {
2422 first_line_is_new = false;
2423 }
2424
2425 if !new_text.contains('\n')
2426 && (old_start.column + (range_len as u32) < old_line_end
2427 || old_line_end == old_line_start)
2428 {
2429 first_line_is_new = false;
2430 }
2431
2432 // When inserting text starting with a newline, avoid auto-indenting the
2433 // previous line.
2434 if new_text.starts_with('\n') {
2435 range_of_insertion_to_indent.start += 1;
2436 first_line_is_new = true;
2437 }
2438
2439 let mut original_indent_column = None;
2440 if let AutoindentMode::Block {
2441 original_indent_columns,
2442 } = &mode
2443 {
2444 original_indent_column = Some(if new_text.starts_with('\n') {
2445 indent_size_for_text(
2446 new_text[range_of_insertion_to_indent.clone()].chars(),
2447 )
2448 .len
2449 } else {
2450 original_indent_columns
2451 .get(ix)
2452 .copied()
2453 .flatten()
2454 .unwrap_or_else(|| {
2455 indent_size_for_text(
2456 new_text[range_of_insertion_to_indent.clone()].chars(),
2457 )
2458 .len
2459 })
2460 });
2461
2462 // Avoid auto-indenting the line after the edit.
2463 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2464 range_of_insertion_to_indent.end -= 1;
2465 }
2466 }
2467
2468 AutoindentRequestEntry {
2469 first_line_is_new,
2470 original_indent_column,
2471 indent_size: before_edit.language_indent_size_at(range.start, cx),
2472 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2473 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2474 }
2475 })
2476 .collect();
2477
2478 if !entries.is_empty() {
2479 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2480 before_edit,
2481 entries,
2482 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2483 ignore_empty_lines: false,
2484 }));
2485 }
2486 }
2487
2488 self.end_transaction(cx);
2489 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2490 Some(edit_id)
2491 }
2492
2493 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2494 self.was_changed();
2495
2496 if self.edits_since::<usize>(old_version).next().is_none() {
2497 return;
2498 }
2499
2500 self.reparse(cx);
2501 cx.emit(BufferEvent::Edited);
2502 if was_dirty != self.is_dirty() {
2503 cx.emit(BufferEvent::DirtyChanged);
2504 }
2505 cx.notify();
2506 }
2507
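    /// Requests that the lines in the given ranges be auto-indented, skipping lines
    /// that are empty.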
2508 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2509 where
2510 I: IntoIterator<Item = Range<T>>,
2511 T: ToOffset + Copy,
2512 {
2513 let before_edit = self.snapshot();
2514 let entries = ranges
2515 .into_iter()
2516 .map(|range| AutoindentRequestEntry {
2517 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2518 first_line_is_new: true,
2519 indent_size: before_edit.language_indent_size_at(range.start, cx),
2520 original_indent_column: None,
2521 })
2522 .collect();
2523 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2524 before_edit,
2525 entries,
2526 is_block_mode: false,
2527 ignore_empty_lines: true,
2528 }));
2529 self.request_autoindent(cx);
2530 }
2531
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
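    ///
    /// A minimal sketch: open a blank line near row 2, padded by blank lines above and
    /// below where needed:
    ///
    /// ```ignore
    /// let start = buffer.insert_empty_line(Point::new(2, 5), true, true, cx);
    /// ```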
2534 pub fn insert_empty_line(
2535 &mut self,
2536 position: impl ToPoint,
2537 space_above: bool,
2538 space_below: bool,
2539 cx: &mut Context<Self>,
2540 ) -> Point {
2541 let mut position = position.to_point(self);
2542
2543 self.start_transaction();
2544
2545 self.edit(
2546 [(position..position, "\n")],
2547 Some(AutoindentMode::EachLine),
2548 cx,
2549 );
2550
2551 if position.column > 0 {
2552 position += Point::new(1, 0);
2553 }
2554
2555 if !self.is_line_blank(position.row) {
2556 self.edit(
2557 [(position..position, "\n")],
2558 Some(AutoindentMode::EachLine),
2559 cx,
2560 );
2561 }
2562
2563 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2564 self.edit(
2565 [(position..position, "\n")],
2566 Some(AutoindentMode::EachLine),
2567 cx,
2568 );
2569 position.row += 1;
2570 }
2571
2572 if space_below
2573 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2574 {
2575 self.edit(
2576 [(position..position, "\n")],
2577 Some(AutoindentMode::EachLine),
2578 cx,
2579 );
2580 }
2581
2582 self.end_transaction(cx);
2583
2584 position
2585 }
2586
2587 /// Applies the given remote operations to the buffer.
2588 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2589 self.pending_autoindent.take();
2590 let was_dirty = self.is_dirty();
2591 let old_version = self.version.clone();
2592 let mut deferred_ops = Vec::new();
2593 let buffer_ops = ops
2594 .into_iter()
2595 .filter_map(|op| match op {
2596 Operation::Buffer(op) => Some(op),
2597 _ => {
2598 if self.can_apply_op(&op) {
2599 self.apply_op(op, cx);
2600 } else {
2601 deferred_ops.push(op);
2602 }
2603 None
2604 }
2605 })
2606 .collect::<Vec<_>>();
2607 for operation in buffer_ops.iter() {
2608 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2609 }
2610 self.text
2611 .apply_ops(buffer_ops, Some(cx.background_executor()));
2612 self.deferred_ops.insert(deferred_ops);
2613 self.flush_deferred_ops(cx);
2614 self.did_edit(&old_version, was_dirty, cx);
2615 // Notify independently of whether the buffer was edited as the operations could include a
2616 // selection update.
2617 cx.notify();
2618 }
2619
2620 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2621 let mut deferred_ops = Vec::new();
2622 for op in self.deferred_ops.drain().iter().cloned() {
2623 if self.can_apply_op(&op) {
2624 self.apply_op(op, cx);
2625 } else {
2626 deferred_ops.push(op);
2627 }
2628 }
2629 self.deferred_ops.insert(deferred_ops);
2630 }
2631
2632 pub fn has_deferred_ops(&self) -> bool {
2633 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2634 }
2635
2636 fn can_apply_op(&self, operation: &Operation) -> bool {
2637 match operation {
2638 Operation::Buffer(_) => {
2639 unreachable!("buffer operations should never be applied at this layer")
2640 }
2641 Operation::UpdateDiagnostics {
2642 diagnostics: diagnostic_set,
2643 ..
2644 } => diagnostic_set.iter().all(|diagnostic| {
2645 self.text.can_resolve(&diagnostic.range.start)
2646 && self.text.can_resolve(&diagnostic.range.end)
2647 }),
2648 Operation::UpdateSelections { selections, .. } => selections
2649 .iter()
2650 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2651 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2652 }
2653 }
2654
2655 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2656 match operation {
2657 Operation::Buffer(_) => {
2658 unreachable!("buffer operations should never be applied at this layer")
2659 }
2660 Operation::UpdateDiagnostics {
2661 server_id,
2662 diagnostics: diagnostic_set,
2663 lamport_timestamp,
2664 } => {
2665 let snapshot = self.snapshot();
2666 self.apply_diagnostic_update(
2667 server_id,
2668 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2669 lamport_timestamp,
2670 cx,
2671 );
2672 }
2673 Operation::UpdateSelections {
2674 selections,
2675 lamport_timestamp,
2676 line_mode,
2677 cursor_shape,
2678 } => {
2679 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2680 && set.lamport_timestamp > lamport_timestamp
2681 {
2682 return;
2683 }
2684
2685 self.remote_selections.insert(
2686 lamport_timestamp.replica_id,
2687 SelectionSet {
2688 selections,
2689 lamport_timestamp,
2690 line_mode,
2691 cursor_shape,
2692 },
2693 );
2694 self.text.lamport_clock.observe(lamport_timestamp);
2695 self.non_text_state_update_count += 1;
2696 }
2697 Operation::UpdateCompletionTriggers {
2698 triggers,
2699 lamport_timestamp,
2700 server_id,
2701 } => {
2702 if triggers.is_empty() {
2703 self.completion_triggers_per_language_server
2704 .remove(&server_id);
2705 self.completion_triggers = self
2706 .completion_triggers_per_language_server
2707 .values()
2708 .flat_map(|triggers| triggers.iter().cloned())
2709 .collect();
2710 } else {
2711 self.completion_triggers_per_language_server
2712 .insert(server_id, triggers.iter().cloned().collect());
2713 self.completion_triggers.extend(triggers);
2714 }
2715 self.text.lamport_clock.observe(lamport_timestamp);
2716 }
2717 Operation::UpdateLineEnding {
2718 line_ending,
2719 lamport_timestamp,
2720 } => {
2721 self.text.set_line_ending(line_ending);
2722 self.text.lamport_clock.observe(lamport_timestamp);
2723 }
2724 }
2725 }
2726
2727 fn apply_diagnostic_update(
2728 &mut self,
2729 server_id: LanguageServerId,
2730 diagnostics: DiagnosticSet,
2731 lamport_timestamp: clock::Lamport,
2732 cx: &mut Context<Self>,
2733 ) {
2734 if lamport_timestamp > self.diagnostics_timestamp {
2735 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2736 if diagnostics.is_empty() {
2737 if let Ok(ix) = ix {
2738 self.diagnostics.remove(ix);
2739 }
2740 } else {
2741 match ix {
2742 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2743 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2744 };
2745 }
2746 self.diagnostics_timestamp = lamport_timestamp;
2747 self.non_text_state_update_count += 1;
2748 self.text.lamport_clock.observe(lamport_timestamp);
2749 cx.notify();
2750 cx.emit(BufferEvent::DiagnosticsUpdated);
2751 }
2752 }
2753
2754 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2755 self.was_changed();
2756 cx.emit(BufferEvent::Operation {
2757 operation,
2758 is_local,
2759 });
2760 }
2761
2762 /// Removes the selections for a given peer.
2763 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2764 self.remote_selections.remove(&replica_id);
2765 cx.notify();
2766 }
2767
2768 /// Undoes the most recent transaction.
2769 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2770 let was_dirty = self.is_dirty();
2771 let old_version = self.version.clone();
2772
2773 if let Some((transaction_id, operation)) = self.text.undo() {
2774 self.send_operation(Operation::Buffer(operation), true, cx);
2775 self.did_edit(&old_version, was_dirty, cx);
2776 Some(transaction_id)
2777 } else {
2778 None
2779 }
2780 }
2781
2782 /// Manually undoes a specific transaction in the buffer's undo history.
2783 pub fn undo_transaction(
2784 &mut self,
2785 transaction_id: TransactionId,
2786 cx: &mut Context<Self>,
2787 ) -> bool {
2788 let was_dirty = self.is_dirty();
2789 let old_version = self.version.clone();
2790 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2791 self.send_operation(Operation::Buffer(operation), true, cx);
2792 self.did_edit(&old_version, was_dirty, cx);
2793 true
2794 } else {
2795 false
2796 }
2797 }
2798
2799 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2800 pub fn undo_to_transaction(
2801 &mut self,
2802 transaction_id: TransactionId,
2803 cx: &mut Context<Self>,
2804 ) -> bool {
2805 let was_dirty = self.is_dirty();
2806 let old_version = self.version.clone();
2807
2808 let operations = self.text.undo_to_transaction(transaction_id);
2809 let undone = !operations.is_empty();
2810 for operation in operations {
2811 self.send_operation(Operation::Buffer(operation), true, cx);
2812 }
2813 if undone {
2814 self.did_edit(&old_version, was_dirty, cx)
2815 }
2816 undone
2817 }
2818
2819 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2820 let was_dirty = self.is_dirty();
2821 let operation = self.text.undo_operations(counts);
2822 let old_version = self.version.clone();
2823 self.send_operation(Operation::Buffer(operation), true, cx);
2824 self.did_edit(&old_version, was_dirty, cx);
2825 }
2826
    /// Redoes the most recent transaction.
2828 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2829 let was_dirty = self.is_dirty();
2830 let old_version = self.version.clone();
2831
2832 if let Some((transaction_id, operation)) = self.text.redo() {
2833 self.send_operation(Operation::Buffer(operation), true, cx);
2834 self.did_edit(&old_version, was_dirty, cx);
2835 Some(transaction_id)
2836 } else {
2837 None
2838 }
2839 }
2840
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2842 pub fn redo_to_transaction(
2843 &mut self,
2844 transaction_id: TransactionId,
2845 cx: &mut Context<Self>,
2846 ) -> bool {
2847 let was_dirty = self.is_dirty();
2848 let old_version = self.version.clone();
2849
2850 let operations = self.text.redo_to_transaction(transaction_id);
2851 let redone = !operations.is_empty();
2852 for operation in operations {
2853 self.send_operation(Operation::Buffer(operation), true, cx);
2854 }
2855 if redone {
2856 self.did_edit(&old_version, was_dirty, cx)
2857 }
2858 redone
2859 }
2860
2861 /// Override current completion triggers with the user-provided completion triggers.
2862 pub fn set_completion_triggers(
2863 &mut self,
2864 server_id: LanguageServerId,
2865 triggers: BTreeSet<String>,
2866 cx: &mut Context<Self>,
2867 ) {
2868 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2869 if triggers.is_empty() {
2870 self.completion_triggers_per_language_server
2871 .remove(&server_id);
2872 self.completion_triggers = self
2873 .completion_triggers_per_language_server
2874 .values()
2875 .flat_map(|triggers| triggers.iter().cloned())
2876 .collect();
2877 } else {
2878 self.completion_triggers_per_language_server
2879 .insert(server_id, triggers.clone());
2880 self.completion_triggers.extend(triggers.iter().cloned());
2881 }
2882 self.send_operation(
2883 Operation::UpdateCompletionTriggers {
2884 triggers: triggers.into_iter().collect(),
2885 lamport_timestamp: self.completion_triggers_timestamp,
2886 server_id,
2887 },
2888 true,
2889 cx,
2890 );
2891 cx.notify();
2892 }
2893
2894 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2896 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2897 &self.completion_triggers
2898 }
2899
2900 /// Call this directly after performing edits to prevent the preview tab
2901 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2902 /// to return false until there are additional edits.
2903 pub fn refresh_preview(&mut self) {
2904 self.preview_version = self.version.clone();
2905 }
2906
2907 /// Whether we should preserve the preview status of a tab containing this buffer.
2908 pub fn preserve_preview(&self) -> bool {
2909 !self.has_edits_since(&self.preview_version)
2910 }
2911}
2912
2913#[doc(hidden)]
2914#[cfg(any(test, feature = "test-support"))]
2915impl Buffer {
2916 pub fn edit_via_marked_text(
2917 &mut self,
2918 marked_string: &str,
2919 autoindent_mode: Option<AutoindentMode>,
2920 cx: &mut Context<Self>,
2921 ) {
2922 let edits = self.edits_for_marked_text(marked_string);
2923 self.edit(edits, autoindent_mode, cx);
2924 }
2925
2926 pub fn set_group_interval(&mut self, group_interval: Duration) {
2927 self.text.set_group_interval(group_interval);
2928 }
2929
2930 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2931 where
2932 T: rand::Rng,
2933 {
2934 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2935 let mut last_end = None;
2936 for _ in 0..old_range_count {
2937 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2938 break;
2939 }
2940
2941 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2942 let mut range = self.random_byte_range(new_start, rng);
2943 if rng.random_bool(0.2) {
2944 mem::swap(&mut range.start, &mut range.end);
2945 }
2946 last_end = Some(range.end);
2947
2948 let new_text_len = rng.random_range(0..10);
2949 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2950 new_text = new_text.to_uppercase();
2951
2952 edits.push((range, new_text));
2953 }
2954 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2955 self.edit(edits, None, cx);
2956 }
2957
2958 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2959 let was_dirty = self.is_dirty();
2960 let old_version = self.version.clone();
2961
2962 let ops = self.text.randomly_undo_redo(rng);
2963 if !ops.is_empty() {
2964 for op in ops {
2965 self.send_operation(Operation::Buffer(op), true, cx);
2966 self.did_edit(&old_version, was_dirty, cx);
2967 }
2968 }
2969 }
2970}
2971
2972impl EventEmitter<BufferEvent> for Buffer {}
2973
2974impl Deref for Buffer {
2975 type Target = TextBuffer;
2976
2977 fn deref(&self) -> &Self::Target {
2978 &self.text
2979 }
2980}
2981
2982impl BufferSnapshot {
2983 /// Returns [`IndentSize`] for a given line that respects user settings and
2984 /// language preferences.
2985 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2986 indent_size_for_line(self, row)
2987 }
2988
2989 /// Returns [`IndentSize`] for a given position that respects user settings
2990 /// and language preferences.
2991 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2992 let settings = language_settings(
2993 self.language_at(position).map(|l| l.name()),
2994 self.file(),
2995 cx,
2996 );
2997 if settings.hard_tabs {
2998 IndentSize::tab()
2999 } else {
3000 IndentSize::spaces(settings.tab_size.get())
3001 }
3002 }
3003
3004 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3005 /// is passed in as `single_indent_size`.
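    ///
    /// A minimal sketch, assuming a 4-space indent unit:
    ///
    /// ```ignore
    /// let indents = snapshot.suggested_indents(2..6, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row}: indent by {} columns", indent.len);
    /// }
    /// ```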
3006 pub fn suggested_indents(
3007 &self,
3008 rows: impl Iterator<Item = u32>,
3009 single_indent_size: IndentSize,
3010 ) -> BTreeMap<u32, IndentSize> {
3011 let mut result = BTreeMap::new();
3012
3013 for row_range in contiguous_ranges(rows, 10) {
3014 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3015 Some(suggestions) => suggestions,
3016 _ => break,
3017 };
3018
3019 for (row, suggestion) in row_range.zip(suggestions) {
3020 let indent_size = if let Some(suggestion) = suggestion {
3021 result
3022 .get(&suggestion.basis_row)
3023 .copied()
3024 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3025 .with_delta(suggestion.delta, single_indent_size)
3026 } else {
3027 self.indent_size_for_line(row)
3028 };
3029
3030 result.insert(row, indent_size);
3031 }
3032 }
3033
3034 result
3035 }
3036
3037 fn suggest_autoindents(
3038 &self,
3039 row_range: Range<u32>,
3040 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3041 let config = &self.language.as_ref()?.config;
3042 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3043
3044 #[derive(Debug, Clone)]
3045 struct StartPosition {
3046 start: Point,
3047 suffix: SharedString,
3048 }
3049
3050 // Find the suggested indentation ranges based on the syntax tree.
3051 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3052 let end = Point::new(row_range.end, 0);
3053 let range = (start..end).to_offset(&self.text);
3054 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3055 Some(&grammar.indents_config.as_ref()?.query)
3056 });
3057 let indent_configs = matches
3058 .grammars()
3059 .iter()
3060 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3061 .collect::<Vec<_>>();
3062
3063 let mut indent_ranges = Vec::<Range<Point>>::new();
3064 let mut start_positions = Vec::<StartPosition>::new();
3065 let mut outdent_positions = Vec::<Point>::new();
3066 while let Some(mat) = matches.peek() {
3067 let mut start: Option<Point> = None;
3068 let mut end: Option<Point> = None;
3069
3070 let config = indent_configs[mat.grammar_index];
3071 for capture in mat.captures {
3072 if capture.index == config.indent_capture_ix {
3073 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3074 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3075 } else if Some(capture.index) == config.start_capture_ix {
3076 start = Some(Point::from_ts_point(capture.node.end_position()));
3077 } else if Some(capture.index) == config.end_capture_ix {
3078 end = Some(Point::from_ts_point(capture.node.start_position()));
3079 } else if Some(capture.index) == config.outdent_capture_ix {
3080 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3081 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3082 start_positions.push(StartPosition {
3083 start: Point::from_ts_point(capture.node.start_position()),
3084 suffix: suffix.clone(),
3085 });
3086 }
3087 }
3088
3089 matches.advance();
3090 if let Some((start, end)) = start.zip(end) {
3091 if start.row == end.row {
3092 continue;
3093 }
3094 let range = start..end;
3095 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3096 Err(ix) => indent_ranges.insert(ix, range),
3097 Ok(ix) => {
3098 let prev_range = &mut indent_ranges[ix];
3099 prev_range.end = prev_range.end.max(range.end);
3100 }
3101 }
3102 }
3103 }
3104
3105 let mut error_ranges = Vec::<Range<Point>>::new();
3106 let mut matches = self
3107 .syntax
3108 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3109 while let Some(mat) = matches.peek() {
3110 let node = mat.captures[0].node;
3111 let start = Point::from_ts_point(node.start_position());
3112 let end = Point::from_ts_point(node.end_position());
3113 let range = start..end;
3114 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3115 Ok(ix) | Err(ix) => ix,
3116 };
3117 let mut end_ix = ix;
3118 while let Some(existing_range) = error_ranges.get(end_ix) {
3119 if existing_range.end < end {
3120 end_ix += 1;
3121 } else {
3122 break;
3123 }
3124 }
3125 error_ranges.splice(ix..end_ix, [range]);
3126 matches.advance();
3127 }
3128
3129 outdent_positions.sort();
3130 for outdent_position in outdent_positions {
3131 // find the innermost indent range containing this outdent_position
3132 // set its end to the outdent position
3133 if let Some(range_to_truncate) = indent_ranges
3134 .iter_mut()
3135 .filter(|indent_range| indent_range.contains(&outdent_position))
3136 .next_back()
3137 {
3138 range_to_truncate.end = outdent_position;
3139 }
3140 }
3141
3142 start_positions.sort_by_key(|b| b.start);
3143
        // Find the suggested indentation increases and decreases based on regexes.
3145 let mut regex_outdent_map = HashMap::default();
3146 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3147 let mut start_positions_iter = start_positions.iter().peekable();
3148
3149 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3150 self.for_each_line(
3151 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3152 ..Point::new(row_range.end, 0),
3153 |row, line| {
3154 if config
3155 .decrease_indent_pattern
3156 .as_ref()
3157 .is_some_and(|regex| regex.is_match(line))
3158 {
3159 indent_change_rows.push((row, Ordering::Less));
3160 }
3161 if config
3162 .increase_indent_pattern
3163 .as_ref()
3164 .is_some_and(|regex| regex.is_match(line))
3165 {
3166 indent_change_rows.push((row + 1, Ordering::Greater));
3167 }
3168 while let Some(pos) = start_positions_iter.peek() {
3169 if pos.start.row < row {
3170 let pos = start_positions_iter.next().unwrap();
3171 last_seen_suffix
3172 .entry(pos.suffix.to_string())
3173 .or_default()
3174 .push(pos.start);
3175 } else {
3176 break;
3177 }
3178 }
3179 for rule in &config.decrease_indent_patterns {
3180 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3181 let row_start_column = self.indent_size_for_line(row).len;
3182 let basis_row = rule
3183 .valid_after
3184 .iter()
3185 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3186 .flatten()
3187 .filter(|start_point| start_point.column <= row_start_column)
3188 .max_by_key(|start_point| start_point.row);
3189 if let Some(outdent_to_row) = basis_row {
3190 regex_outdent_map.insert(row, outdent_to_row.row);
3191 }
3192 break;
3193 }
3194 }
3195 },
3196 );
3197
3198 let mut indent_changes = indent_change_rows.into_iter().peekable();
3199 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3200 prev_non_blank_row.unwrap_or(0)
3201 } else {
3202 row_range.start.saturating_sub(1)
3203 };
3204
3205 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3206 Some(row_range.map(move |row| {
3207 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3208
3209 let mut indent_from_prev_row = false;
3210 let mut outdent_from_prev_row = false;
3211 let mut outdent_to_row = u32::MAX;
3212 let mut from_regex = false;
3213
3214 while let Some((indent_row, delta)) = indent_changes.peek() {
3215 match indent_row.cmp(&row) {
3216 Ordering::Equal => match delta {
3217 Ordering::Less => {
3218 from_regex = true;
3219 outdent_from_prev_row = true
3220 }
3221 Ordering::Greater => {
3222 indent_from_prev_row = true;
3223 from_regex = true
3224 }
3225 _ => {}
3226 },
3227
3228 Ordering::Greater => break,
3229 Ordering::Less => {}
3230 }
3231
3232 indent_changes.next();
3233 }
3234
3235 for range in &indent_ranges {
3236 if range.start.row >= row {
3237 break;
3238 }
3239 if range.start.row == prev_row && range.end > row_start {
3240 indent_from_prev_row = true;
3241 }
3242 if range.end > prev_row_start && range.end <= row_start {
3243 outdent_to_row = outdent_to_row.min(range.start.row);
3244 }
3245 }
3246
3247 if let Some(basis_row) = regex_outdent_map.get(&row) {
3248 indent_from_prev_row = false;
3249 outdent_to_row = *basis_row;
3250 from_regex = true;
3251 }
3252
3253 let within_error = error_ranges
3254 .iter()
3255 .any(|e| e.start.row < row && e.end > row_start);
3256
3257 let suggestion = if outdent_to_row == prev_row
3258 || (outdent_from_prev_row && indent_from_prev_row)
3259 {
3260 Some(IndentSuggestion {
3261 basis_row: prev_row,
3262 delta: Ordering::Equal,
3263 within_error: within_error && !from_regex,
3264 })
3265 } else if indent_from_prev_row {
3266 Some(IndentSuggestion {
3267 basis_row: prev_row,
3268 delta: Ordering::Greater,
3269 within_error: within_error && !from_regex,
3270 })
3271 } else if outdent_to_row < prev_row {
3272 Some(IndentSuggestion {
3273 basis_row: outdent_to_row,
3274 delta: Ordering::Equal,
3275 within_error: within_error && !from_regex,
3276 })
3277 } else if outdent_from_prev_row {
3278 Some(IndentSuggestion {
3279 basis_row: prev_row,
3280 delta: Ordering::Less,
3281 within_error: within_error && !from_regex,
3282 })
3283 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3284 {
3285 Some(IndentSuggestion {
3286 basis_row: prev_row,
3287 delta: Ordering::Equal,
3288 within_error: within_error && !from_regex,
3289 })
3290 } else {
3291 None
3292 };
3293
3294 prev_row = row;
3295 prev_row_start = row_start;
3296 suggestion
3297 }))
3298 }
3299
3300 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3301 while row > 0 {
3302 row -= 1;
3303 if !self.is_line_blank(row) {
3304 return Some(row);
3305 }
3306 }
3307 None
3308 }
3309
3310 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3311 let captures = self.syntax.captures(range, &self.text, |grammar| {
3312 grammar
3313 .highlights_config
3314 .as_ref()
3315 .map(|config| &config.query)
3316 });
3317 let highlight_maps = captures
3318 .grammars()
3319 .iter()
3320 .map(|grammar| grammar.highlight_map())
3321 .collect();
3322 (captures, highlight_maps)
3323 }
3324
3325 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3326 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3327 /// returned in chunks where each chunk has a single syntax highlighting style and
3328 /// diagnostic status.
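    ///
    /// A minimal sketch that reassembles the text of a range from its chunks:
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk also carries optional highlight and diagnostic information.
    ///     text.push_str(chunk.text);
    /// }
    /// ```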
3329 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3330 let range = range.start.to_offset(self)..range.end.to_offset(self);
3331
3332 let mut syntax = None;
3333 if language_aware {
3334 syntax = Some(self.get_highlights(range.clone()));
3335 }
3336 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3337 let diagnostics = language_aware;
3338 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3339 }
3340
3341 pub fn highlighted_text_for_range<T: ToOffset>(
3342 &self,
3343 range: Range<T>,
3344 override_style: Option<HighlightStyle>,
3345 syntax_theme: &SyntaxTheme,
3346 ) -> HighlightedText {
3347 HighlightedText::from_buffer_range(
3348 range,
3349 &self.text,
3350 &self.syntax,
3351 override_style,
3352 syntax_theme,
3353 )
3354 }
3355
3356 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
3358 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3359 let mut line = String::new();
3360 let mut row = range.start.row;
3361 for chunk in self
3362 .as_rope()
3363 .chunks_in_range(range.to_offset(self))
3364 .chain(["\n"])
3365 {
3366 for (newline_ix, text) in chunk.split('\n').enumerate() {
3367 if newline_ix > 0 {
3368 callback(row, &line);
3369 row += 1;
3370 line.clear();
3371 }
3372 line.push_str(text);
3373 }
3374 }
3375 }
3376
3377 /// Iterates over every [`SyntaxLayer`] in the buffer.
3378 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3379 self.syntax_layers_for_range(0..self.len(), true)
3380 }
3381
3382 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3383 let offset = position.to_offset(self);
3384 self.syntax_layers_for_range(offset..offset, false)
3385 .filter(|l| l.node().end_byte() > offset)
3386 .last()
3387 }
3388
3389 pub fn syntax_layers_for_range<D: ToOffset>(
3390 &self,
3391 range: Range<D>,
3392 include_hidden: bool,
3393 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3394 self.syntax
3395 .layers_for_range(range, &self.text, include_hidden)
3396 }
3397
3398 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3399 &self,
3400 range: Range<D>,
3401 ) -> Option<SyntaxLayer<'_>> {
3402 let range = range.to_offset(self);
3403 self.syntax
3404 .layers_for_range(range, &self.text, false)
3405 .max_by(|a, b| {
3406 if a.depth != b.depth {
3407 a.depth.cmp(&b.depth)
3408 } else if a.offset.0 != b.offset.0 {
3409 a.offset.0.cmp(&b.offset.0)
3410 } else {
3411 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3412 }
3413 })
3414 }
3415
3416 /// Returns the main [`Language`].
3417 pub fn language(&self) -> Option<&Arc<Language>> {
3418 self.language.as_ref()
3419 }
3420
3421 /// Returns the [`Language`] at the given location.
3422 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3423 self.syntax_layer_at(position)
3424 .map(|info| info.language)
3425 .or(self.language.as_ref())
3426 }
3427
3428 /// Returns the settings for the language at the given location.
3429 pub fn settings_at<'a, D: ToOffset>(
3430 &'a self,
3431 position: D,
3432 cx: &'a App,
3433 ) -> Cow<'a, LanguageSettings> {
3434 language_settings(
3435 self.language_at(position).map(|l| l.name()),
3436 self.file.as_ref(),
3437 cx,
3438 )
3439 }
3440
3441 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3442 CharClassifier::new(self.language_scope_at(point))
3443 }
3444
3445 /// Returns the [`LanguageScope`] at the given location.
3446 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3447 let offset = position.to_offset(self);
3448 let mut scope = None;
3449 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3450
3451 // Use the layer that has the smallest node intersecting the given point.
3452 for layer in self
3453 .syntax
3454 .layers_for_range(offset..offset, &self.text, false)
3455 {
3456 let mut cursor = layer.node().walk();
3457
3458 let mut range = None;
3459 loop {
3460 let child_range = cursor.node().byte_range();
3461 if !child_range.contains(&offset) {
3462 break;
3463 }
3464
3465 range = Some(child_range);
3466 if cursor.goto_first_child_for_byte(offset).is_none() {
3467 break;
3468 }
3469 }
3470
3471 if let Some(range) = range
3472 && smallest_range_and_depth.as_ref().is_none_or(
3473 |(smallest_range, smallest_range_depth)| {
3474 if layer.depth > *smallest_range_depth {
3475 true
3476 } else if layer.depth == *smallest_range_depth {
3477 range.len() < smallest_range.len()
3478 } else {
3479 false
3480 }
3481 },
3482 )
3483 {
3484 smallest_range_and_depth = Some((range, layer.depth));
3485 scope = Some(LanguageScope {
3486 language: layer.language.clone(),
3487 override_id: layer.override_id(offset, &self.text),
3488 });
3489 }
3490 }
3491
3492 scope.or_else(|| {
3493 self.language.clone().map(|language| LanguageScope {
3494 language,
3495 override_id: None,
3496 })
3497 })
3498 }
3499
3500 /// Returns a tuple of the range and character kind of the word
3501 /// surrounding the given position.
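    ///
    /// # Example
    ///
    /// A minimal sketch (illustrative only; assumes a `snapshot: BufferSnapshot` and a byte
    /// offset `cursor` are in scope):
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(cursor, None);
    /// if kind == Some(CharKind::Word) {
    ///     let word: String = snapshot.text_for_range(range).collect();
    /// }
    /// ```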
3502 pub fn surrounding_word<T: ToOffset>(
3503 &self,
3504 start: T,
3505 scope_context: Option<CharScopeContext>,
3506 ) -> (Range<usize>, Option<CharKind>) {
3507 let mut start = start.to_offset(self);
3508 let mut end = start;
3509 let mut next_chars = self.chars_at(start).take(128).peekable();
3510 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3511
3512 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3513 let word_kind = cmp::max(
3514 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3515 next_chars.peek().copied().map(|c| classifier.kind(c)),
3516 );
3517
3518 for ch in prev_chars {
3519 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3520 start -= ch.len_utf8();
3521 } else {
3522 break;
3523 }
3524 }
3525
3526 for ch in next_chars {
3527 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3528 end += ch.len_utf8();
3529 } else {
3530 break;
3531 }
3532 }
3533
3534 (start..end, word_kind)
3535 }
3536
3537 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3538 /// range. When `require_larger` is true, the node found must be larger than the query range.
3539 ///
3540 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3541 /// be moved to the root of the tree.
3542 fn goto_node_enclosing_range(
3543 cursor: &mut tree_sitter::TreeCursor,
3544 query_range: &Range<usize>,
3545 require_larger: bool,
3546 ) -> bool {
3547 let mut ascending = false;
3548 loop {
3549 let mut range = cursor.node().byte_range();
3550 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3553 if range.start > query_range.start {
3554 cursor.goto_previous_sibling();
3555 range = cursor.node().byte_range();
3556 }
3557 } else {
3558 // When the query range is non-empty and the current node ends exactly at the start,
3559 // move to the next sibling to find a node that extends beyond the start.
3560 if range.end == query_range.start {
3561 cursor.goto_next_sibling();
3562 range = cursor.node().byte_range();
3563 }
3564 }
3565
3566 let encloses = range.contains_inclusive(query_range)
3567 && (!require_larger || range.len() > query_range.len());
3568 if !encloses {
3569 ascending = true;
3570 if !cursor.goto_parent() {
3571 return false;
3572 }
3573 continue;
3574 } else if ascending {
3575 return true;
3576 }
3577
3578 // Descend into the current node.
3579 if cursor
3580 .goto_first_child_for_byte(query_range.start)
3581 .is_none()
3582 {
3583 return true;
3584 }
3585 }
3586 }
3587
3588 pub fn syntax_ancestor<'a, T: ToOffset>(
3589 &'a self,
3590 range: Range<T>,
3591 ) -> Option<tree_sitter::Node<'a>> {
3592 let range = range.start.to_offset(self)..range.end.to_offset(self);
3593 let mut result: Option<tree_sitter::Node<'a>> = None;
3594 for layer in self
3595 .syntax
3596 .layers_for_range(range.clone(), &self.text, true)
3597 {
3598 let mut cursor = layer.node().walk();
3599
3600 // Find the node that both contains the range and is larger than it.
3601 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3602 continue;
3603 }
3604
3605 let left_node = cursor.node();
3606 let mut layer_result = left_node;
3607
3608 // For an empty range, try to find another node immediately to the right of the range.
3609 if left_node.end_byte() == range.start {
3610 let mut right_node = None;
3611 while !cursor.goto_next_sibling() {
3612 if !cursor.goto_parent() {
3613 break;
3614 }
3615 }
3616
3617 while cursor.node().start_byte() == range.start {
3618 right_node = Some(cursor.node());
3619 if !cursor.goto_first_child() {
3620 break;
3621 }
3622 }
3623
3624 // If there is a candidate node on both sides of the (empty) range, then
3625 // decide between the two by favoring a named node over an anonymous token.
3626 // If both nodes are the same in that regard, favor the right one.
3627 if let Some(right_node) = right_node
3628 && (right_node.is_named() || !left_node.is_named())
3629 {
3630 layer_result = right_node;
3631 }
3632 }
3633
3634 if let Some(previous_result) = &result
3635 && previous_result.byte_range().len() < layer_result.byte_range().len()
3636 {
3637 continue;
3638 }
3639 result = Some(layer_result);
3640 }
3641
3642 result
3643 }
3644
3645 /// Find the previous sibling syntax node at the given range.
3646 ///
3647 /// This function locates the syntax node that precedes the node containing
3648 /// the given range. It searches hierarchically by:
3649 /// 1. Finding the node that contains the given range
3650 /// 2. Looking for the previous sibling at the same tree level
3651 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3652 ///
3653 /// Returns `None` if there is no previous sibling at any ancestor level.
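    ///
    /// A minimal sketch (illustrative only; assumes a `snapshot: BufferSnapshot` and a byte
    /// offset range `selection` are in scope):
    ///
    /// ```ignore
    /// if let Some(prev) = snapshot.syntax_prev_sibling(selection.clone()) {
    ///     // `prev` is a `tree_sitter::Node`; e.g. inspect `prev.byte_range()`.
    /// }
    /// ```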
3654 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3655 &'a self,
3656 range: Range<T>,
3657 ) -> Option<tree_sitter::Node<'a>> {
3658 let range = range.start.to_offset(self)..range.end.to_offset(self);
3659 let mut result: Option<tree_sitter::Node<'a>> = None;
3660
3661 for layer in self
3662 .syntax
3663 .layers_for_range(range.clone(), &self.text, true)
3664 {
3665 let mut cursor = layer.node().walk();
3666
3667 // Find the node that contains the range
3668 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3669 continue;
3670 }
3671
3672 // Look for the previous sibling, moving up ancestor levels if needed
3673 loop {
3674 if cursor.goto_previous_sibling() {
3675 let layer_result = cursor.node();
3676
3677 if let Some(previous_result) = &result {
3678 if previous_result.byte_range().end < layer_result.byte_range().end {
3679 continue;
3680 }
3681 }
3682 result = Some(layer_result);
3683 break;
3684 }
3685
3686 // No sibling found at this level, try moving up to parent
3687 if !cursor.goto_parent() {
3688 break;
3689 }
3690 }
3691 }
3692
3693 result
3694 }
3695
3696 /// Find the next sibling syntax node at the given range.
3697 ///
3698 /// This function locates the syntax node that follows the node containing
3699 /// the given range. It searches hierarchically by:
3700 /// 1. Finding the node that contains the given range
3701 /// 2. Looking for the next sibling at the same tree level
3702 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3703 ///
3704 /// Returns `None` if there is no next sibling at any ancestor level.
3705 pub fn syntax_next_sibling<'a, T: ToOffset>(
3706 &'a self,
3707 range: Range<T>,
3708 ) -> Option<tree_sitter::Node<'a>> {
3709 let range = range.start.to_offset(self)..range.end.to_offset(self);
3710 let mut result: Option<tree_sitter::Node<'a>> = None;
3711
3712 for layer in self
3713 .syntax
3714 .layers_for_range(range.clone(), &self.text, true)
3715 {
3716 let mut cursor = layer.node().walk();
3717
3718 // Find the node that contains the range
3719 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3720 continue;
3721 }
3722
3723 // Look for the next sibling, moving up ancestor levels if needed
3724 loop {
3725 if cursor.goto_next_sibling() {
3726 let layer_result = cursor.node();
3727
3728 if let Some(previous_result) = &result {
3729 if previous_result.byte_range().start > layer_result.byte_range().start {
3730 continue;
3731 }
3732 }
3733 result = Some(layer_result);
3734 break;
3735 }
3736
3737 // No sibling found at this level, try moving up to parent
3738 if !cursor.goto_parent() {
3739 break;
3740 }
3741 }
3742 }
3743
3744 result
3745 }
3746
3747 /// Returns the root syntax node within the given row
3748 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3749 let start_offset = position.to_offset(self);
3750
3751 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3752
3753 let layer = self
3754 .syntax
3755 .layers_for_range(start_offset..start_offset, &self.text, true)
3756 .next()?;
3757
3758 let mut cursor = layer.node().walk();
3759
3760 // Descend to the first leaf that touches the start of the range.
3761 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3762 if cursor.node().end_byte() == start_offset {
3763 cursor.goto_next_sibling();
3764 }
3765 }
3766
3767 // Ascend to the root node within the same row.
3768 while cursor.goto_parent() {
3769 if cursor.node().start_position().row != row {
3770 break;
3771 }
3772 }
3773
3774 Some(cursor.node())
3775 }
3776
3777 /// Returns the outline for the buffer.
3778 ///
3779 /// This method allows passing an optional [`SyntaxTheme`] to
3780 /// syntax-highlight the returned symbols.
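    ///
    /// # Example
    ///
    /// A minimal sketch (illustrative only; assumes a `snapshot: BufferSnapshot` is in scope):
    ///
    /// ```ignore
    /// // Outline of the whole buffer, without syntax highlighting.
    /// let outline = snapshot.outline(None);
    ///
    /// // Or collect the raw items for the same range, with extra context included.
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```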
3781 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3782 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3783 }
3784
3785 /// Returns all the symbols that contain the given position.
3786 ///
3787 /// This method allows passing an optional [`SyntaxTheme`] to
3788 /// syntax-highlight the returned symbols.
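    ///
    /// A minimal sketch (illustrative only; assumes a `snapshot: BufferSnapshot` and a byte
    /// offset `cursor` are in scope):
    ///
    /// ```ignore
    /// // Text of the symbols that contain the cursor (e.g. for breadcrumbs).
    /// let symbols: Vec<String> = snapshot
    ///     .symbols_containing(cursor, None)
    ///     .into_iter()
    ///     .map(|item| item.text)
    ///     .collect();
    /// ```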
3789 pub fn symbols_containing<T: ToOffset>(
3790 &self,
3791 position: T,
3792 theme: Option<&SyntaxTheme>,
3793 ) -> Vec<OutlineItem<Anchor>> {
3794 let position = position.to_offset(self);
3795 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3796 let end = self.clip_offset(position + 1, Bias::Right);
3797 let mut items = self.outline_items_containing(start..end, false, theme);
3798 let mut prev_depth = None;
3799 items.retain(|item| {
3800 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3801 prev_depth = Some(item.depth);
3802 result
3803 });
3804 items
3805 }
3806
3807 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3808 let range = range.to_offset(self);
3809 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3810 grammar.outline_config.as_ref().map(|c| &c.query)
3811 });
3812 let configs = matches
3813 .grammars()
3814 .iter()
3815 .map(|g| g.outline_config.as_ref().unwrap())
3816 .collect::<Vec<_>>();
3817
3818 while let Some(mat) = matches.peek() {
3819 let config = &configs[mat.grammar_index];
3820 let containing_item_node = maybe!({
3821 let item_node = mat.captures.iter().find_map(|cap| {
3822 if cap.index == config.item_capture_ix {
3823 Some(cap.node)
3824 } else {
3825 None
3826 }
3827 })?;
3828
3829 let item_byte_range = item_node.byte_range();
3830 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3831 None
3832 } else {
3833 Some(item_node)
3834 }
3835 });
3836
3837 if let Some(item_node) = containing_item_node {
3838 return Some(
3839 Point::from_ts_point(item_node.start_position())
3840 ..Point::from_ts_point(item_node.end_position()),
3841 );
3842 }
3843
3844 matches.advance();
3845 }
3846 None
3847 }
3848
3849 pub fn outline_items_containing<T: ToOffset>(
3850 &self,
3851 range: Range<T>,
3852 include_extra_context: bool,
3853 theme: Option<&SyntaxTheme>,
3854 ) -> Vec<OutlineItem<Anchor>> {
3855 self.outline_items_containing_internal(
3856 range,
3857 include_extra_context,
3858 theme,
3859 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3860 )
3861 }
3862
3863 pub fn outline_items_as_points_containing<T: ToOffset>(
3864 &self,
3865 range: Range<T>,
3866 include_extra_context: bool,
3867 theme: Option<&SyntaxTheme>,
3868 ) -> Vec<OutlineItem<Point>> {
3869 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3870 range
3871 })
3872 }
3873
3874 fn outline_items_containing_internal<T: ToOffset, U>(
3875 &self,
3876 range: Range<T>,
3877 include_extra_context: bool,
3878 theme: Option<&SyntaxTheme>,
3879 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3880 ) -> Vec<OutlineItem<U>> {
3881 let range = range.to_offset(self);
3882 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3883 grammar.outline_config.as_ref().map(|c| &c.query)
3884 });
3885
3886 let mut items = Vec::new();
3887 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3888 while let Some(mat) = matches.peek() {
3889 let config = matches.grammars()[mat.grammar_index]
3890 .outline_config
3891 .as_ref()
3892 .unwrap();
3893 if let Some(item) =
3894 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3895 {
3896 items.push(item);
3897 } else if let Some(capture) = mat
3898 .captures
3899 .iter()
3900 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3901 {
3902 let capture_range = capture.node.start_position()..capture.node.end_position();
3903 let mut capture_row_range =
3904 capture_range.start.row as u32..capture_range.end.row as u32;
3905 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3906 {
3907 capture_row_range.end -= 1;
3908 }
3909 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3910 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3911 last_row_range.end = capture_row_range.end;
3912 } else {
3913 annotation_row_ranges.push(capture_row_range);
3914 }
3915 } else {
3916 annotation_row_ranges.push(capture_row_range);
3917 }
3918 }
3919 matches.advance();
3920 }
3921
3922 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3923
        // Assign depths based on containment relationships and convert the ranges via `range_callback`.
3925 let mut item_ends_stack = Vec::<Point>::new();
3926 let mut anchor_items = Vec::new();
3927 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3928 for item in items {
3929 while let Some(last_end) = item_ends_stack.last().copied() {
3930 if last_end < item.range.end {
3931 item_ends_stack.pop();
3932 } else {
3933 break;
3934 }
3935 }
3936
3937 let mut annotation_row_range = None;
3938 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3939 let row_preceding_item = item.range.start.row.saturating_sub(1);
3940 if next_annotation_row_range.end < row_preceding_item {
3941 annotation_row_ranges.next();
3942 } else {
3943 if next_annotation_row_range.end == row_preceding_item {
3944 annotation_row_range = Some(next_annotation_row_range.clone());
3945 annotation_row_ranges.next();
3946 }
3947 break;
3948 }
3949 }
3950
3951 anchor_items.push(OutlineItem {
3952 depth: item_ends_stack.len(),
3953 range: range_callback(self, item.range.clone()),
3954 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3955 text: item.text,
3956 highlight_ranges: item.highlight_ranges,
3957 name_ranges: item.name_ranges,
3958 body_range: item.body_range.map(|r| range_callback(self, r)),
3959 annotation_range: annotation_row_range.map(|annotation_range| {
3960 let point_range = Point::new(annotation_range.start, 0)
3961 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3962 range_callback(self, point_range)
3963 }),
3964 });
3965 item_ends_stack.push(item.range.end);
3966 }
3967
3968 anchor_items
3969 }
3970
3971 fn next_outline_item(
3972 &self,
3973 config: &OutlineConfig,
3974 mat: &SyntaxMapMatch,
3975 range: &Range<usize>,
3976 include_extra_context: bool,
3977 theme: Option<&SyntaxTheme>,
3978 ) -> Option<OutlineItem<Point>> {
3979 let item_node = mat.captures.iter().find_map(|cap| {
3980 if cap.index == config.item_capture_ix {
3981 Some(cap.node)
3982 } else {
3983 None
3984 }
3985 })?;
3986
3987 let item_byte_range = item_node.byte_range();
3988 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3989 return None;
3990 }
3991 let item_point_range = Point::from_ts_point(item_node.start_position())
3992 ..Point::from_ts_point(item_node.end_position());
3993
3994 let mut open_point = None;
3995 let mut close_point = None;
3996
3997 let mut buffer_ranges = Vec::new();
3998 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
3999 let mut range = node.start_byte()..node.end_byte();
4000 let start = node.start_position();
4001 if node.end_position().row > start.row {
4002 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4003 }
4004
4005 if !range.is_empty() {
4006 buffer_ranges.push((range, node_is_name));
4007 }
4008 };
4009
4010 for capture in mat.captures {
4011 if capture.index == config.name_capture_ix {
4012 add_to_buffer_ranges(capture.node, true);
4013 } else if Some(capture.index) == config.context_capture_ix
4014 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4015 {
4016 add_to_buffer_ranges(capture.node, false);
4017 } else {
4018 if Some(capture.index) == config.open_capture_ix {
4019 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4020 } else if Some(capture.index) == config.close_capture_ix {
4021 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4022 }
4023 }
4024 }
4025
4026 if buffer_ranges.is_empty() {
4027 return None;
4028 }
4029 let source_range_for_text =
4030 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4031
4032 let mut text = String::new();
4033 let mut highlight_ranges = Vec::new();
4034 let mut name_ranges = Vec::new();
4035 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4036 let mut last_buffer_range_end = 0;
4037 for (buffer_range, is_name) in buffer_ranges {
4038 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4039 if space_added {
4040 text.push(' ');
4041 }
4042 let before_append_len = text.len();
4043 let mut offset = buffer_range.start;
4044 chunks.seek(buffer_range.clone());
4045 for mut chunk in chunks.by_ref() {
4046 if chunk.text.len() > buffer_range.end - offset {
4047 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4048 offset = buffer_range.end;
4049 } else {
4050 offset += chunk.text.len();
4051 }
4052 let style = chunk
4053 .syntax_highlight_id
4054 .zip(theme)
4055 .and_then(|(highlight, theme)| highlight.style(theme));
4056 if let Some(style) = style {
4057 let start = text.len();
4058 let end = start + chunk.text.len();
4059 highlight_ranges.push((start..end, style));
4060 }
4061 text.push_str(chunk.text);
4062 if offset >= buffer_range.end {
4063 break;
4064 }
4065 }
4066 if is_name {
4067 let after_append_len = text.len();
4068 let start = if space_added && !name_ranges.is_empty() {
4069 before_append_len - 1
4070 } else {
4071 before_append_len
4072 };
4073 name_ranges.push(start..after_append_len);
4074 }
4075 last_buffer_range_end = buffer_range.end;
4076 }
4077
4078 Some(OutlineItem {
4079 depth: 0, // We'll calculate the depth later
4080 range: item_point_range,
4081 source_range_for_text: source_range_for_text.to_point(self),
4082 text,
4083 highlight_ranges,
4084 name_ranges,
4085 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4086 annotation_range: None,
4087 })
4088 }
4089
4090 pub fn function_body_fold_ranges<T: ToOffset>(
4091 &self,
4092 within: Range<T>,
4093 ) -> impl Iterator<Item = Range<usize>> + '_ {
4094 self.text_object_ranges(within, TreeSitterOptions::default())
4095 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4096 }
4097
    /// For each grammar active in the given range, runs the [`tree_sitter::Query`]
    /// returned by the provided callback.
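    ///
    /// A minimal sketch (illustrative only; assumes a `snapshot: BufferSnapshot` is in scope),
    /// using the outline query as the per-grammar query:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|c| &c.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here...
    ///     matches.advance();
    /// }
    /// ```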
4100 pub fn matches(
4101 &self,
4102 range: Range<usize>,
4103 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4104 ) -> SyntaxMapMatches<'_> {
4105 self.syntax.matches(range, self, query)
4106 }
4107
4108 pub fn all_bracket_ranges(
4109 &self,
4110 range: Range<usize>,
4111 ) -> impl Iterator<Item = BracketMatch> + '_ {
4112 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4113 grammar.brackets_config.as_ref().map(|c| &c.query)
4114 });
4115 let configs = matches
4116 .grammars()
4117 .iter()
4118 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4119 .collect::<Vec<_>>();
4120
4121 iter::from_fn(move || {
4122 while let Some(mat) = matches.peek() {
4123 let mut open = None;
4124 let mut close = None;
4125 let config = &configs[mat.grammar_index];
4126 let pattern = &config.patterns[mat.pattern_index];
4127 for capture in mat.captures {
4128 if capture.index == config.open_capture_ix {
4129 open = Some(capture.node.byte_range());
4130 } else if capture.index == config.close_capture_ix {
4131 close = Some(capture.node.byte_range());
4132 }
4133 }
4134
4135 matches.advance();
4136
4137 let Some((open_range, close_range)) = open.zip(close) else {
4138 continue;
4139 };
4140
4141 let bracket_range = open_range.start..=close_range.end;
4142 if !bracket_range.overlaps(&range) {
4143 continue;
4144 }
4145
4146 return Some(BracketMatch {
4147 open_range,
4148 close_range,
4149 newline_only: pattern.newline_only,
4150 });
4151 }
4152 None
4153 })
4154 }
4155
4156 /// Returns bracket range pairs overlapping or adjacent to `range`
4157 pub fn bracket_ranges<T: ToOffset>(
4158 &self,
4159 range: Range<T>,
4160 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Expand the range so that bracket pairs adjacent to it are also matched.
4162 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4163 self.all_bracket_ranges(range)
4164 .filter(|pair| !pair.newline_only)
4165 }
4166
4167 pub fn debug_variables_query<T: ToOffset>(
4168 &self,
4169 range: Range<T>,
4170 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4171 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4172
4173 let mut matches = self.syntax.matches_with_options(
4174 range.clone(),
4175 &self.text,
4176 TreeSitterOptions::default(),
4177 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4178 );
4179
4180 let configs = matches
4181 .grammars()
4182 .iter()
4183 .map(|grammar| grammar.debug_variables_config.as_ref())
4184 .collect::<Vec<_>>();
4185
4186 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4187
4188 iter::from_fn(move || {
4189 loop {
4190 while let Some(capture) = captures.pop() {
4191 if capture.0.overlaps(&range) {
4192 return Some(capture);
4193 }
4194 }
4195
4196 let mat = matches.peek()?;
4197
4198 let Some(config) = configs[mat.grammar_index].as_ref() else {
4199 matches.advance();
4200 continue;
4201 };
4202
4203 for capture in mat.captures {
4204 let Some(ix) = config
4205 .objects_by_capture_ix
4206 .binary_search_by_key(&capture.index, |e| e.0)
4207 .ok()
4208 else {
4209 continue;
4210 };
4211 let text_object = config.objects_by_capture_ix[ix].1;
4212 let byte_range = capture.node.byte_range();
4213
4214 let mut found = false;
4215 for (range, existing) in captures.iter_mut() {
4216 if existing == &text_object {
4217 range.start = range.start.min(byte_range.start);
4218 range.end = range.end.max(byte_range.end);
4219 found = true;
4220 break;
4221 }
4222 }
4223
4224 if !found {
4225 captures.push((byte_range, text_object));
4226 }
4227 }
4228
4229 matches.advance();
4230 }
4231 })
4232 }
4233
4234 pub fn text_object_ranges<T: ToOffset>(
4235 &self,
4236 range: Range<T>,
4237 options: TreeSitterOptions,
4238 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4239 let range =
4240 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4241
4242 let mut matches =
4243 self.syntax
4244 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4245 grammar.text_object_config.as_ref().map(|c| &c.query)
4246 });
4247
4248 let configs = matches
4249 .grammars()
4250 .iter()
4251 .map(|grammar| grammar.text_object_config.as_ref())
4252 .collect::<Vec<_>>();
4253
4254 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4255
4256 iter::from_fn(move || {
4257 loop {
4258 while let Some(capture) = captures.pop() {
4259 if capture.0.overlaps(&range) {
4260 return Some(capture);
4261 }
4262 }
4263
4264 let mat = matches.peek()?;
4265
4266 let Some(config) = configs[mat.grammar_index].as_ref() else {
4267 matches.advance();
4268 continue;
4269 };
4270
4271 for capture in mat.captures {
4272 let Some(ix) = config
4273 .text_objects_by_capture_ix
4274 .binary_search_by_key(&capture.index, |e| e.0)
4275 .ok()
4276 else {
4277 continue;
4278 };
4279 let text_object = config.text_objects_by_capture_ix[ix].1;
4280 let byte_range = capture.node.byte_range();
4281
4282 let mut found = false;
4283 for (range, existing) in captures.iter_mut() {
4284 if existing == &text_object {
4285 range.start = range.start.min(byte_range.start);
4286 range.end = range.end.max(byte_range.end);
4287 found = true;
4288 break;
4289 }
4290 }
4291
4292 if !found {
4293 captures.push((byte_range, text_object));
4294 }
4295 }
4296
4297 matches.advance();
4298 }
4299 })
4300 }
4301
4302 /// Returns enclosing bracket ranges containing the given range
4303 pub fn enclosing_bracket_ranges<T: ToOffset>(
4304 &self,
4305 range: Range<T>,
4306 ) -> impl Iterator<Item = BracketMatch> + '_ {
4307 let range = range.start.to_offset(self)..range.end.to_offset(self);
4308
4309 self.bracket_ranges(range.clone()).filter(move |pair| {
4310 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4311 })
4312 }
4313
    /// Returns the smallest enclosing pair of bracket ranges containing the given range,
    /// or `None` if no bracket pair contains it.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket pairs are considered.
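    ///
    /// A minimal sketch (illustrative only; assumes a `snapshot: BufferSnapshot` and a byte
    /// offset range `selection` are in scope). The example filter skips pairs whose open and
    /// close tokens are both a single character (e.g. quotes):
    ///
    /// ```ignore
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(
    ///     selection,
    ///     Some(&|open: Range<usize>, close: Range<usize>| open.len() > 1 || close.len() > 1),
    /// );
    /// if let Some((open, close)) = innermost {
    ///     // `open` and `close` are the byte ranges of the bracket tokens.
    /// }
    /// ```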
4317 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4318 &self,
4319 range: Range<T>,
4320 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4321 ) -> Option<(Range<usize>, Range<usize>)> {
4322 let range = range.start.to_offset(self)..range.end.to_offset(self);
4323
4324 // Get the ranges of the innermost pair of brackets.
4325 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4326
4327 for pair in self.enclosing_bracket_ranges(range) {
4328 if let Some(range_filter) = range_filter
4329 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4330 {
4331 continue;
4332 }
4333
4334 let len = pair.close_range.end - pair.open_range.start;
4335
4336 if let Some((existing_open, existing_close)) = &result {
4337 let existing_len = existing_close.end - existing_open.start;
4338 if len > existing_len {
4339 continue;
4340 }
4341 }
4342
4343 result = Some((pair.open_range, pair.close_range));
4344 }
4345
4346 result
4347 }
4348
    /// Returns the offset ranges of any matches of the redaction query.
4350 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4351 /// will be run on the relevant section of the buffer.
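    ///
    /// A minimal sketch (illustrative only; assumes a `snapshot: BufferSnapshot` is in scope):
    ///
    /// ```ignore
    /// for range in snapshot.redacted_ranges(0..snapshot.len()) {
    ///     // `range` is a byte range whose contents the language marks as sensitive.
    /// }
    /// ```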
4352 pub fn redacted_ranges<T: ToOffset>(
4353 &self,
4354 range: Range<T>,
4355 ) -> impl Iterator<Item = Range<usize>> + '_ {
4356 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4357 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4358 grammar
4359 .redactions_config
4360 .as_ref()
4361 .map(|config| &config.query)
4362 });
4363
4364 let configs = syntax_matches
4365 .grammars()
4366 .iter()
4367 .map(|grammar| grammar.redactions_config.as_ref())
4368 .collect::<Vec<_>>();
4369
4370 iter::from_fn(move || {
4371 let redacted_range = syntax_matches
4372 .peek()
4373 .and_then(|mat| {
4374 configs[mat.grammar_index].and_then(|config| {
4375 mat.captures
4376 .iter()
4377 .find(|capture| capture.index == config.redaction_capture_ix)
4378 })
4379 })
4380 .map(|mat| mat.node.byte_range());
4381 syntax_matches.advance();
4382 redacted_range
4383 })
4384 }
4385
4386 pub fn injections_intersecting_range<T: ToOffset>(
4387 &self,
4388 range: Range<T>,
4389 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4390 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4391
4392 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4393 grammar
4394 .injection_config
4395 .as_ref()
4396 .map(|config| &config.query)
4397 });
4398
4399 let configs = syntax_matches
4400 .grammars()
4401 .iter()
4402 .map(|grammar| grammar.injection_config.as_ref())
4403 .collect::<Vec<_>>();
4404
4405 iter::from_fn(move || {
4406 let ranges = syntax_matches.peek().and_then(|mat| {
4407 let config = &configs[mat.grammar_index]?;
4408 let content_capture_range = mat.captures.iter().find_map(|capture| {
4409 if capture.index == config.content_capture_ix {
4410 Some(capture.node.byte_range())
4411 } else {
4412 None
4413 }
4414 })?;
4415 let language = self.language_at(content_capture_range.start)?;
4416 Some((content_capture_range, language))
4417 });
4418 syntax_matches.advance();
4419 ranges
4420 })
4421 }
4422
4423 pub fn runnable_ranges(
4424 &self,
4425 offset_range: Range<usize>,
4426 ) -> impl Iterator<Item = RunnableRange> + '_ {
4427 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4428 grammar.runnable_config.as_ref().map(|config| &config.query)
4429 });
4430
4431 let test_configs = syntax_matches
4432 .grammars()
4433 .iter()
4434 .map(|grammar| grammar.runnable_config.as_ref())
4435 .collect::<Vec<_>>();
4436
4437 iter::from_fn(move || {
4438 loop {
4439 let mat = syntax_matches.peek()?;
4440
4441 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4442 let mut run_range = None;
4443 let full_range = mat.captures.iter().fold(
4444 Range {
4445 start: usize::MAX,
4446 end: 0,
4447 },
4448 |mut acc, next| {
4449 let byte_range = next.node.byte_range();
4450 if acc.start > byte_range.start {
4451 acc.start = byte_range.start;
4452 }
4453 if acc.end < byte_range.end {
4454 acc.end = byte_range.end;
4455 }
4456 acc
4457 },
4458 );
4459 if full_range.start > full_range.end {
4460 // We did not find a full spanning range of this match.
4461 return None;
4462 }
4463 let extra_captures: SmallVec<[_; 1]> =
4464 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4465 test_configs
4466 .extra_captures
4467 .get(capture.index as usize)
4468 .cloned()
4469 .and_then(|tag_name| match tag_name {
4470 RunnableCapture::Named(name) => {
4471 Some((capture.node.byte_range(), name))
4472 }
4473 RunnableCapture::Run => {
4474 let _ = run_range.insert(capture.node.byte_range());
4475 None
4476 }
4477 })
4478 }));
4479 let run_range = run_range?;
4480 let tags = test_configs
4481 .query
4482 .property_settings(mat.pattern_index)
4483 .iter()
4484 .filter_map(|property| {
4485 if *property.key == *"tag" {
4486 property
4487 .value
4488 .as_ref()
4489 .map(|value| RunnableTag(value.to_string().into()))
4490 } else {
4491 None
4492 }
4493 })
4494 .collect();
4495 let extra_captures = extra_captures
4496 .into_iter()
4497 .map(|(range, name)| {
4498 (
4499 name.to_string(),
4500 self.text_for_range(range).collect::<String>(),
4501 )
4502 })
4503 .collect();
4504 // All tags should have the same range.
4505 Some(RunnableRange {
4506 run_range,
4507 full_range,
4508 runnable: Runnable {
4509 tags,
4510 language: mat.language,
4511 buffer: self.remote_id(),
4512 },
4513 extra_captures,
4514 buffer_id: self.remote_id(),
4515 })
4516 });
4517
4518 syntax_matches.advance();
4519 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns None. But a match that
                    // did not contain a run marker shouldn't end this iterator, so in that case
                    // we loop around and try the next match.
4522 return test_range;
4523 }
4524 }
4525 })
4526 }
4527
4528 /// Returns selections for remote peers intersecting the given range.
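    ///
    /// A minimal sketch (illustrative only; assumes a `snapshot: BufferSnapshot` is in scope):
    ///
    /// ```ignore
    /// let range = snapshot.anchor_before(0)..snapshot.anchor_after(snapshot.len());
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(range, false)
    /// {
    ///     for selection in selections {
    ///         // `selection.start` and `selection.end` are `Anchor`s.
    ///     }
    /// }
    /// ```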
4529 #[allow(clippy::type_complexity)]
4530 pub fn selections_in_range(
4531 &self,
4532 range: Range<Anchor>,
4533 include_local: bool,
4534 ) -> impl Iterator<
4535 Item = (
4536 ReplicaId,
4537 bool,
4538 CursorShape,
4539 impl Iterator<Item = &Selection<Anchor>> + '_,
4540 ),
4541 > + '_ {
4542 self.remote_selections
4543 .iter()
4544 .filter(move |(replica_id, set)| {
4545 (include_local || **replica_id != self.text.replica_id())
4546 && !set.selections.is_empty()
4547 })
4548 .map(move |(replica_id, set)| {
4549 let start_ix = match set.selections.binary_search_by(|probe| {
4550 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4551 }) {
4552 Ok(ix) | Err(ix) => ix,
4553 };
4554 let end_ix = match set.selections.binary_search_by(|probe| {
4555 probe.start.cmp(&range.end, self).then(Ordering::Less)
4556 }) {
4557 Ok(ix) | Err(ix) => ix,
4558 };
4559
4560 (
4561 *replica_id,
4562 set.line_mode,
4563 set.cursor_shape,
4564 set.selections[start_ix..end_ix].iter(),
4565 )
4566 })
4567 }
4568
    /// Returns whether the buffer contains any diagnostics.
4570 pub fn has_diagnostics(&self) -> bool {
4571 !self.diagnostics.is_empty()
4572 }
4573
4574 /// Returns all the diagnostics intersecting the given range.
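    ///
    /// A minimal sketch (illustrative only; assumes a `snapshot: BufferSnapshot` is in scope),
    /// resolving the diagnostic ranges to byte offsets:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     let severity = entry.diagnostic.severity;
    ///     let byte_range = entry.range;
    /// }
    /// ```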
4575 pub fn diagnostics_in_range<'a, T, O>(
4576 &'a self,
4577 search_range: Range<T>,
4578 reversed: bool,
4579 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4580 where
4581 T: 'a + Clone + ToOffset,
4582 O: 'a + FromAnchor,
4583 {
4584 let mut iterators: Vec<_> = self
4585 .diagnostics
4586 .iter()
4587 .map(|(_, collection)| {
4588 collection
4589 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4590 .peekable()
4591 })
4592 .collect();
4593
4594 std::iter::from_fn(move || {
4595 let (next_ix, _) = iterators
4596 .iter_mut()
4597 .enumerate()
4598 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4599 .min_by(|(_, a), (_, b)| {
4600 let cmp = a
4601 .range
4602 .start
4603 .cmp(&b.range.start, self)
4604 // when range is equal, sort by diagnostic severity
4605 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4606 // and stabilize order with group_id
4607 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4608 if reversed { cmp.reverse() } else { cmp }
4609 })?;
4610 iterators[next_ix]
4611 .next()
4612 .map(
4613 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4614 diagnostic,
4615 range: FromAnchor::from_anchor(&range.start, self)
4616 ..FromAnchor::from_anchor(&range.end, self),
4617 },
4618 )
4619 })
4620 }
4621
4622 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4623 /// should be used instead.
4624 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4625 &self.diagnostics
4626 }
4627
4628 /// Returns all the diagnostic groups associated with the given
4629 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
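    ///
    /// A minimal sketch (illustrative only; assumes a `snapshot: BufferSnapshot` is in scope):
    ///
    /// ```ignore
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     // `primary.range` is an anchor range; `primary.diagnostic` holds the details.
    /// }
    /// ```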
4631 pub fn diagnostic_groups(
4632 &self,
4633 language_server_id: Option<LanguageServerId>,
4634 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4635 let mut groups = Vec::new();
4636
4637 if let Some(language_server_id) = language_server_id {
4638 if let Ok(ix) = self
4639 .diagnostics
4640 .binary_search_by_key(&language_server_id, |e| e.0)
4641 {
4642 self.diagnostics[ix]
4643 .1
4644 .groups(language_server_id, &mut groups, self);
4645 }
4646 } else {
4647 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4648 diagnostics.groups(*language_server_id, &mut groups, self);
4649 }
4650 }
4651
4652 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4653 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4654 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4655 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4656 });
4657
4658 groups
4659 }
4660
4661 /// Returns an iterator over the diagnostics for the given group.
4662 pub fn diagnostic_group<O>(
4663 &self,
4664 group_id: usize,
4665 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4666 where
4667 O: FromAnchor + 'static,
4668 {
4669 self.diagnostics
4670 .iter()
4671 .flat_map(move |(_, set)| set.group(group_id, self))
4672 }
4673
4674 /// An integer version number that accounts for all updates besides
4675 /// the buffer's text itself (which is versioned via a version vector).
4676 pub fn non_text_state_update_count(&self) -> usize {
4677 self.non_text_state_update_count
4678 }
4679
4680 /// An integer version that changes when the buffer's syntax changes.
4681 pub fn syntax_update_count(&self) -> usize {
4682 self.syntax.update_count()
4683 }
4684
    /// Returns a snapshot of the underlying file.
4686 pub fn file(&self) -> Option<&Arc<dyn File>> {
4687 self.file.as_ref()
4688 }
4689
4690 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4691 if let Some(file) = self.file() {
4692 if file.path().file_name().is_none() || include_root {
4693 Some(file.full_path(cx).to_string_lossy().into_owned())
4694 } else {
4695 Some(file.path().display(file.path_style(cx)).to_string())
4696 }
4697 } else {
4698 None
4699 }
4700 }
4701
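    /// Collects the words that occur within `query.range`, keyed by their text and mapped to
    /// their anchor ranges. See [`WordsQuery`] for the available filters.
    ///
    /// A minimal sketch (illustrative only; assumes a `snapshot: BufferSnapshot` is in scope):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// // Each key contains the characters of "cfg" in order (matched case-insensitively),
    /// // and maps to the word's anchor range in the buffer.
    /// ```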
4702 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4703 let query_str = query.fuzzy_contents;
4704 if query_str.is_some_and(|query| query.is_empty()) {
4705 return BTreeMap::default();
4706 }
4707
4708 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4709 language,
4710 override_id: None,
4711 }));
4712
4713 let mut query_ix = 0;
4714 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4715 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4716
4717 let mut words = BTreeMap::default();
4718 let mut current_word_start_ix = None;
4719 let mut chunk_ix = query.range.start;
4720 for chunk in self.chunks(query.range, false) {
4721 for (i, c) in chunk.text.char_indices() {
4722 let ix = chunk_ix + i;
4723 if classifier.is_word(c) {
4724 if current_word_start_ix.is_none() {
4725 current_word_start_ix = Some(ix);
4726 }
4727
4728 if let Some(query_chars) = &query_chars
4729 && query_ix < query_len
4730 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4731 {
4732 query_ix += 1;
4733 }
4734 continue;
4735 } else if let Some(word_start) = current_word_start_ix.take()
4736 && query_ix == query_len
4737 {
4738 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4739 let mut word_text = self.text_for_range(word_start..ix).peekable();
4740 let first_char = word_text
4741 .peek()
4742 .and_then(|first_chunk| first_chunk.chars().next());
                    // When `skip_digits` is set, skip "words" that start with a digit, as a heuristic to reduce useless completions.
4744 if !query.skip_digits
4745 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4746 {
4747 words.insert(word_text.collect(), word_range);
4748 }
4749 }
4750 query_ix = 0;
4751 }
4752 chunk_ix += chunk.text.len();
4753 }
4754
4755 words
4756 }
4757}
4758
4759pub struct WordsQuery<'a> {
4760 /// Only returns words with all chars from the fuzzy string in them.
4761 pub fuzzy_contents: Option<&'a str>,
4762 /// Skips words that start with a digit.
4763 pub skip_digits: bool,
4764 /// Buffer offset range, to look for words.
4765 pub range: Range<usize>,
4766}
4767
4768fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4769 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4770}
4771
4772fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4773 let mut result = IndentSize::spaces(0);
4774 for c in text {
4775 let kind = match c {
4776 ' ' => IndentKind::Space,
4777 '\t' => IndentKind::Tab,
4778 _ => break,
4779 };
4780 if result.len == 0 {
4781 result.kind = kind;
4782 }
4783 result.len += 1;
4784 }
4785 result
4786}
4787
4788impl Clone for BufferSnapshot {
4789 fn clone(&self) -> Self {
4790 Self {
4791 text: self.text.clone(),
4792 syntax: self.syntax.clone(),
4793 file: self.file.clone(),
4794 remote_selections: self.remote_selections.clone(),
4795 diagnostics: self.diagnostics.clone(),
4796 language: self.language.clone(),
4797 non_text_state_update_count: self.non_text_state_update_count,
4798 }
4799 }
4800}
4801
4802impl Deref for BufferSnapshot {
4803 type Target = text::BufferSnapshot;
4804
4805 fn deref(&self) -> &Self::Target {
4806 &self.text
4807 }
4808}
4809
4810unsafe impl Send for BufferChunks<'_> {}
4811
4812impl<'a> BufferChunks<'a> {
4813 pub(crate) fn new(
4814 text: &'a Rope,
4815 range: Range<usize>,
4816 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4817 diagnostics: bool,
4818 buffer_snapshot: Option<&'a BufferSnapshot>,
4819 ) -> Self {
4820 let mut highlights = None;
4821 if let Some((captures, highlight_maps)) = syntax {
4822 highlights = Some(BufferChunkHighlights {
4823 captures,
4824 next_capture: None,
4825 stack: Default::default(),
4826 highlight_maps,
4827 })
4828 }
4829
4830 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4831 let chunks = text.chunks_in_range(range.clone());
4832
4833 let mut this = BufferChunks {
4834 range,
4835 buffer_snapshot,
4836 chunks,
4837 diagnostic_endpoints,
4838 error_depth: 0,
4839 warning_depth: 0,
4840 information_depth: 0,
4841 hint_depth: 0,
4842 unnecessary_depth: 0,
4843 underline: true,
4844 highlights,
4845 };
4846 this.initialize_diagnostic_endpoints();
4847 this
4848 }
4849
4850 /// Seeks to the given byte offset in the buffer.
4851 pub fn seek(&mut self, range: Range<usize>) {
4852 let old_range = std::mem::replace(&mut self.range, range.clone());
4853 self.chunks.set_range(self.range.clone());
4854 if let Some(highlights) = self.highlights.as_mut() {
4855 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4856 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4857 highlights
4858 .stack
4859 .retain(|(end_offset, _)| *end_offset > range.start);
4860 if let Some(capture) = &highlights.next_capture
4861 && range.start >= capture.node.start_byte()
4862 {
4863 let next_capture_end = capture.node.end_byte();
4864 if range.start < next_capture_end {
4865 highlights.stack.push((
4866 next_capture_end,
4867 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4868 ));
4869 }
4870 highlights.next_capture.take();
4871 }
4872 } else if let Some(snapshot) = self.buffer_snapshot {
4873 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4874 *highlights = BufferChunkHighlights {
4875 captures,
4876 next_capture: None,
4877 stack: Default::default(),
4878 highlight_maps,
4879 };
4880 } else {
4881 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4882 // Seeking such BufferChunks is not supported.
4883 debug_assert!(
4884 false,
4885 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4886 );
4887 }
4888
4889 highlights.captures.set_byte_range(self.range.clone());
4890 self.initialize_diagnostic_endpoints();
4891 }
4892 }
4893
4894 fn initialize_diagnostic_endpoints(&mut self) {
4895 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4896 && let Some(buffer) = self.buffer_snapshot
4897 {
4898 let mut diagnostic_endpoints = Vec::new();
4899 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4900 diagnostic_endpoints.push(DiagnosticEndpoint {
4901 offset: entry.range.start,
4902 is_start: true,
4903 severity: entry.diagnostic.severity,
4904 is_unnecessary: entry.diagnostic.is_unnecessary,
4905 underline: entry.diagnostic.underline,
4906 });
4907 diagnostic_endpoints.push(DiagnosticEndpoint {
4908 offset: entry.range.end,
4909 is_start: false,
4910 severity: entry.diagnostic.severity,
4911 is_unnecessary: entry.diagnostic.is_unnecessary,
4912 underline: entry.diagnostic.underline,
4913 });
4914 }
4915 diagnostic_endpoints
4916 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4917 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4918 self.hint_depth = 0;
4919 self.error_depth = 0;
4920 self.warning_depth = 0;
4921 self.information_depth = 0;
4922 }
4923 }
4924
4925 /// The current byte offset in the buffer.
4926 pub fn offset(&self) -> usize {
4927 self.range.start
4928 }
4929
4930 pub fn range(&self) -> Range<usize> {
4931 self.range.clone()
4932 }
4933
4934 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4935 let depth = match endpoint.severity {
4936 DiagnosticSeverity::ERROR => &mut self.error_depth,
4937 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4938 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4939 DiagnosticSeverity::HINT => &mut self.hint_depth,
4940 _ => return,
4941 };
4942 if endpoint.is_start {
4943 *depth += 1;
4944 } else {
4945 *depth -= 1;
4946 }
4947
4948 if endpoint.is_unnecessary {
4949 if endpoint.is_start {
4950 self.unnecessary_depth += 1;
4951 } else {
4952 self.unnecessary_depth -= 1;
4953 }
4954 }
4955 }
4956
4957 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4958 if self.error_depth > 0 {
4959 Some(DiagnosticSeverity::ERROR)
4960 } else if self.warning_depth > 0 {
4961 Some(DiagnosticSeverity::WARNING)
4962 } else if self.information_depth > 0 {
4963 Some(DiagnosticSeverity::INFORMATION)
4964 } else if self.hint_depth > 0 {
4965 Some(DiagnosticSeverity::HINT)
4966 } else {
4967 None
4968 }
4969 }
4970
4971 fn current_code_is_unnecessary(&self) -> bool {
4972 self.unnecessary_depth > 0
4973 }
4974}
4975
4976impl<'a> Iterator for BufferChunks<'a> {
4977 type Item = Chunk<'a>;
4978
4979 fn next(&mut self) -> Option<Self::Item> {
4980 let mut next_capture_start = usize::MAX;
4981 let mut next_diagnostic_endpoint = usize::MAX;
4982
4983 if let Some(highlights) = self.highlights.as_mut() {
4984 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4985 if *parent_capture_end <= self.range.start {
4986 highlights.stack.pop();
4987 } else {
4988 break;
4989 }
4990 }
4991
4992 if highlights.next_capture.is_none() {
4993 highlights.next_capture = highlights.captures.next();
4994 }
4995
4996 while let Some(capture) = highlights.next_capture.as_ref() {
4997 if self.range.start < capture.node.start_byte() {
4998 next_capture_start = capture.node.start_byte();
4999 break;
5000 } else {
5001 let highlight_id =
5002 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5003 highlights
5004 .stack
5005 .push((capture.node.end_byte(), highlight_id));
5006 highlights.next_capture = highlights.captures.next();
5007 }
5008 }
5009 }
5010
5011 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5012 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5013 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5014 if endpoint.offset <= self.range.start {
5015 self.update_diagnostic_depths(endpoint);
5016 diagnostic_endpoints.next();
5017 self.underline = endpoint.underline;
5018 } else {
5019 next_diagnostic_endpoint = endpoint.offset;
5020 break;
5021 }
5022 }
5023 }
5024 self.diagnostic_endpoints = diagnostic_endpoints;
5025
5026 if let Some(ChunkBitmaps {
5027 text: chunk,
5028 chars: chars_map,
5029 tabs,
5030 }) = self.chunks.peek_with_bitmaps()
5031 {
5032 let chunk_start = self.range.start;
5033 let mut chunk_end = (self.chunks.offset() + chunk.len())
5034 .min(next_capture_start)
5035 .min(next_diagnostic_endpoint);
5036 let mut highlight_id = None;
5037 if let Some(highlights) = self.highlights.as_ref()
5038 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5039 {
5040 chunk_end = chunk_end.min(*parent_capture_end);
5041 highlight_id = Some(*parent_highlight_id);
5042 }
5043 let bit_start = chunk_start - self.chunks.offset();
5044 let bit_end = chunk_end - self.chunks.offset();
5045
5046 let slice = &chunk[bit_start..bit_end];
5047
5048 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5049 let tabs = (tabs >> bit_start) & mask;
5050 let chars = (chars_map >> bit_start) & mask;
5051
5052 self.range.start = chunk_end;
5053 if self.range.start == self.chunks.offset() + chunk.len() {
5054 self.chunks.next().unwrap();
5055 }
5056
5057 Some(Chunk {
5058 text: slice,
5059 syntax_highlight_id: highlight_id,
5060 underline: self.underline,
5061 diagnostic_severity: self.current_diagnostic_severity(),
5062 is_unnecessary: self.current_code_is_unnecessary(),
5063 tabs,
5064 chars,
5065 ..Chunk::default()
5066 })
5067 } else {
5068 None
5069 }
5070 }
5071}
5072
5073impl operation_queue::Operation for Operation {
5074 fn lamport_timestamp(&self) -> clock::Lamport {
5075 match self {
5076 Operation::Buffer(_) => {
5077 unreachable!("buffer operations should never be deferred at this layer")
5078 }
5079 Operation::UpdateDiagnostics {
5080 lamport_timestamp, ..
5081 }
5082 | Operation::UpdateSelections {
5083 lamport_timestamp, ..
5084 }
5085 | Operation::UpdateCompletionTriggers {
5086 lamport_timestamp, ..
5087 }
5088 | Operation::UpdateLineEnding {
5089 lamport_timestamp, ..
5090 } => *lamport_timestamp,
5091 }
5092 }
5093}
5094
5095impl Default for Diagnostic {
5096 fn default() -> Self {
5097 Self {
5098 source: Default::default(),
5099 source_kind: DiagnosticSourceKind::Other,
5100 code: None,
5101 code_description: None,
5102 severity: DiagnosticSeverity::ERROR,
5103 message: Default::default(),
5104 markdown: None,
5105 group_id: 0,
5106 is_primary: false,
5107 is_disk_based: false,
5108 is_unnecessary: false,
5109 underline: true,
5110 data: None,
5111 }
5112 }
5113}
5114
5115impl IndentSize {
5116 /// Returns an [`IndentSize`] representing the given spaces.
5117 pub fn spaces(len: u32) -> Self {
5118 Self {
5119 len,
5120 kind: IndentKind::Space,
5121 }
5122 }
5123
5124 /// Returns an [`IndentSize`] representing a tab.
5125 pub fn tab() -> Self {
5126 Self {
5127 len: 1,
5128 kind: IndentKind::Tab,
5129 }
5130 }
5131
5132 /// An iterator over the characters represented by this [`IndentSize`].
5133 pub fn chars(&self) -> impl Iterator<Item = char> {
5134 iter::repeat(self.char()).take(self.len as usize)
5135 }
5136
5137 /// The character representation of this [`IndentSize`].
5138 pub fn char(&self) -> char {
5139 match self.kind {
5140 IndentKind::Space => ' ',
5141 IndentKind::Tab => '\t',
5142 }
5143 }
5144
5145 /// Consumes the current [`IndentSize`] and returns a new one that has
5146 /// been shrunk or enlarged by the given size along the given direction.
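    ///
    /// A minimal sketch (illustrative only):
    ///
    /// ```ignore
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(grown.len, 8);
    ///
    /// // Growing a non-empty indent of a different kind leaves it unchanged.
    /// let unchanged = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::tab());
    /// assert_eq!(unchanged.len, 4);
    /// ```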
5147 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5148 match direction {
5149 Ordering::Less => {
5150 if self.kind == size.kind && self.len >= size.len {
5151 self.len -= size.len;
5152 }
5153 }
5154 Ordering::Equal => {}
5155 Ordering::Greater => {
5156 if self.len == 0 {
5157 self = size;
5158 } else if self.kind == size.kind {
5159 self.len += size.len;
5160 }
5161 }
5162 }
5163 self
5164 }
5165
5166 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5167 match self.kind {
5168 IndentKind::Space => self.len as usize,
5169 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5170 }
5171 }
5172}
5173
5174#[cfg(any(test, feature = "test-support"))]
5175pub struct TestFile {
5176 pub path: Arc<RelPath>,
5177 pub root_name: String,
5178 pub local_root: Option<PathBuf>,
5179}
5180
5181#[cfg(any(test, feature = "test-support"))]
5182impl File for TestFile {
5183 fn path(&self) -> &Arc<RelPath> {
5184 &self.path
5185 }
5186
5187 fn full_path(&self, _: &gpui::App) -> PathBuf {
5188 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5189 }
5190
5191 fn as_local(&self) -> Option<&dyn LocalFile> {
5192 if self.local_root.is_some() {
5193 Some(self)
5194 } else {
5195 None
5196 }
5197 }
5198
5199 fn disk_state(&self) -> DiskState {
5200 unimplemented!()
5201 }
5202
5203 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5204 self.path().file_name().unwrap_or(self.root_name.as_ref())
5205 }
5206
5207 fn worktree_id(&self, _: &App) -> WorktreeId {
5208 WorktreeId::from_usize(0)
5209 }
5210
5211 fn to_proto(&self, _: &App) -> rpc::proto::File {
5212 unimplemented!()
5213 }
5214
5215 fn is_private(&self) -> bool {
5216 false
5217 }
5218
5219 fn path_style(&self, _cx: &App) -> PathStyle {
5220 PathStyle::local()
5221 }
5222}
5223
5224#[cfg(any(test, feature = "test-support"))]
5225impl LocalFile for TestFile {
5226 fn abs_path(&self, _cx: &App) -> PathBuf {
5227 PathBuf::from(self.local_root.as_ref().unwrap())
5228 .join(&self.root_name)
5229 .join(self.path.as_std_path())
5230 }
5231
5232 fn load(&self, _cx: &App) -> Task<Result<String>> {
5233 unimplemented!()
5234 }
5235
5236 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5237 unimplemented!()
5238 }
5239
5240 fn load_with_encoding(&self, _: &App, _: &'static Encoding) -> Task<Result<String>> {
5241 unimplemented!()
5242 }
5243}
5244
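/// Groups an iterator of `u32` values into ranges of consecutive values, capping each
/// range at `max_len` entries.
///
/// A minimal sketch (illustrative only):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 7, 8].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 7..9]);
/// ```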
5245pub(crate) fn contiguous_ranges(
5246 values: impl Iterator<Item = u32>,
5247 max_len: usize,
5248) -> impl Iterator<Item = Range<u32>> {
5249 let mut values = values;
5250 let mut current_range: Option<Range<u32>> = None;
5251 std::iter::from_fn(move || {
5252 loop {
5253 if let Some(value) = values.next() {
5254 if let Some(range) = &mut current_range
5255 && value == range.end
5256 && range.len() < max_len
5257 {
5258 range.end += 1;
5259 continue;
5260 }
5261
5262 let prev_range = current_range.clone();
5263 current_range = Some(value..(value + 1));
5264 if prev_range.is_some() {
5265 return prev_range;
5266 }
5267 } else {
5268 return current_range.take();
5269 }
5270 }
5271 })
5272}
5273
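/// Classifies characters as word, whitespace, or punctuation characters, optionally taking
/// language-specific word characters (via a [`LanguageScope`]) into account.
///
/// A minimal sketch (illustrative only):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert!(classifier.is_word('a'));
/// assert!(classifier.is_whitespace(' '));
/// assert!(classifier.is_punctuation('.'));
/// ```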
5274#[derive(Default, Debug)]
5275pub struct CharClassifier {
5276 scope: Option<LanguageScope>,
5277 scope_context: Option<CharScopeContext>,
5278 ignore_punctuation: bool,
5279}
5280
5281impl CharClassifier {
5282 pub fn new(scope: Option<LanguageScope>) -> Self {
5283 Self {
5284 scope,
5285 scope_context: None,
5286 ignore_punctuation: false,
5287 }
5288 }
5289
5290 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5291 Self {
5292 scope_context,
5293 ..self
5294 }
5295 }
5296
5297 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5298 Self {
5299 ignore_punctuation,
5300 ..self
5301 }
5302 }
5303
5304 pub fn is_whitespace(&self, c: char) -> bool {
5305 self.kind(c) == CharKind::Whitespace
5306 }
5307
5308 pub fn is_word(&self, c: char) -> bool {
5309 self.kind(c) == CharKind::Word
5310 }
5311
5312 pub fn is_punctuation(&self, c: char) -> bool {
5313 self.kind(c) == CharKind::Punctuation
5314 }
5315
5316 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5317 if c.is_alphanumeric() || c == '_' {
5318 return CharKind::Word;
5319 }
5320
5321 if let Some(scope) = &self.scope {
5322 let characters = match self.scope_context {
5323 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5324 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5325 None => scope.word_characters(),
5326 };
5327 if let Some(characters) = characters
5328 && characters.contains(&c)
5329 {
5330 return CharKind::Word;
5331 }
5332 }
5333
5334 if c.is_whitespace() {
5335 return CharKind::Whitespace;
5336 }
5337
5338 if ignore_punctuation {
5339 CharKind::Word
5340 } else {
5341 CharKind::Punctuation
5342 }
5343 }
5344
5345 pub fn kind(&self, c: char) -> CharKind {
5346 self.kind_with(c, self.ignore_punctuation)
5347 }
5348}
5349
5350/// Find all of the ranges of whitespace that occur at the ends of lines
5351/// in the given rope.
5352///
5353/// This could also be done with a regex search, but this implementation
5354/// avoids copying text.
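///
/// A minimal sketch (illustrative only; assumes a `rope: Rope` value is in scope):
///
/// ```ignore
/// // For a rope containing "let x = 1;  \nlet y = 2;\t\n", the returned ranges cover the
/// // two trailing spaces on the first line and the trailing tab on the second.
/// let ranges = trailing_whitespace_ranges(&rope);
/// ```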
5355pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5356 let mut ranges = Vec::new();
5357
5358 let mut offset = 0;
5359 let mut prev_chunk_trailing_whitespace_range = 0..0;
5360 for chunk in rope.chunks() {
5361 let mut prev_line_trailing_whitespace_range = 0..0;
5362 for (i, line) in chunk.split('\n').enumerate() {
5363 let line_end_offset = offset + line.len();
5364 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5365 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5366
5367 if i == 0 && trimmed_line_len == 0 {
5368 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5369 }
5370 if !prev_line_trailing_whitespace_range.is_empty() {
5371 ranges.push(prev_line_trailing_whitespace_range);
5372 }
5373
5374 offset = line_end_offset + 1;
5375 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5376 }
5377
5378 offset -= 1;
5379 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5380 }
5381
5382 if !prev_chunk_trailing_whitespace_range.is_empty() {
5383 ranges.push(prev_chunk_trailing_whitespace_range);
5384 }
5385
5386 ranges
5387}