1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 future::Future,
45 iter::{self, Iterator, Peekable},
46 mem,
47 num::NonZeroU32,
48 ops::{Deref, Range},
49 path::PathBuf,
50 rc,
51 sync::{Arc, LazyLock},
52 time::{Duration, Instant},
53 vec,
54};
55use sum_tree::TreeMap;
56use text::operation_queue::OperationQueue;
57use text::*;
58pub use text::{
59 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
60 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
61 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
62 ToPointUtf16, Transaction, TransactionId, Unclipped,
63};
64use theme::{ActiveTheme as _, SyntaxTheme};
65#[cfg(any(test, feature = "test-support"))]
66use util::RandomCharIter;
67use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
68
69#[cfg(any(test, feature = "test-support"))]
70pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
71
72pub use lsp::DiagnosticSeverity;
73
74/// A label for the background task spawned by the buffer to compute
75/// a diff against the contents of its file.
76pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
77
/// Indicates whether a [`Buffer`] has permission to edit.
79#[derive(PartialEq, Clone, Copy, Debug)]
80pub enum Capability {
81 /// The buffer is a mutable replica.
82 ReadWrite,
83 /// The buffer is a read-only replica.
84 ReadOnly,
85}
86
87pub type BufferRow = u32;
88
89/// An in-memory representation of a source code file, including its text,
90/// syntax trees, git status, and diagnostics.
91pub struct Buffer {
92 text: TextBuffer,
93 branch_state: Option<BufferBranchState>,
94 /// Filesystem state, `None` when there is no path.
95 file: Option<Arc<dyn File>>,
96 /// The mtime of the file when this buffer was last loaded from
97 /// or saved to disk.
98 saved_mtime: Option<MTime>,
99 /// The version vector when this buffer was last loaded from
100 /// or saved to disk.
101 saved_version: clock::Global,
102 preview_version: clock::Global,
103 transaction_depth: usize,
104 was_dirty_before_starting_transaction: Option<bool>,
105 reload_task: Option<Task<Result<()>>>,
106 language: Option<Arc<Language>>,
107 autoindent_requests: Vec<Arc<AutoindentRequest>>,
108 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
109 pending_autoindent: Option<Task<()>>,
110 sync_parse_timeout: Duration,
111 syntax_map: Mutex<SyntaxMap>,
112 reparse: Option<Task<()>>,
113 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
114 non_text_state_update_count: usize,
115 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
116 remote_selections: TreeMap<ReplicaId, SelectionSet>,
117 diagnostics_timestamp: clock::Lamport,
118 completion_triggers: BTreeSet<String>,
119 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
120 completion_triggers_timestamp: clock::Lamport,
121 deferred_ops: OperationQueue<Operation>,
122 capability: Capability,
123 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
126 has_unsaved_edits: Cell<(clock::Global, bool)>,
127 change_bits: Vec<rc::Weak<Cell<bool>>>,
128 _subscriptions: Vec<gpui::Subscription>,
129}
130
131#[derive(Copy, Clone, Debug, PartialEq, Eq)]
132pub enum ParseStatus {
133 Idle,
134 Parsing,
135}
136
137struct BufferBranchState {
138 base_buffer: Entity<Buffer>,
139 merged_operations: Vec<Lamport>,
140}
141
142/// An immutable, cheaply cloneable representation of a fixed
143/// state of a buffer.
144pub struct BufferSnapshot {
145 pub text: text::BufferSnapshot,
146 pub syntax: SyntaxSnapshot,
147 file: Option<Arc<dyn File>>,
148 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
149 remote_selections: TreeMap<ReplicaId, SelectionSet>,
150 language: Option<Arc<Language>>,
151 non_text_state_update_count: usize,
152}
153
154/// The kind and amount of indentation in a particular line. For now,
155/// assumes that indentation is all the same character.
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
157pub struct IndentSize {
158 /// The number of bytes that comprise the indentation.
159 pub len: u32,
160 /// The kind of whitespace used for indentation.
161 pub kind: IndentKind,
162}
163
164/// A whitespace character that's used for indentation.
165#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
166pub enum IndentKind {
167 /// An ASCII space character.
168 #[default]
169 Space,
170 /// An ASCII tab character.
171 Tab,
172}
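
// A minimal, test-only sketch of what `IndentSize` encodes: the byte length of a
// line's leading whitespace plus the single character kind it is made of. The module
// name and values below are illustrative, not part of the buffer API.
#[cfg(test)]
mod indent_size_example {
    use super::{IndentKind, IndentSize};

    #[test]
    fn spaces_and_tabs() {
        // A line indented with four spaces.
        let spaces = IndentSize {
            len: 4,
            kind: IndentKind::Space,
        };
        // A line indented with a single tab.
        let tab = IndentSize {
            len: 1,
            kind: IndentKind::Tab,
        };
        assert_eq!(spaces.len, 4);
        assert_eq!(tab.kind, IndentKind::Tab);
    }
}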
173
174/// The shape of a selection cursor.
175#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
176pub enum CursorShape {
177 /// A vertical bar
178 #[default]
179 Bar,
180 /// A block that surrounds the following character
181 Block,
182 /// An underline that runs along the following character
183 Underline,
184 /// A box drawn around the following character
185 Hollow,
186}
187
188impl From<settings::CursorShape> for CursorShape {
189 fn from(shape: settings::CursorShape) -> Self {
190 match shape {
191 settings::CursorShape::Bar => CursorShape::Bar,
192 settings::CursorShape::Block => CursorShape::Block,
193 settings::CursorShape::Underline => CursorShape::Underline,
194 settings::CursorShape::Hollow => CursorShape::Hollow,
195 }
196 }
197}
198
199#[derive(Clone, Debug)]
200struct SelectionSet {
201 line_mode: bool,
202 cursor_shape: CursorShape,
203 selections: Arc<[Selection<Anchor>]>,
204 lamport_timestamp: clock::Lamport,
205}
206
207/// A diagnostic associated with a certain range of a buffer.
208#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
209pub struct Diagnostic {
210 /// The name of the service that produced this diagnostic.
211 pub source: Option<String>,
212 /// A machine-readable code that identifies this diagnostic.
213 pub code: Option<NumberOrString>,
214 pub code_description: Option<lsp::Uri>,
215 /// Whether this diagnostic is a hint, warning, or error.
216 pub severity: DiagnosticSeverity,
217 /// The human-readable message associated with this diagnostic.
218 pub message: String,
    /// The human-readable message, in Markdown format.
220 pub markdown: Option<String>,
221 /// An id that identifies the group to which this diagnostic belongs.
222 ///
223 /// When a language server produces a diagnostic with
224 /// one or more associated diagnostics, those diagnostics are all
225 /// assigned a single group ID.
226 pub group_id: usize,
227 /// Whether this diagnostic is the primary diagnostic for its group.
228 ///
229 /// In a given group, the primary diagnostic is the top-level diagnostic
230 /// returned by the language server. The non-primary diagnostics are the
231 /// associated diagnostics.
232 pub is_primary: bool,
233 /// Whether this diagnostic is considered to originate from an analysis of
234 /// files on disk, as opposed to any unsaved buffer contents. This is a
235 /// property of a given diagnostic source, and is configured for a given
236 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
237 /// for the language server.
238 pub is_disk_based: bool,
239 /// Whether this diagnostic marks unnecessary code.
240 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups by their source.
242 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the server when code actions are requested for this diagnostic.
244 pub data: Option<Value>,
245 /// Whether to underline the corresponding text range in the editor.
246 pub underline: bool,
247}
248
249#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
250pub enum DiagnosticSourceKind {
251 Pulled,
252 Pushed,
253 Other,
254}
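
// A minimal, test-only construction sketch for `Diagnostic`, using only the fields
// declared above. The module name and field values are illustrative; they are not
// defaults used anywhere else.
#[cfg(test)]
mod diagnostic_example {
    use super::*;

    #[test]
    fn construct_primary_error() {
        let diagnostic = Diagnostic {
            source: Some("rustc".to_string()),
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: "mismatched types".to_string(),
            markdown: None,
            // Associated diagnostics from the same language server response share a group id.
            group_id: 0,
            is_primary: true,
            is_disk_based: true,
            is_unnecessary: false,
            source_kind: DiagnosticSourceKind::Pushed,
            data: None,
            underline: true,
        };
        assert!(diagnostic.is_primary);
    }
}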
255
256/// An operation used to synchronize this buffer with its other replicas.
257#[derive(Clone, Debug, PartialEq)]
258pub enum Operation {
259 /// A text operation.
260 Buffer(text::Operation),
261
262 /// An update to the buffer's diagnostics.
263 UpdateDiagnostics {
264 /// The id of the language server that produced the new diagnostics.
265 server_id: LanguageServerId,
266 /// The diagnostics.
267 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
268 /// The buffer's lamport timestamp.
269 lamport_timestamp: clock::Lamport,
270 },
271
272 /// An update to the most recent selections in this buffer.
273 UpdateSelections {
274 /// The selections.
275 selections: Arc<[Selection<Anchor>]>,
276 /// The buffer's lamport timestamp.
277 lamport_timestamp: clock::Lamport,
278 /// Whether the selections are in 'line mode'.
279 line_mode: bool,
280 /// The [`CursorShape`] associated with these selections.
281 cursor_shape: CursorShape,
282 },
283
284 /// An update to the characters that should trigger autocompletion
285 /// for this buffer.
286 UpdateCompletionTriggers {
287 /// The characters that trigger autocompletion.
288 triggers: Vec<String>,
289 /// The buffer's lamport timestamp.
290 lamport_timestamp: clock::Lamport,
291 /// The language server ID.
292 server_id: LanguageServerId,
293 },
294
295 /// An update to the line ending type of this buffer.
296 UpdateLineEnding {
297 /// The line ending type.
298 line_ending: LineEnding,
299 /// The buffer's lamport timestamp.
300 lamport_timestamp: clock::Lamport,
301 },
302}
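
// Illustrative sketch: every non-text operation above carries a lamport timestamp,
// which replicas use to order concurrent non-text updates. The helper and module
// below are hypothetical and exist only to show the shape of the enum.
#[cfg(test)]
mod operation_example {
    use super::Operation;

    fn lamport_timestamp(operation: &Operation) -> Option<clock::Lamport> {
        match operation {
            // Text operations carry their timestamps internally.
            Operation::Buffer(_) => None,
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => Some(*lamport_timestamp),
        }
    }

    #[test]
    fn update_line_ending_exposes_timestamp() {
        let timestamp = clock::Lamport::MIN;
        let operation = Operation::UpdateLineEnding {
            line_ending: text::LineEnding::Unix,
            lamport_timestamp: timestamp,
        };
        assert_eq!(lamport_timestamp(&operation), Some(timestamp));
    }
}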
303
304/// An event that occurs in a buffer.
305#[derive(Clone, Debug, PartialEq)]
306pub enum BufferEvent {
307 /// The buffer was changed in a way that must be
308 /// propagated to its other replicas.
309 Operation {
310 operation: Operation,
311 is_local: bool,
312 },
313 /// The buffer was edited.
314 Edited,
315 /// The buffer's `dirty` bit changed.
316 DirtyChanged,
317 /// The buffer was saved.
318 Saved,
319 /// The buffer's file was changed on disk.
320 FileHandleChanged,
321 /// The buffer was reloaded.
322 Reloaded,
    /// The buffer needs to be reloaded.
324 ReloadNeeded,
325 /// The buffer's language was changed.
326 LanguageChanged,
327 /// The buffer's syntax trees were updated.
328 Reparsed,
329 /// The buffer's diagnostics were updated.
330 DiagnosticsUpdated,
331 /// The buffer gained or lost editing capabilities.
332 CapabilityChanged,
333}
334
335/// The file associated with a buffer.
336pub trait File: Send + Sync + Any {
337 /// Returns the [`LocalFile`] associated with this file, if the
338 /// file is local.
339 fn as_local(&self) -> Option<&dyn LocalFile>;
340
341 /// Returns whether this file is local.
342 fn is_local(&self) -> bool {
343 self.as_local().is_some()
344 }
345
346 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
347 /// only available in some states, such as modification time.
348 fn disk_state(&self) -> DiskState;
349
350 /// Returns the path of this file relative to the worktree's root directory.
351 fn path(&self) -> &Arc<RelPath>;
352
353 /// Returns the path of this file relative to the worktree's parent directory (this means it
354 /// includes the name of the worktree's root folder).
355 fn full_path(&self, cx: &App) -> PathBuf;
356
357 /// Returns the path style of this file.
358 fn path_style(&self, cx: &App) -> PathStyle;
359
360 /// Returns the last component of this handle's absolute path. If this handle refers to the root
361 /// of its worktree, then this method will return the name of the worktree itself.
362 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
363
364 /// Returns the id of the worktree to which this file belongs.
365 ///
366 /// This is needed for looking up project-specific settings.
367 fn worktree_id(&self, cx: &App) -> WorktreeId;
368
369 /// Converts this file into a protobuf message.
370 fn to_proto(&self, cx: &App) -> rpc::proto::File;
371
    /// Returns whether Zed considers this to be a private file.
373 fn is_private(&self) -> bool;
374}
375
376/// The file's storage status - whether it's stored (`Present`), and if so when it was last
377/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
378/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
379/// indicator for new files.
380#[derive(Copy, Clone, Debug, PartialEq)]
381pub enum DiskState {
382 /// File created in Zed that has not been saved.
383 New,
384 /// File present on the filesystem.
385 Present { mtime: MTime },
386 /// Deleted file that was previously present.
387 Deleted,
388 /// File stored remotely.
389 Remote,
390}
391
392impl DiskState {
393 /// Returns the file's last known modification time on disk.
394 pub fn mtime(self) -> Option<MTime> {
395 match self {
396 DiskState::New => None,
397 DiskState::Present { mtime } => Some(mtime),
398 DiskState::Deleted => None,
399 DiskState::Remote => None,
400 }
401 }
402
403 pub fn exists(&self) -> bool {
404 match self {
405 DiskState::New => false,
406 DiskState::Present { .. } => true,
407 DiskState::Deleted => false,
408 DiskState::Remote => true,
409 }
410 }
411}
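
// Test-only sketch of the `DiskState` helpers above: only `Present` and `Remote`
// count as existing, and only `Present` carries a modification time. The module name
// is illustrative.
#[cfg(test)]
mod disk_state_example {
    use super::DiskState;

    #[test]
    fn existence_and_mtime() {
        assert!(!DiskState::New.exists());
        assert!(!DiskState::Deleted.exists());
        assert!(DiskState::Remote.exists());
        assert!(DiskState::New.mtime().is_none());
        assert!(DiskState::Deleted.mtime().is_none());
    }
}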
412
413/// The file associated with a buffer, in the case where the file is on the local disk.
414pub trait LocalFile: File {
    /// Returns the absolute path of this file.
416 fn abs_path(&self, cx: &App) -> PathBuf;
417
418 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
419 fn load(&self, cx: &App) -> Task<Result<String>>;
420
421 /// Loads the file's contents from disk.
422 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
423}
424
425/// The auto-indent behavior associated with an editing operation.
426/// For some editing operations, each affected line of text has its
427/// indentation recomputed. For other operations, the entire block
428/// of edited text is adjusted uniformly.
429#[derive(Clone, Debug)]
430pub enum AutoindentMode {
431 /// Indent each line of inserted text.
432 EachLine,
433 /// Apply the same indentation adjustment to all of the lines
434 /// in a given insertion.
435 Block {
436 /// The original indentation column of the first line of each
437 /// insertion, if it has been copied.
438 ///
439 /// Knowing this makes it possible to preserve the relative indentation
440 /// of every line in the insertion from when it was copied.
441 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is auto-indented to column `b`, then every other line of the insertion
        /// has its indentation shifted by `b - a` columns relative to its original
        /// indentation. See the worked example after this enum.
445 original_indent_columns: Vec<Option<u32>>,
446 },
447}
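
// Worked example of the block-mode arithmetic described above (illustrative only):
// if a block of text was copied from column 4 (`a = 4`) and its first line is then
// auto-indented to column 8 (`b = 8`), every other line of the insertion is shifted
// right by `b - a = 4` columns, preserving the block's internal relative indentation.
// A caller would pass the mode alongside the edit, e.g. (sketch, values hypothetical):
//
//     buffer.edit(
//         [(insert_range, copied_text)],
//         Some(AutoindentMode::Block { original_indent_columns: vec![Some(4)] }),
//         cx,
//     );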
448
449#[derive(Clone)]
450struct AutoindentRequest {
451 before_edit: BufferSnapshot,
452 entries: Vec<AutoindentRequestEntry>,
453 is_block_mode: bool,
454 ignore_empty_lines: bool,
455}
456
457#[derive(Debug, Clone)]
458struct AutoindentRequestEntry {
459 /// A range of the buffer whose indentation should be adjusted.
460 range: Range<Anchor>,
461 /// Whether or not these lines should be considered brand new, for the
462 /// purpose of auto-indent. When text is not new, its indentation will
463 /// only be adjusted if the suggested indentation level has *changed*
464 /// since the edit was made.
465 first_line_is_new: bool,
466 indent_size: IndentSize,
467 original_indent_column: Option<u32>,
468}
469
470#[derive(Debug)]
471struct IndentSuggestion {
472 basis_row: u32,
473 delta: Ordering,
474 within_error: bool,
475}
476
477struct BufferChunkHighlights<'a> {
478 captures: SyntaxMapCaptures<'a>,
479 next_capture: Option<SyntaxMapCapture<'a>>,
480 stack: Vec<(usize, HighlightId)>,
481 highlight_maps: Vec<HighlightMap>,
482}
483
484/// An iterator that yields chunks of a buffer's text, along with their
485/// syntax highlights and diagnostic status.
486pub struct BufferChunks<'a> {
487 buffer_snapshot: Option<&'a BufferSnapshot>,
488 range: Range<usize>,
489 chunks: text::Chunks<'a>,
490 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
491 error_depth: usize,
492 warning_depth: usize,
493 information_depth: usize,
494 hint_depth: usize,
495 unnecessary_depth: usize,
496 underline: bool,
497 highlights: Option<BufferChunkHighlights<'a>>,
498}
499
500/// A chunk of a buffer's text, along with its syntax highlight and
501/// diagnostic status.
502#[derive(Clone, Debug, Default)]
503pub struct Chunk<'a> {
504 /// The text of the chunk.
505 pub text: &'a str,
506 /// The syntax highlighting style of the chunk.
507 pub syntax_highlight_id: Option<HighlightId>,
508 /// The highlight style that has been applied to this chunk in
509 /// the editor.
510 pub highlight_style: Option<HighlightStyle>,
511 /// The severity of diagnostic associated with this chunk, if any.
512 pub diagnostic_severity: Option<DiagnosticSeverity>,
513 /// A bitset of which characters are tabs in this string.
514 pub tabs: u128,
515 /// Bitmap of character indices in this chunk
516 pub chars: u128,
517 /// Whether this chunk of text is marked as unnecessary.
518 pub is_unnecessary: bool,
519 /// Whether this chunk of text was originally a tab character.
520 pub is_tab: bool,
521 /// Whether this chunk of text was originally an inlay.
522 pub is_inlay: bool,
523 /// Whether to underline the corresponding text range in the editor.
524 pub underline: bool,
525}
526
527/// A set of edits to a given version of a buffer, computed asynchronously.
528#[derive(Debug)]
529pub struct Diff {
530 pub base_version: clock::Global,
531 pub line_ending: LineEnding,
532 pub edits: Vec<(Range<usize>, Arc<str>)>,
533}
534
535#[derive(Debug, Clone, Copy)]
536pub(crate) struct DiagnosticEndpoint {
537 offset: usize,
538 is_start: bool,
539 underline: bool,
540 severity: DiagnosticSeverity,
541 is_unnecessary: bool,
542}
543
544/// A class of characters, used for characterizing a run of text.
545#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
546pub enum CharKind {
547 /// Whitespace.
548 Whitespace,
549 /// Punctuation.
550 Punctuation,
551 /// Word.
552 Word,
553}
554
555/// Context for character classification within a specific scope.
556#[derive(Copy, Clone, Eq, PartialEq, Debug)]
557pub enum CharScopeContext {
558 /// Character classification for completion queries.
559 ///
560 /// This context treats certain characters as word constituents that would
561 /// normally be considered punctuation, such as '-' in Tailwind classes
562 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
563 Completion,
564 /// Character classification for linked edits.
565 ///
566 /// This context handles characters that should be treated as part of
567 /// identifiers during linked editing operations, such as '.' in JSX
568 /// component names like `<Animated.View>`.
569 LinkedEdit,
570}
571
/// A runnable is a set of data about a region that could be resolved into a task.
573pub struct Runnable {
574 pub tags: SmallVec<[RunnableTag; 1]>,
575 pub language: Arc<Language>,
576 pub buffer: BufferId,
577}
578
579#[derive(Default, Clone, Debug)]
580pub struct HighlightedText {
581 pub text: SharedString,
582 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
583}
584
585#[derive(Default, Debug)]
586struct HighlightedTextBuilder {
587 pub text: String,
588 highlights: Vec<(Range<usize>, HighlightStyle)>,
589}
590
591impl HighlightedText {
592 pub fn from_buffer_range<T: ToOffset>(
593 range: Range<T>,
594 snapshot: &text::BufferSnapshot,
595 syntax_snapshot: &SyntaxSnapshot,
596 override_style: Option<HighlightStyle>,
597 syntax_theme: &SyntaxTheme,
598 ) -> Self {
599 let mut highlighted_text = HighlightedTextBuilder::default();
600 highlighted_text.add_text_from_buffer_range(
601 range,
602 snapshot,
603 syntax_snapshot,
604 override_style,
605 syntax_theme,
606 );
607 highlighted_text.build()
608 }
609
610 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
611 gpui::StyledText::new(self.text.clone())
612 .with_default_highlights(default_style, self.highlights.iter().cloned())
613 }
614
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within it, and a boolean indicating whether more lines follow.
617 pub fn first_line_preview(self) -> (Self, bool) {
618 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
619 let first_line = &self.text[..newline_ix];
620
621 // Trim leading whitespace, unless an edit starts prior to it.
622 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
623 if let Some((first_highlight_range, _)) = self.highlights.first() {
624 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
625 }
626
627 let preview_text = &first_line[preview_start_ix..];
628 let preview_highlights = self
629 .highlights
630 .into_iter()
631 .skip_while(|(range, _)| range.end <= preview_start_ix)
632 .take_while(|(range, _)| range.start < newline_ix)
633 .filter_map(|(mut range, highlight)| {
634 range.start = range.start.saturating_sub(preview_start_ix);
635 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
636 if range.is_empty() {
637 None
638 } else {
639 Some((range, highlight))
640 }
641 });
642
643 let preview = Self {
644 text: SharedString::new(preview_text),
645 highlights: preview_highlights.collect(),
646 };
647
648 (preview, self.text.len() > newline_ix)
649 }
650}
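
// Test-only sketch of `first_line_preview`: leading whitespace is trimmed unless a
// highlight begins inside it, and the returned flag reports whether more lines follow.
// The module name and sample text are illustrative.
#[cfg(test)]
mod highlighted_text_example {
    use super::HighlightedText;

    #[test]
    fn preview_trims_leading_whitespace() {
        let text = HighlightedText {
            text: "    let x = 1;\nlet y = 2;".into(),
            highlights: Vec::new(),
        };
        let (preview, has_more_lines) = text.first_line_preview();
        assert_eq!(&*preview.text, "let x = 1;");
        assert!(has_more_lines);
    }
}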
651
652impl HighlightedTextBuilder {
653 pub fn build(self) -> HighlightedText {
654 HighlightedText {
655 text: self.text.into(),
656 highlights: self.highlights,
657 }
658 }
659
660 pub fn add_text_from_buffer_range<T: ToOffset>(
661 &mut self,
662 range: Range<T>,
663 snapshot: &text::BufferSnapshot,
664 syntax_snapshot: &SyntaxSnapshot,
665 override_style: Option<HighlightStyle>,
666 syntax_theme: &SyntaxTheme,
667 ) {
668 let range = range.to_offset(snapshot);
669 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
670 let start = self.text.len();
671 self.text.push_str(chunk.text);
672 let end = self.text.len();
673
674 if let Some(highlight_style) = chunk
675 .syntax_highlight_id
676 .and_then(|id| id.style(syntax_theme))
677 {
678 let highlight_style = override_style.map_or(highlight_style, |override_style| {
679 highlight_style.highlight(override_style)
680 });
681 self.highlights.push((start..end, highlight_style));
682 } else if let Some(override_style) = override_style {
683 self.highlights.push((start..end, override_style));
684 }
685 }
686 }
687
688 fn highlighted_chunks<'a>(
689 range: Range<usize>,
690 snapshot: &'a text::BufferSnapshot,
691 syntax_snapshot: &'a SyntaxSnapshot,
692 ) -> BufferChunks<'a> {
693 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
694 grammar
695 .highlights_config
696 .as_ref()
697 .map(|config| &config.query)
698 });
699
700 let highlight_maps = captures
701 .grammars()
702 .iter()
703 .map(|grammar| grammar.highlight_map())
704 .collect();
705
706 BufferChunks::new(
707 snapshot.as_rope(),
708 range,
709 Some((captures, highlight_maps)),
710 false,
711 None,
712 )
713 }
714}
715
716#[derive(Clone)]
717pub struct EditPreview {
718 old_snapshot: text::BufferSnapshot,
719 applied_edits_snapshot: text::BufferSnapshot,
720 syntax_snapshot: SyntaxSnapshot,
721}
722
723impl EditPreview {
724 pub fn highlight_edits(
725 &self,
726 current_snapshot: &BufferSnapshot,
727 edits: &[(Range<Anchor>, impl AsRef<str>)],
728 include_deletions: bool,
729 cx: &App,
730 ) -> HighlightedText {
731 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
732 return HighlightedText::default();
733 };
734
735 let mut highlighted_text = HighlightedTextBuilder::default();
736
737 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
738
739 let insertion_highlight_style = HighlightStyle {
740 background_color: Some(cx.theme().status().created_background),
741 ..Default::default()
742 };
743 let deletion_highlight_style = HighlightStyle {
744 background_color: Some(cx.theme().status().deleted_background),
745 ..Default::default()
746 };
747 let syntax_theme = cx.theme().syntax();
748
749 for (range, edit_text) in edits {
750 let edit_new_end_in_preview_snapshot = range
751 .end
752 .bias_right(&self.old_snapshot)
753 .to_offset(&self.applied_edits_snapshot);
754 let edit_start_in_preview_snapshot =
755 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
756
757 let unchanged_range_in_preview_snapshot =
758 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
759 if !unchanged_range_in_preview_snapshot.is_empty() {
760 highlighted_text.add_text_from_buffer_range(
761 unchanged_range_in_preview_snapshot,
762 &self.applied_edits_snapshot,
763 &self.syntax_snapshot,
764 None,
765 syntax_theme,
766 );
767 }
768
769 let range_in_current_snapshot = range.to_offset(current_snapshot);
770 if include_deletions && !range_in_current_snapshot.is_empty() {
771 highlighted_text.add_text_from_buffer_range(
772 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
775 Some(deletion_highlight_style),
776 syntax_theme,
777 );
778 }
779
780 if !edit_text.as_ref().is_empty() {
781 highlighted_text.add_text_from_buffer_range(
782 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
783 &self.applied_edits_snapshot,
784 &self.syntax_snapshot,
785 Some(insertion_highlight_style),
786 syntax_theme,
787 );
788 }
789
790 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
791 }
792
793 highlighted_text.add_text_from_buffer_range(
794 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
795 &self.applied_edits_snapshot,
796 &self.syntax_snapshot,
797 None,
798 syntax_theme,
799 );
800
801 highlighted_text.build()
802 }
803
804 fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<usize>> {
805 let (first, _) = edits.first()?;
806 let (last, _) = edits.last()?;
807
808 let start = first
809 .start
810 .bias_left(&self.old_snapshot)
811 .to_point(&self.applied_edits_snapshot);
812 let end = last
813 .end
814 .bias_right(&self.old_snapshot)
815 .to_point(&self.applied_edits_snapshot);
816
817 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
818 let range = Point::new(start.row, 0)
819 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
820
821 Some(range.to_offset(&self.applied_edits_snapshot))
822 }
823}
824
825#[derive(Clone, Debug, PartialEq, Eq)]
826pub struct BracketMatch {
827 pub open_range: Range<usize>,
828 pub close_range: Range<usize>,
829 pub newline_only: bool,
830}
831
832impl Buffer {
833 /// Create a new buffer with the given base text.
834 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
835 Self::build(
836 TextBuffer::new(
837 ReplicaId::LOCAL,
838 cx.entity_id().as_non_zero_u64().into(),
839 base_text.into(),
840 ),
841 None,
842 Capability::ReadWrite,
843 )
844 }
845
846 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
847 pub fn local_normalized(
848 base_text_normalized: Rope,
849 line_ending: LineEnding,
850 cx: &Context<Self>,
851 ) -> Self {
852 Self::build(
853 TextBuffer::new_normalized(
854 ReplicaId::LOCAL,
855 cx.entity_id().as_non_zero_u64().into(),
856 line_ending,
857 base_text_normalized,
858 ),
859 None,
860 Capability::ReadWrite,
861 )
862 }
863
864 /// Create a new buffer that is a replica of a remote buffer.
865 pub fn remote(
866 remote_id: BufferId,
867 replica_id: ReplicaId,
868 capability: Capability,
869 base_text: impl Into<String>,
870 ) -> Self {
871 Self::build(
872 TextBuffer::new(replica_id, remote_id, base_text.into()),
873 None,
874 capability,
875 )
876 }
877
878 /// Create a new buffer that is a replica of a remote buffer, populating its
879 /// state from the given protobuf message.
880 pub fn from_proto(
881 replica_id: ReplicaId,
882 capability: Capability,
883 message: proto::BufferState,
884 file: Option<Arc<dyn File>>,
885 ) -> Result<Self> {
886 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
887 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
888 let mut this = Self::build(buffer, file, capability);
889 this.text.set_line_ending(proto::deserialize_line_ending(
890 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
891 ));
892 this.saved_version = proto::deserialize_version(&message.saved_version);
893 this.saved_mtime = message.saved_mtime.map(|time| time.into());
894 Ok(this)
895 }
896
897 /// Serialize the buffer's state to a protobuf message.
898 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
899 proto::BufferState {
900 id: self.remote_id().into(),
901 file: self.file.as_ref().map(|f| f.to_proto(cx)),
902 base_text: self.base_text().to_string(),
903 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
904 saved_version: proto::serialize_version(&self.saved_version),
905 saved_mtime: self.saved_mtime.map(|time| time.into()),
906 }
907 }
908
909 /// Serialize as protobufs all of the changes to the buffer since the given version.
910 pub fn serialize_ops(
911 &self,
912 since: Option<clock::Global>,
913 cx: &App,
914 ) -> Task<Vec<proto::Operation>> {
915 let mut operations = Vec::new();
916 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
917
918 operations.extend(self.remote_selections.iter().map(|(_, set)| {
919 proto::serialize_operation(&Operation::UpdateSelections {
920 selections: set.selections.clone(),
921 lamport_timestamp: set.lamport_timestamp,
922 line_mode: set.line_mode,
923 cursor_shape: set.cursor_shape,
924 })
925 }));
926
927 for (server_id, diagnostics) in &self.diagnostics {
928 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
929 lamport_timestamp: self.diagnostics_timestamp,
930 server_id: *server_id,
931 diagnostics: diagnostics.iter().cloned().collect(),
932 }));
933 }
934
935 for (server_id, completions) in &self.completion_triggers_per_language_server {
936 operations.push(proto::serialize_operation(
937 &Operation::UpdateCompletionTriggers {
938 triggers: completions.iter().cloned().collect(),
939 lamport_timestamp: self.completion_triggers_timestamp,
940 server_id: *server_id,
941 },
942 ));
943 }
944
945 let text_operations = self.text.operations().clone();
946 cx.background_spawn(async move {
947 let since = since.unwrap_or_default();
948 operations.extend(
949 text_operations
950 .iter()
951 .filter(|(_, op)| !since.observed(op.timestamp()))
952 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
953 );
954 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
955 operations
956 })
957 }
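
    // Replication sketch (illustrative): a host sends the serialized buffer state plus
    // any operations the peer has not yet observed; the peer rebuilds the buffer with
    // `from_proto` and then feeds the deserialized operations to `apply_ops`. The
    // variable names below are hypothetical.
    //
    //     let state = buffer.read(cx).to_proto(cx);
    //     let ops = buffer.read(cx).serialize_ops(Some(peer_version), cx);
    //     // On the peer: `Buffer::from_proto(replica_id, capability, state, file)`,
    //     // then deserialize each operation and apply it with `apply_ops`.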
958
959 /// Assign a language to the buffer, returning the buffer.
960 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
961 self.set_language(Some(language), cx);
962 self
963 }
964
965 /// Returns the [`Capability`] of this buffer.
966 pub fn capability(&self) -> Capability {
967 self.capability
968 }
969
970 /// Whether this buffer can only be read.
971 pub fn read_only(&self) -> bool {
972 self.capability == Capability::ReadOnly
973 }
974
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
976 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
977 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
978 let snapshot = buffer.snapshot();
979 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
980 Self {
981 saved_mtime,
982 saved_version: buffer.version(),
983 preview_version: buffer.version(),
984 reload_task: None,
985 transaction_depth: 0,
986 was_dirty_before_starting_transaction: None,
987 has_unsaved_edits: Cell::new((buffer.version(), false)),
988 text: buffer,
989 branch_state: None,
990 file,
991 capability,
992 syntax_map,
993 reparse: None,
994 non_text_state_update_count: 0,
995 sync_parse_timeout: Duration::from_millis(1),
996 parse_status: watch::channel(ParseStatus::Idle),
997 autoindent_requests: Default::default(),
998 wait_for_autoindent_txs: Default::default(),
999 pending_autoindent: Default::default(),
1000 language: None,
1001 remote_selections: Default::default(),
1002 diagnostics: Default::default(),
1003 diagnostics_timestamp: Lamport::MIN,
1004 completion_triggers: Default::default(),
1005 completion_triggers_per_language_server: Default::default(),
1006 completion_triggers_timestamp: Lamport::MIN,
1007 deferred_ops: OperationQueue::new(),
1008 has_conflict: false,
1009 change_bits: Default::default(),
1010 _subscriptions: Vec::new(),
1011 }
1012 }
1013
1014 pub fn build_snapshot(
1015 text: Rope,
1016 language: Option<Arc<Language>>,
1017 language_registry: Option<Arc<LanguageRegistry>>,
1018 cx: &mut App,
1019 ) -> impl Future<Output = BufferSnapshot> + use<> {
1020 let entity_id = cx.reserve_entity::<Self>().entity_id();
1021 let buffer_id = entity_id.as_non_zero_u64().into();
1022 async move {
1023 let text =
1024 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1025 .snapshot();
1026 let mut syntax = SyntaxMap::new(&text).snapshot();
1027 if let Some(language) = language.clone() {
1028 let language_registry = language_registry.clone();
1029 syntax.reparse(&text, language_registry, language);
1030 }
1031 BufferSnapshot {
1032 text,
1033 syntax,
1034 file: None,
1035 diagnostics: Default::default(),
1036 remote_selections: Default::default(),
1037 language,
1038 non_text_state_update_count: 0,
1039 }
1040 }
1041 }
1042
1043 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1044 let entity_id = cx.reserve_entity::<Self>().entity_id();
1045 let buffer_id = entity_id.as_non_zero_u64().into();
1046 let text = TextBuffer::new_normalized(
1047 ReplicaId::LOCAL,
1048 buffer_id,
1049 Default::default(),
1050 Rope::new(),
1051 )
1052 .snapshot();
1053 let syntax = SyntaxMap::new(&text).snapshot();
1054 BufferSnapshot {
1055 text,
1056 syntax,
1057 file: None,
1058 diagnostics: Default::default(),
1059 remote_selections: Default::default(),
1060 language: None,
1061 non_text_state_update_count: 0,
1062 }
1063 }
1064
1065 #[cfg(any(test, feature = "test-support"))]
1066 pub fn build_snapshot_sync(
1067 text: Rope,
1068 language: Option<Arc<Language>>,
1069 language_registry: Option<Arc<LanguageRegistry>>,
1070 cx: &mut App,
1071 ) -> BufferSnapshot {
1072 let entity_id = cx.reserve_entity::<Self>().entity_id();
1073 let buffer_id = entity_id.as_non_zero_u64().into();
1074 let text =
1075 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1076 .snapshot();
1077 let mut syntax = SyntaxMap::new(&text).snapshot();
1078 if let Some(language) = language.clone() {
1079 syntax.reparse(&text, language_registry, language);
1080 }
1081 BufferSnapshot {
1082 text,
1083 syntax,
1084 file: None,
1085 diagnostics: Default::default(),
1086 remote_selections: Default::default(),
1087 language,
1088 non_text_state_update_count: 0,
1089 }
1090 }
1091
1092 /// Retrieve a snapshot of the buffer's current state. This is computationally
1093 /// cheap, and allows reading from the buffer on a background thread.
1094 pub fn snapshot(&self) -> BufferSnapshot {
1095 let text = self.text.snapshot();
1096 let mut syntax_map = self.syntax_map.lock();
1097 syntax_map.interpolate(&text);
1098 let syntax = syntax_map.snapshot();
1099
1100 BufferSnapshot {
1101 text,
1102 syntax,
1103 file: self.file.clone(),
1104 remote_selections: self.remote_selections.clone(),
1105 diagnostics: self.diagnostics.clone(),
1106 language: self.language.clone(),
1107 non_text_state_update_count: self.non_text_state_update_count,
1108 }
1109 }
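
    // Usage sketch (illustrative): a snapshot is cheap to take and can be moved to a
    // background task, so expensive reads never block the main thread.
    //
    //     let snapshot = buffer.read(cx).snapshot();
    //     cx.background_spawn(async move {
    //         // Search, diff, or otherwise read `snapshot` here without blocking the UI.
    //         let _text = snapshot.text();
    //     });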
1110
1111 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1112 let this = cx.entity();
1113 cx.new(|cx| {
1114 let mut branch = Self {
1115 branch_state: Some(BufferBranchState {
1116 base_buffer: this.clone(),
1117 merged_operations: Default::default(),
1118 }),
1119 language: self.language.clone(),
1120 has_conflict: self.has_conflict,
1121 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1122 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1123 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1124 };
1125 if let Some(language_registry) = self.language_registry() {
1126 branch.set_language_registry(language_registry);
1127 }
1128
1129 // Reparse the branch buffer so that we get syntax highlighting immediately.
1130 branch.reparse(cx);
1131
1132 branch
1133 })
1134 }
1135
1136 pub fn preview_edits(
1137 &self,
1138 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1139 cx: &App,
1140 ) -> Task<EditPreview> {
1141 let registry = self.language_registry();
1142 let language = self.language().cloned();
1143 let old_snapshot = self.text.snapshot();
1144 let mut branch_buffer = self.text.branch();
1145 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1146 cx.background_spawn(async move {
1147 if !edits.is_empty() {
1148 if let Some(language) = language.clone() {
1149 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1150 }
1151
1152 branch_buffer.edit(edits.iter().cloned());
1153 let snapshot = branch_buffer.snapshot();
1154 syntax_snapshot.interpolate(&snapshot);
1155
1156 if let Some(language) = language {
1157 syntax_snapshot.reparse(&snapshot, registry, language);
1158 }
1159 }
1160 EditPreview {
1161 old_snapshot,
1162 applied_edits_snapshot: branch_buffer.snapshot(),
1163 syntax_snapshot,
1164 }
1165 })
1166 }
1167
1168 /// Applies all of the changes in this buffer that intersect any of the
1169 /// given `ranges` to its base buffer.
1170 ///
1171 /// If `ranges` is empty, then all changes will be applied. This buffer must
1172 /// be a branch buffer to call this method.
1173 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1174 let Some(base_buffer) = self.base_buffer() else {
1175 debug_panic!("not a branch buffer");
1176 return;
1177 };
1178
1179 let mut ranges = if ranges.is_empty() {
1180 &[0..usize::MAX]
1181 } else {
1182 ranges.as_slice()
1183 }
1184 .iter()
1185 .peekable();
1186
1187 let mut edits = Vec::new();
1188 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1189 let mut is_included = false;
1190 while let Some(range) = ranges.peek() {
1191 if range.end < edit.new.start {
1192 ranges.next().unwrap();
1193 } else {
1194 if range.start <= edit.new.end {
1195 is_included = true;
1196 }
1197 break;
1198 }
1199 }
1200
1201 if is_included {
1202 edits.push((
1203 edit.old.clone(),
1204 self.text_for_range(edit.new.clone()).collect::<String>(),
1205 ));
1206 }
1207 }
1208
1209 let operation = base_buffer.update(cx, |base_buffer, cx| {
1210 // cx.emit(BufferEvent::DiffBaseChanged);
1211 base_buffer.edit(edits, None, cx)
1212 });
1213
1214 if let Some(operation) = operation
1215 && let Some(BufferBranchState {
1216 merged_operations, ..
1217 }) = &mut self.branch_state
1218 {
1219 merged_operations.push(operation);
1220 }
1221 }
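
    // Branch workflow sketch (illustrative): stage speculative edits on a branch of a
    // buffer, then merge some or all of them back into the base buffer.
    //
    //     let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    //     branch.update(cx, |branch, cx| {
    //         branch.edit([(0..0, "// staged change\n")], None, cx);
    //         // An empty `ranges` vec merges every change back into the base buffer.
    //         branch.merge_into_base(Vec::new(), cx);
    //     });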
1222
1223 fn on_base_buffer_event(
1224 &mut self,
1225 _: Entity<Buffer>,
1226 event: &BufferEvent,
1227 cx: &mut Context<Self>,
1228 ) {
1229 let BufferEvent::Operation { operation, .. } = event else {
1230 return;
1231 };
1232 let Some(BufferBranchState {
1233 merged_operations, ..
1234 }) = &mut self.branch_state
1235 else {
1236 return;
1237 };
1238
1239 let mut operation_to_undo = None;
1240 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1241 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1242 {
1243 merged_operations.remove(ix);
1244 operation_to_undo = Some(operation.timestamp);
1245 }
1246
1247 self.apply_ops([operation.clone()], cx);
1248
1249 if let Some(timestamp) = operation_to_undo {
1250 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1251 self.undo_operations(counts, cx);
1252 }
1253 }
1254
1255 #[cfg(test)]
1256 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1257 &self.text
1258 }
1259
1260 /// Retrieve a snapshot of the buffer's raw text, without any
1261 /// language-related state like the syntax tree or diagnostics.
1262 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1263 self.text.snapshot()
1264 }
1265
1266 /// The file associated with the buffer, if any.
1267 pub fn file(&self) -> Option<&Arc<dyn File>> {
1268 self.file.as_ref()
1269 }
1270
1271 /// The version of the buffer that was last saved or reloaded from disk.
1272 pub fn saved_version(&self) -> &clock::Global {
1273 &self.saved_version
1274 }
1275
1276 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1277 pub fn saved_mtime(&self) -> Option<MTime> {
1278 self.saved_mtime
1279 }
1280
1281 /// Assign a language to the buffer.
1282 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1283 self.non_text_state_update_count += 1;
1284 self.syntax_map.lock().clear(&self.text);
1285 self.language = language;
1286 self.was_changed();
1287 self.reparse(cx);
1288 cx.emit(BufferEvent::LanguageChanged);
1289 }
1290
1291 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1292 /// other languages if parts of the buffer are written in different languages.
1293 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1294 self.syntax_map
1295 .lock()
1296 .set_language_registry(language_registry);
1297 }
1298
1299 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1300 self.syntax_map.lock().language_registry()
1301 }
1302
1303 /// Assign the line ending type to the buffer.
1304 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1305 self.text.set_line_ending(line_ending);
1306
1307 let lamport_timestamp = self.text.lamport_clock.tick();
1308 self.send_operation(
1309 Operation::UpdateLineEnding {
1310 line_ending,
1311 lamport_timestamp,
1312 },
1313 true,
1314 cx,
1315 );
1316 }
1317
1318 /// Assign the buffer a new [`Capability`].
1319 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1320 if self.capability != capability {
1321 self.capability = capability;
1322 cx.emit(BufferEvent::CapabilityChanged)
1323 }
1324 }
1325
1326 /// This method is called to signal that the buffer has been saved.
1327 pub fn did_save(
1328 &mut self,
1329 version: clock::Global,
1330 mtime: Option<MTime>,
1331 cx: &mut Context<Self>,
1332 ) {
1333 self.saved_version = version.clone();
1334 self.has_unsaved_edits.set((version, false));
1335 self.has_conflict = false;
1336 self.saved_mtime = mtime;
1337 self.was_changed();
1338 cx.emit(BufferEvent::Saved);
1339 cx.notify();
1340 }
1341
1342 /// Reloads the contents of the buffer from disk.
1343 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1344 let (tx, rx) = futures::channel::oneshot::channel();
1345 let prev_version = self.text.version();
1346 self.reload_task = Some(cx.spawn(async move |this, cx| {
1347 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1348 let file = this.file.as_ref()?.as_local()?;
1349
1350 Some((file.disk_state().mtime(), file.load(cx)))
1351 })?
1352 else {
1353 return Ok(());
1354 };
1355
1356 let new_text = new_text.await?;
1357 let diff = this
1358 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1359 .await;
1360 this.update(cx, |this, cx| {
1361 if this.version() == diff.base_version {
1362 this.finalize_last_transaction();
1363 this.apply_diff(diff, cx);
1364 tx.send(this.finalize_last_transaction().cloned()).ok();
1365 this.has_conflict = false;
1366 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1367 } else {
1368 if !diff.edits.is_empty()
1369 || this
1370 .edits_since::<usize>(&diff.base_version)
1371 .next()
1372 .is_some()
1373 {
1374 this.has_conflict = true;
1375 }
1376
1377 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1378 }
1379
1380 this.reload_task.take();
1381 })
1382 }));
1383 rx
1384 }
1385
1386 /// This method is called to signal that the buffer has been reloaded.
1387 pub fn did_reload(
1388 &mut self,
1389 version: clock::Global,
1390 line_ending: LineEnding,
1391 mtime: Option<MTime>,
1392 cx: &mut Context<Self>,
1393 ) {
1394 self.saved_version = version;
1395 self.has_unsaved_edits
1396 .set((self.saved_version.clone(), false));
1397 self.text.set_line_ending(line_ending);
1398 self.saved_mtime = mtime;
1399 cx.emit(BufferEvent::Reloaded);
1400 cx.notify();
1401 }
1402
1403 /// Updates the [`File`] backing this buffer. This should be called when
1404 /// the file has changed or has been deleted.
1405 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1406 let was_dirty = self.is_dirty();
1407 let mut file_changed = false;
1408
1409 if let Some(old_file) = self.file.as_ref() {
1410 if new_file.path() != old_file.path() {
1411 file_changed = true;
1412 }
1413
1414 let old_state = old_file.disk_state();
1415 let new_state = new_file.disk_state();
1416 if old_state != new_state {
1417 file_changed = true;
1418 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1419 cx.emit(BufferEvent::ReloadNeeded)
1420 }
1421 }
1422 } else {
1423 file_changed = true;
1424 };
1425
1426 self.file = Some(new_file);
1427 if file_changed {
1428 self.was_changed();
1429 self.non_text_state_update_count += 1;
1430 if was_dirty != self.is_dirty() {
1431 cx.emit(BufferEvent::DirtyChanged);
1432 }
1433 cx.emit(BufferEvent::FileHandleChanged);
1434 cx.notify();
1435 }
1436 }
1437
1438 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1439 Some(self.branch_state.as_ref()?.base_buffer.clone())
1440 }
1441
1442 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1443 pub fn language(&self) -> Option<&Arc<Language>> {
1444 self.language.as_ref()
1445 }
1446
1447 /// Returns the [`Language`] at the given location.
1448 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1449 let offset = position.to_offset(self);
1450 let mut is_first = true;
1451 let start_anchor = self.anchor_before(offset);
1452 let end_anchor = self.anchor_after(offset);
1453 self.syntax_map
1454 .lock()
1455 .layers_for_range(offset..offset, &self.text, false)
1456 .filter(|layer| {
1457 if is_first {
1458 is_first = false;
1459 return true;
1460 }
1461
1462 layer
1463 .included_sub_ranges
1464 .map(|sub_ranges| {
1465 sub_ranges.iter().any(|sub_range| {
1466 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1467 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1468 !is_before_start && !is_after_end
1469 })
1470 })
1471 .unwrap_or(true)
1472 })
1473 .last()
1474 .map(|info| info.language.clone())
1475 .or_else(|| self.language.clone())
1476 }
1477
1478 /// Returns each [`Language`] for the active syntax layers at the given location.
1479 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1480 let offset = position.to_offset(self);
1481 let mut languages: Vec<Arc<Language>> = self
1482 .syntax_map
1483 .lock()
1484 .layers_for_range(offset..offset, &self.text, false)
1485 .map(|info| info.language.clone())
1486 .collect();
1487
1488 if languages.is_empty()
1489 && let Some(buffer_language) = self.language()
1490 {
1491 languages.push(buffer_language.clone());
1492 }
1493
1494 languages
1495 }
1496
1497 /// An integer version number that accounts for all updates besides
1498 /// the buffer's text itself (which is versioned via a version vector).
1499 pub fn non_text_state_update_count(&self) -> usize {
1500 self.non_text_state_update_count
1501 }
1502
1503 /// Whether the buffer is being parsed in the background.
1504 #[cfg(any(test, feature = "test-support"))]
1505 pub fn is_parsing(&self) -> bool {
1506 self.reparse.is_some()
1507 }
1508
1509 /// Indicates whether the buffer contains any regions that may be
1510 /// written in a language that hasn't been loaded yet.
1511 pub fn contains_unknown_injections(&self) -> bool {
1512 self.syntax_map.lock().contains_unknown_injections()
1513 }
1514
1515 #[cfg(any(test, feature = "test-support"))]
1516 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1517 self.sync_parse_timeout = timeout;
1518 }
1519
1520 /// Called after an edit to synchronize the buffer's main parse tree with
1521 /// the buffer's new underlying state.
1522 ///
1523 /// Locks the syntax map and interpolates the edits since the last reparse
1524 /// into the foreground syntax tree.
1525 ///
1526 /// Then takes a stable snapshot of the syntax map before unlocking it.
1527 /// The snapshot with the interpolated edits is sent to a background thread,
1528 /// where we ask Tree-sitter to perform an incremental parse.
1529 ///
1530 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1531 /// waiting on the parse to complete. As soon as it completes, we proceed
1532 /// synchronously, unless a 1ms timeout elapses.
1533 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree still
    /// in the foreground. When the background parse completes, we call back into
    /// the main thread and assign the newly parsed state in the foreground.
1538 ///
1539 /// If the buffer or grammar changed since the start of the background parse,
1540 /// initiate an additional reparse recursively. To avoid concurrent parses
1541 /// for the same buffer, we only initiate a new parse if we are not already
1542 /// parsing in the background.
1543 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1544 if self.reparse.is_some() {
1545 return;
1546 }
1547 let language = if let Some(language) = self.language.clone() {
1548 language
1549 } else {
1550 return;
1551 };
1552
1553 let text = self.text_snapshot();
1554 let parsed_version = self.version();
1555
1556 let mut syntax_map = self.syntax_map.lock();
1557 syntax_map.interpolate(&text);
1558 let language_registry = syntax_map.language_registry();
1559 let mut syntax_snapshot = syntax_map.snapshot();
1560 drop(syntax_map);
1561
1562 let parse_task = cx.background_spawn({
1563 let language = language.clone();
1564 let language_registry = language_registry.clone();
1565 async move {
1566 syntax_snapshot.reparse(&text, language_registry, language);
1567 syntax_snapshot
1568 }
1569 });
1570
1571 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1572 match cx
1573 .background_executor()
1574 .block_with_timeout(self.sync_parse_timeout, parse_task)
1575 {
1576 Ok(new_syntax_snapshot) => {
1577 self.did_finish_parsing(new_syntax_snapshot, cx);
1578 self.reparse = None;
1579 }
1580 Err(parse_task) => {
1581 // todo(lw): hot foreground spawn
1582 self.reparse = Some(cx.spawn(async move |this, cx| {
1583 let new_syntax_map = cx.background_spawn(parse_task).await;
1584 this.update(cx, move |this, cx| {
1585 let grammar_changed = || {
1586 this.language.as_ref().is_none_or(|current_language| {
1587 !Arc::ptr_eq(&language, current_language)
1588 })
1589 };
1590 let language_registry_changed = || {
1591 new_syntax_map.contains_unknown_injections()
1592 && language_registry.is_some_and(|registry| {
1593 registry.version() != new_syntax_map.language_registry_version()
1594 })
1595 };
1596 let parse_again = this.version.changed_since(&parsed_version)
1597 || language_registry_changed()
1598 || grammar_changed();
1599 this.did_finish_parsing(new_syntax_map, cx);
1600 this.reparse = None;
1601 if parse_again {
1602 this.reparse(cx);
1603 }
1604 })
1605 .ok();
1606 }));
1607 }
1608 }
1609 }
1610
1611 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1612 self.was_changed();
1613 self.non_text_state_update_count += 1;
1614 self.syntax_map.lock().did_parse(syntax_snapshot);
1615 self.request_autoindent(cx);
1616 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1617 cx.emit(BufferEvent::Reparsed);
1618 cx.notify();
1619 }
1620
1621 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1622 self.parse_status.1.clone()
1623 }
1624
    /// Waits until the buffer is no longer parsing.
1626 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1627 let mut parse_status = self.parse_status();
1628 async move {
1629 while *parse_status.borrow() != ParseStatus::Idle {
1630 if parse_status.changed().await.is_err() {
1631 break;
1632 }
1633 }
1634 }
1635 }
1636
1637 /// Assign to the buffer a set of diagnostics created by a given language server.
1638 pub fn update_diagnostics(
1639 &mut self,
1640 server_id: LanguageServerId,
1641 diagnostics: DiagnosticSet,
1642 cx: &mut Context<Self>,
1643 ) {
1644 let lamport_timestamp = self.text.lamport_clock.tick();
1645 let op = Operation::UpdateDiagnostics {
1646 server_id,
1647 diagnostics: diagnostics.iter().cloned().collect(),
1648 lamport_timestamp,
1649 };
1650
1651 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1652 self.send_operation(op, true, cx);
1653 }
1654
1655 pub fn buffer_diagnostics(
1656 &self,
1657 for_server: Option<LanguageServerId>,
1658 ) -> Vec<&DiagnosticEntry<Anchor>> {
1659 match for_server {
1660 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1661 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1662 Err(_) => Vec::new(),
1663 },
1664 None => self
1665 .diagnostics
1666 .iter()
1667 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1668 .collect(),
1669 }
1670 }
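
    // Usage sketch (illustrative): pass `None` to collect diagnostics from every
    // language server, or a specific id to restrict the result to one server.
    // `server_id` below is hypothetical.
    //
    //     let all_entries = buffer.buffer_diagnostics(None);
    //     let from_one_server = buffer.buffer_diagnostics(Some(server_id));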
1671
1672 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1673 if let Some(indent_sizes) = self.compute_autoindents() {
1674 let indent_sizes = cx.background_spawn(indent_sizes);
1675 match cx
1676 .background_executor()
1677 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1678 {
1679 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1680 Err(indent_sizes) => {
1681 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1682 let indent_sizes = indent_sizes.await;
1683 this.update(cx, |this, cx| {
1684 this.apply_autoindents(indent_sizes, cx);
1685 })
1686 .ok();
1687 }));
1688 }
1689 }
1690 } else {
1691 self.autoindent_requests.clear();
1692 for tx in self.wait_for_autoindent_txs.drain(..) {
1693 tx.send(()).ok();
1694 }
1695 }
1696 }
1697
1698 fn compute_autoindents(
1699 &self,
1700 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1701 let max_rows_between_yields = 100;
1702 let snapshot = self.snapshot();
1703 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1704 return None;
1705 }
1706
1707 let autoindent_requests = self.autoindent_requests.clone();
1708 Some(async move {
1709 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1710 for request in autoindent_requests {
1711 // Resolve each edited range to its row in the current buffer and in the
1712 // buffer before this batch of edits.
1713 let mut row_ranges = Vec::new();
1714 let mut old_to_new_rows = BTreeMap::new();
1715 let mut language_indent_sizes_by_new_row = Vec::new();
1716 for entry in &request.entries {
1717 let position = entry.range.start;
1718 let new_row = position.to_point(&snapshot).row;
1719 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1720 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1721
1722 if !entry.first_line_is_new {
1723 let old_row = position.to_point(&request.before_edit).row;
1724 old_to_new_rows.insert(old_row, new_row);
1725 }
1726 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1727 }
1728
1729 // Build a map containing the suggested indentation for each of the edited lines
1730 // with respect to the state of the buffer before these edits. This map is keyed
1731 // by the rows for these lines in the current state of the buffer.
1732 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1733 let old_edited_ranges =
1734 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1735 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1736 let mut language_indent_size = IndentSize::default();
1737 for old_edited_range in old_edited_ranges {
1738 let suggestions = request
1739 .before_edit
1740 .suggest_autoindents(old_edited_range.clone())
1741 .into_iter()
1742 .flatten();
1743 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1744 if let Some(suggestion) = suggestion {
1745 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1746
1747 // Find the indent size based on the language for this row.
1748 while let Some((row, size)) = language_indent_sizes.peek() {
1749 if *row > new_row {
1750 break;
1751 }
1752 language_indent_size = *size;
1753 language_indent_sizes.next();
1754 }
1755
1756 let suggested_indent = old_to_new_rows
1757 .get(&suggestion.basis_row)
1758 .and_then(|from_row| {
1759 Some(old_suggestions.get(from_row).copied()?.0)
1760 })
1761 .unwrap_or_else(|| {
1762 request
1763 .before_edit
1764 .indent_size_for_line(suggestion.basis_row)
1765 })
1766 .with_delta(suggestion.delta, language_indent_size);
1767 old_suggestions
1768 .insert(new_row, (suggested_indent, suggestion.within_error));
1769 }
1770 }
1771 yield_now().await;
1772 }
1773
1774 // Compute new suggestions for each line, but only include them in the result
1775 // if they differ from the old suggestion for that line.
1776 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1777 let mut language_indent_size = IndentSize::default();
1778 for (row_range, original_indent_column) in row_ranges {
1779 let new_edited_row_range = if request.is_block_mode {
1780 row_range.start..row_range.start + 1
1781 } else {
1782 row_range.clone()
1783 };
1784
1785 let suggestions = snapshot
1786 .suggest_autoindents(new_edited_row_range.clone())
1787 .into_iter()
1788 .flatten();
1789 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1790 if let Some(suggestion) = suggestion {
1791 // Find the indent size based on the language for this row.
1792 while let Some((row, size)) = language_indent_sizes.peek() {
1793 if *row > new_row {
1794 break;
1795 }
1796 language_indent_size = *size;
1797 language_indent_sizes.next();
1798 }
1799
1800 let suggested_indent = indent_sizes
1801 .get(&suggestion.basis_row)
1802 .copied()
1803 .map(|e| e.0)
1804 .unwrap_or_else(|| {
1805 snapshot.indent_size_for_line(suggestion.basis_row)
1806 })
1807 .with_delta(suggestion.delta, language_indent_size);
1808
1809 if old_suggestions.get(&new_row).is_none_or(
1810 |(old_indentation, was_within_error)| {
1811 suggested_indent != *old_indentation
1812 && (!suggestion.within_error || *was_within_error)
1813 },
1814 ) {
1815 indent_sizes.insert(
1816 new_row,
1817 (suggested_indent, request.ignore_empty_lines),
1818 );
1819 }
1820 }
1821 }
1822
1823 if let (true, Some(original_indent_column)) =
1824 (request.is_block_mode, original_indent_column)
1825 {
1826 let new_indent =
1827 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1828 *indent
1829 } else {
1830 snapshot.indent_size_for_line(row_range.start)
1831 };
1832 let delta = new_indent.len as i64 - original_indent_column as i64;
1833 if delta != 0 {
1834 for row in row_range.skip(1) {
1835 indent_sizes.entry(row).or_insert_with(|| {
1836 let mut size = snapshot.indent_size_for_line(row);
1837 if size.kind == new_indent.kind {
1838 match delta.cmp(&0) {
1839 Ordering::Greater => size.len += delta as u32,
1840 Ordering::Less => {
1841 size.len = size.len.saturating_sub(-delta as u32)
1842 }
1843 Ordering::Equal => {}
1844 }
1845 }
1846 (size, request.ignore_empty_lines)
1847 });
1848 }
1849 }
1850 }
1851
1852 yield_now().await;
1853 }
1854 }
1855
1856 indent_sizes
1857 .into_iter()
1858 .filter_map(|(row, (indent, ignore_empty_lines))| {
1859 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1860 None
1861 } else {
1862 Some((row, indent))
1863 }
1864 })
1865 .collect()
1866 })
1867 }
1868
1869 fn apply_autoindents(
1870 &mut self,
1871 indent_sizes: BTreeMap<u32, IndentSize>,
1872 cx: &mut Context<Self>,
1873 ) {
1874 self.autoindent_requests.clear();
1875 for tx in self.wait_for_autoindent_txs.drain(..) {
1876 tx.send(()).ok();
1877 }
1878
1879 let edits: Vec<_> = indent_sizes
1880 .into_iter()
1881 .filter_map(|(row, indent_size)| {
1882 let current_size = indent_size_for_line(self, row);
1883 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1884 })
1885 .collect();
1886
1887 let preserve_preview = self.preserve_preview();
1888 self.edit(edits, None, cx);
1889 if preserve_preview {
1890 self.refresh_preview();
1891 }
1892 }
1893
1894 /// Create a minimal edit that will cause the given row to be indented
1895 /// with the given size. After applying this edit, the length of the line
1896 /// will always be at least `new_size.len`.
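///
/// # Example
///
/// A small sketch of the kind of edit this produces (marked `ignore` since no
/// buffer is needed to call this associated function):
///
/// ```ignore
/// // Growing a 2-space indent on row 3 to 4 spaces inserts two spaces at column 0.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(
///     edit,
///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
/// );
/// ```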
1897 pub fn edit_for_indent_size_adjustment(
1898 row: u32,
1899 current_size: IndentSize,
1900 new_size: IndentSize,
1901 ) -> Option<(Range<Point>, String)> {
1902 if new_size.kind == current_size.kind {
1903 match new_size.len.cmp(&current_size.len) {
1904 Ordering::Greater => {
1905 let point = Point::new(row, 0);
1906 Some((
1907 point..point,
1908 iter::repeat(new_size.char())
1909 .take((new_size.len - current_size.len) as usize)
1910 .collect::<String>(),
1911 ))
1912 }
1913
1914 Ordering::Less => Some((
1915 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1916 String::new(),
1917 )),
1918
1919 Ordering::Equal => None,
1920 }
1921 } else {
1922 Some((
1923 Point::new(row, 0)..Point::new(row, current_size.len),
1924 iter::repeat(new_size.char())
1925 .take(new_size.len as usize)
1926 .collect::<String>(),
1927 ))
1928 }
1929 }
1930
1931 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1932 /// and the given new text.
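///
/// # Example
///
/// A sketch of the intended flow (not a doctest); `buffer` is assumed to be an
/// `Entity<Buffer>`, `new_text` a `String`, and `cx` an `&mut App`:
///
/// ```ignore
/// // Compute the diff on a background thread, then apply it back to the buffer.
/// let diff_task = buffer.read(cx).diff(new_text, cx);
/// cx.spawn(async move |cx| {
///     let diff = diff_task.await;
///     buffer
///         .update(cx, |buffer, cx| buffer.apply_diff(diff, cx))
///         .ok();
/// })
/// .detach();
/// ```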
1933 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1934 let old_text = self.as_rope().clone();
1935 let base_version = self.version();
1936 cx.background_executor()
1937 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1938 let old_text = old_text.to_string();
1939 let line_ending = LineEnding::detect(&new_text);
1940 LineEnding::normalize(&mut new_text);
1941 let edits = text_diff(&old_text, &new_text);
1942 Diff {
1943 base_version,
1944 line_ending,
1945 edits,
1946 }
1947 })
1948 }
1949
1950 /// Spawns a background task that searches the buffer for any whitespace
1951 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1952 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1953 let old_text = self.as_rope().clone();
1954 let line_ending = self.line_ending();
1955 let base_version = self.version();
1956 cx.background_spawn(async move {
1957 let ranges = trailing_whitespace_ranges(&old_text);
1958 let empty = Arc::<str>::from("");
1959 Diff {
1960 base_version,
1961 line_ending,
1962 edits: ranges
1963 .into_iter()
1964 .map(|range| (range, empty.clone()))
1965 .collect(),
1966 }
1967 })
1968 }
1969
1970 /// Ensures that the buffer ends with a single newline character, and
1971 /// no other whitespace. Skips if the buffer is empty.
1972 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1973 let len = self.len();
1974 if len == 0 {
1975 return;
1976 }
1977 let mut offset = len;
1978 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1979 let non_whitespace_len = chunk
1980 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1981 .len();
1982 offset -= chunk.len();
1983 offset += non_whitespace_len;
1984 if non_whitespace_len != 0 {
1985 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1986 return;
1987 }
1988 break;
1989 }
1990 }
1991 self.edit([(offset..len, "\n")], None, cx);
1992 }
1993
1994 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1995 /// calculated, then adjust the diff to account for those changes, and discard any
1996 /// parts of the diff that conflict with those changes.
1997 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1998 let snapshot = self.snapshot();
1999 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2000 let mut delta = 0;
2001 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2002 while let Some(edit_since) = edits_since.peek() {
2003 // If the edit occurs after a diff hunk, then it does not
2004 // affect that hunk.
2005 if edit_since.old.start > range.end {
2006 break;
2007 }
2008 // If the edit precedes the diff hunk, then adjust the hunk
2009 // to reflect the edit.
2010 else if edit_since.old.end < range.start {
2011 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2012 edits_since.next();
2013 }
2014 // If the edit intersects a diff hunk, then discard that hunk.
2015 else {
2016 return None;
2017 }
2018 }
2019
2020 let start = (range.start as i64 + delta) as usize;
2021 let end = (range.end as i64 + delta) as usize;
2022 Some((start..end, new_text))
2023 });
2024
2025 self.start_transaction();
2026 self.text.set_line_ending(diff.line_ending);
2027 self.edit(adjusted_edits, None, cx);
2028 self.end_transaction(cx)
2029 }
2030
2031 pub fn has_unsaved_edits(&self) -> bool {
2032 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2033
2034 if last_version == self.version {
2035 self.has_unsaved_edits
2036 .set((last_version, has_unsaved_edits));
2037 return has_unsaved_edits;
2038 }
2039
2040 let has_edits = self.has_edits_since(&self.saved_version);
2041 self.has_unsaved_edits
2042 .set((self.version.clone(), has_edits));
2043 has_edits
2044 }
2045
2046 /// Checks if the buffer has unsaved changes.
2047 pub fn is_dirty(&self) -> bool {
2048 if self.capability == Capability::ReadOnly {
2049 return false;
2050 }
2051 if self.has_conflict {
2052 return true;
2053 }
2054 match self.file.as_ref().map(|f| f.disk_state()) {
2055 Some(DiskState::New) | Some(DiskState::Deleted) => {
2056 !self.is_empty() && self.has_unsaved_edits()
2057 }
2058 _ => self.has_unsaved_edits(),
2059 }
2060 }
2061
2062 /// Checks if the buffer and its file have both changed since the buffer
2063 /// was last saved or reloaded.
2064 pub fn has_conflict(&self) -> bool {
2065 if self.has_conflict {
2066 return true;
2067 }
2068 let Some(file) = self.file.as_ref() else {
2069 return false;
2070 };
2071 match file.disk_state() {
2072 DiskState::New => false,
2073 DiskState::Present { mtime } => match self.saved_mtime {
2074 Some(saved_mtime) => {
2075 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2076 }
2077 None => true,
2078 },
2079 DiskState::Deleted => false,
2080 DiskState::Remote => false,
2081 }
2082 }
2083
2084 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2085 pub fn subscribe(&mut self) -> Subscription {
2086 self.text.subscribe()
2087 }
2088
2089 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2090 ///
2091 /// This allows downstream code to check if the buffer's text has changed without
2092 /// waiting for an effect cycle, which would be required if using events.
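///
/// # Example
///
/// A minimal sketch, assuming synchronous access to the `Buffer`:
///
/// ```ignore
/// use std::{cell::Cell, rc::Rc};
///
/// // Keep the strong reference alive for as long as you care about changes.
/// let change_bit = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&change_bit));
///
/// // ...after edits have been applied elsewhere...
/// if change_bit.take() {
///     // The buffer's text changed since the bit was last cleared.
/// }
/// ```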
2093 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2094 if let Err(ix) = self
2095 .change_bits
2096 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2097 {
2098 self.change_bits.insert(ix, bit);
2099 }
2100 }
2101
2102 /// Set the change bit for all "listeners".
2103 fn was_changed(&mut self) {
2104 self.change_bits.retain(|change_bit| {
2105 change_bit
2106 .upgrade()
2107 .inspect(|bit| {
2108 _ = bit.replace(true);
2109 })
2110 .is_some()
2111 });
2112 }
2113
2114 /// Starts a transaction, if one is not already in-progress. When undoing or
2115 /// redoing edits, all of the edits performed within a transaction are undone
2116 /// or redone together.
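///
/// # Example
///
/// A sketch of grouping two edits so that a single undo reverts both (`cx` is
/// assumed to be this buffer's `&mut Context<Buffer>`):
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "fn main() {\n")], None, cx);
/// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
/// let transaction_id = buffer.end_transaction(cx);
/// ```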
2117 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2118 self.start_transaction_at(Instant::now())
2119 }
2120
2121 /// Starts a transaction, providing the current time. Subsequent transactions
2122 /// that occur within a short period of time will be grouped together. This
2123 /// is controlled by the buffer's undo grouping duration.
2124 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2125 self.transaction_depth += 1;
2126 if self.was_dirty_before_starting_transaction.is_none() {
2127 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2128 }
2129 self.text.start_transaction_at(now)
2130 }
2131
2132 /// Terminates the current transaction, if this is the outermost transaction.
2133 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2134 self.end_transaction_at(Instant::now(), cx)
2135 }
2136
2137 /// Terminates the current transaction, providing the current time. Subsequent transactions
2138 /// that occur within a short period of time will be grouped together. This
2139 /// is controlled by the buffer's undo grouping duration.
2140 pub fn end_transaction_at(
2141 &mut self,
2142 now: Instant,
2143 cx: &mut Context<Self>,
2144 ) -> Option<TransactionId> {
2145 assert!(self.transaction_depth > 0);
2146 self.transaction_depth -= 1;
2147 let was_dirty = if self.transaction_depth == 0 {
2148 self.was_dirty_before_starting_transaction.take().unwrap()
2149 } else {
2150 false
2151 };
2152 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2153 self.did_edit(&start_version, was_dirty, cx);
2154 Some(transaction_id)
2155 } else {
2156 None
2157 }
2158 }
2159
2160 /// Manually add a transaction to the buffer's undo history.
2161 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2162 self.text.push_transaction(transaction, now);
2163 }
2164
2165 /// Differs from `push_transaction` in that it does not clear the redo
2166 /// stack. Intended to be used to create a parent transaction to merge
2167 /// potential child transactions into.
2168 ///
2169 /// The caller is responsible for removing it from the undo history using
2170 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2171 /// are merged into this transaction, the caller is responsible for ensuring
2172 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2173 /// cleared is to create transactions with the usual `start_transaction` and
2174 /// `end_transaction` methods and merge the resulting transactions into
2175 /// the transaction created by this method.
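///
/// # Example
///
/// A sketch of the workflow described above (`cx` is assumed to be this
/// buffer's `&mut Context<Buffer>`):
///
/// ```ignore
/// let parent = buffer.push_empty_transaction(Instant::now());
///
/// buffer.start_transaction();
/// buffer.edit([(0..0, "child edit")], None, cx);
/// if let Some(child) = buffer.end_transaction(cx) {
///     // The child was created with start/end_transaction, so the redo stack
///     // has already been cleared; merge it into the parent.
///     buffer.merge_transactions(child, parent);
/// } else {
///     // Nothing was merged, so drop the empty parent from the undo history.
///     buffer.forget_transaction(parent);
/// }
/// ```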
2176 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2177 self.text.push_empty_transaction(now)
2178 }
2179
2180 /// Prevent the last transaction from being grouped with any subsequent transactions,
2181 /// even if they occur within the buffer's undo grouping duration.
2182 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2183 self.text.finalize_last_transaction()
2184 }
2185
2186 /// Manually group all changes since a given transaction.
2187 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2188 self.text.group_until_transaction(transaction_id);
2189 }
2190
2191 /// Manually remove a transaction from the buffer's undo history.
2192 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2193 self.text.forget_transaction(transaction_id)
2194 }
2195
2196 /// Retrieve a transaction from the buffer's undo history.
2197 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2198 self.text.get_transaction(transaction_id)
2199 }
2200
2201 /// Manually merge two transactions in the buffer's undo history.
2202 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2203 self.text.merge_transactions(transaction, destination);
2204 }
2205
2206 /// Waits for the buffer to receive operations with the given timestamps.
2207 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2208 &mut self,
2209 edit_ids: It,
2210 ) -> impl Future<Output = Result<()>> + use<It> {
2211 self.text.wait_for_edits(edit_ids)
2212 }
2213
2214 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2215 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2216 &mut self,
2217 anchors: It,
2218 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2219 self.text.wait_for_anchors(anchors)
2220 }
2221
2222 /// Waits for the buffer to receive operations up to the given version.
2223 pub fn wait_for_version(
2224 &mut self,
2225 version: clock::Global,
2226 ) -> impl Future<Output = Result<()>> + use<> {
2227 self.text.wait_for_version(version)
2228 }
2229
2230 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2231 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2232 pub fn give_up_waiting(&mut self) {
2233 self.text.give_up_waiting();
2234 }
2235
2236 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2237 let mut rx = None;
2238 if !self.autoindent_requests.is_empty() {
2239 let channel = oneshot::channel();
2240 self.wait_for_autoindent_txs.push(channel.0);
2241 rx = Some(channel.1);
2242 }
2243 rx
2244 }
2245
2246 /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2247 pub fn set_active_selections(
2248 &mut self,
2249 selections: Arc<[Selection<Anchor>]>,
2250 line_mode: bool,
2251 cursor_shape: CursorShape,
2252 cx: &mut Context<Self>,
2253 ) {
2254 let lamport_timestamp = self.text.lamport_clock.tick();
2255 self.remote_selections.insert(
2256 self.text.replica_id(),
2257 SelectionSet {
2258 selections: selections.clone(),
2259 lamport_timestamp,
2260 line_mode,
2261 cursor_shape,
2262 },
2263 );
2264 self.send_operation(
2265 Operation::UpdateSelections {
2266 selections,
2267 line_mode,
2268 lamport_timestamp,
2269 cursor_shape,
2270 },
2271 true,
2272 cx,
2273 );
2274 self.non_text_state_update_count += 1;
2275 cx.notify();
2276 }
2277
2278 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2279 /// this replica.
2280 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2281 if self
2282 .remote_selections
2283 .get(&self.text.replica_id())
2284 .is_none_or(|set| !set.selections.is_empty())
2285 {
2286 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2287 }
2288 }
2289
2290 pub fn set_agent_selections(
2291 &mut self,
2292 selections: Arc<[Selection<Anchor>]>,
2293 line_mode: bool,
2294 cursor_shape: CursorShape,
2295 cx: &mut Context<Self>,
2296 ) {
2297 let lamport_timestamp = self.text.lamport_clock.tick();
2298 self.remote_selections.insert(
2299 ReplicaId::AGENT,
2300 SelectionSet {
2301 selections,
2302 lamport_timestamp,
2303 line_mode,
2304 cursor_shape,
2305 },
2306 );
2307 self.non_text_state_update_count += 1;
2308 cx.notify();
2309 }
2310
2311 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2312 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2313 }
2314
2315 /// Replaces the buffer's entire text.
2316 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2317 where
2318 T: Into<Arc<str>>,
2319 {
2320 self.autoindent_requests.clear();
2321 self.edit([(0..self.len(), text)], None, cx)
2322 }
2323
2324 /// Appends the given text to the end of the buffer.
2325 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2326 where
2327 T: Into<Arc<str>>,
2328 {
2329 self.edit([(self.len()..self.len(), text)], None, cx)
2330 }
2331
2332 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2333 /// delete, and a string of text to insert at that location.
2334 ///
2335 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2336 /// request for the edited ranges, which will be processed when the buffer finishes
2337 /// parsing.
2338 ///
2339 /// Parsing takes place at the end of a transaction, and may compute synchronously
2340 /// or asynchronously, depending on the changes.
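///
/// # Example
///
/// A sketch (not a doctest) of replacing a range and requesting
/// auto-indentation for each edited line; `cx` is assumed to be this buffer's
/// `&mut Context<Buffer>`:
///
/// ```ignore
/// buffer.edit(
///     [(Point::new(0, 0)..Point::new(0, 5), "if ready {\n    go();\n}")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```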
2341 pub fn edit<I, S, T>(
2342 &mut self,
2343 edits_iter: I,
2344 autoindent_mode: Option<AutoindentMode>,
2345 cx: &mut Context<Self>,
2346 ) -> Option<clock::Lamport>
2347 where
2348 I: IntoIterator<Item = (Range<S>, T)>,
2349 S: ToOffset,
2350 T: Into<Arc<str>>,
2351 {
2352 // Skip invalid edits and coalesce contiguous ones.
2353 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2354
2355 for (range, new_text) in edits_iter {
2356 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2357
2358 if range.start > range.end {
2359 mem::swap(&mut range.start, &mut range.end);
2360 }
2361 let new_text = new_text.into();
2362 if !new_text.is_empty() || !range.is_empty() {
2363 if let Some((prev_range, prev_text)) = edits.last_mut()
2364 && prev_range.end >= range.start
2365 {
2366 prev_range.end = cmp::max(prev_range.end, range.end);
2367 *prev_text = format!("{prev_text}{new_text}").into();
2368 } else {
2369 edits.push((range, new_text));
2370 }
2371 }
2372 }
2373 if edits.is_empty() {
2374 return None;
2375 }
2376
2377 self.start_transaction();
2378 self.pending_autoindent.take();
2379 let autoindent_request = autoindent_mode
2380 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2381
2382 let edit_operation = self.text.edit(edits.iter().cloned());
2383 let edit_id = edit_operation.timestamp();
2384
2385 if let Some((before_edit, mode)) = autoindent_request {
2386 let mut delta = 0isize;
2387 let mut previous_setting = None;
2388 let entries: Vec<_> = edits
2389 .into_iter()
2390 .enumerate()
2391 .zip(&edit_operation.as_edit().unwrap().new_text)
2392 .filter(|((_, (range, _)), _)| {
2393 let language = before_edit.language_at(range.start);
2394 let language_id = language.map(|l| l.id());
2395 if let Some((cached_language_id, auto_indent)) = previous_setting
2396 && cached_language_id == language_id
2397 {
2398 auto_indent
2399 } else {
2400 // The auto-indent setting is not present in editorconfigs, hence
2401 // we can avoid passing the file here.
2402 let auto_indent =
2403 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2404 previous_setting = Some((language_id, auto_indent));
2405 auto_indent
2406 }
2407 })
2408 .map(|((ix, (range, _)), new_text)| {
2409 let new_text_length = new_text.len();
2410 let old_start = range.start.to_point(&before_edit);
2411 let new_start = (delta + range.start as isize) as usize;
2412 let range_len = range.end - range.start;
2413 delta += new_text_length as isize - range_len as isize;
2414
2415 // Decide what range of the insertion to auto-indent, and whether
2416 // the first line of the insertion should be considered a newly-inserted line
2417 // or an edit to an existing line.
2418 let mut range_of_insertion_to_indent = 0..new_text_length;
2419 let mut first_line_is_new = true;
2420
2421 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2422 let old_line_end = before_edit.line_len(old_start.row);
2423
2424 if old_start.column > old_line_start {
2425 first_line_is_new = false;
2426 }
2427
2428 if !new_text.contains('\n')
2429 && (old_start.column + (range_len as u32) < old_line_end
2430 || old_line_end == old_line_start)
2431 {
2432 first_line_is_new = false;
2433 }
2434
2435 // When inserting text starting with a newline, avoid auto-indenting the
2436 // previous line.
2437 if new_text.starts_with('\n') {
2438 range_of_insertion_to_indent.start += 1;
2439 first_line_is_new = true;
2440 }
2441
2442 let mut original_indent_column = None;
2443 if let AutoindentMode::Block {
2444 original_indent_columns,
2445 } = &mode
2446 {
2447 original_indent_column = Some(if new_text.starts_with('\n') {
2448 indent_size_for_text(
2449 new_text[range_of_insertion_to_indent.clone()].chars(),
2450 )
2451 .len
2452 } else {
2453 original_indent_columns
2454 .get(ix)
2455 .copied()
2456 .flatten()
2457 .unwrap_or_else(|| {
2458 indent_size_for_text(
2459 new_text[range_of_insertion_to_indent.clone()].chars(),
2460 )
2461 .len
2462 })
2463 });
2464
2465 // Avoid auto-indenting the line after the edit.
2466 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2467 range_of_insertion_to_indent.end -= 1;
2468 }
2469 }
2470
2471 AutoindentRequestEntry {
2472 first_line_is_new,
2473 original_indent_column,
2474 indent_size: before_edit.language_indent_size_at(range.start, cx),
2475 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2476 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2477 }
2478 })
2479 .collect();
2480
2481 if !entries.is_empty() {
2482 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2483 before_edit,
2484 entries,
2485 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2486 ignore_empty_lines: false,
2487 }));
2488 }
2489 }
2490
2491 self.end_transaction(cx);
2492 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2493 Some(edit_id)
2494 }
2495
2496 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2497 self.was_changed();
2498
2499 if self.edits_since::<usize>(old_version).next().is_none() {
2500 return;
2501 }
2502
2503 self.reparse(cx);
2504 cx.emit(BufferEvent::Edited);
2505 if was_dirty != self.is_dirty() {
2506 cx.emit(BufferEvent::DirtyChanged);
2507 }
2508 cx.notify();
2509 }
2510
2511 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2512 where
2513 I: IntoIterator<Item = Range<T>>,
2514 T: ToOffset + Copy,
2515 {
2516 let before_edit = self.snapshot();
2517 let entries = ranges
2518 .into_iter()
2519 .map(|range| AutoindentRequestEntry {
2520 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2521 first_line_is_new: true,
2522 indent_size: before_edit.language_indent_size_at(range.start, cx),
2523 original_indent_column: None,
2524 })
2525 .collect();
2526 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2527 before_edit,
2528 entries,
2529 is_block_mode: false,
2530 ignore_empty_lines: true,
2531 }));
2532 self.request_autoindent(cx);
2533 }
2534
2535 /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2536 /// You can also request the insertion of empty lines above and below the line starting at the returned point.
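///
/// A sketch: break the line at row 3, column 5, padding with blank lines above
/// and below where needed (`cx` is assumed to be this buffer's context):
///
/// ```ignore
/// let new_line_start = buffer.insert_empty_line(Point::new(3, 5), true, true, cx);
/// ```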
2537 pub fn insert_empty_line(
2538 &mut self,
2539 position: impl ToPoint,
2540 space_above: bool,
2541 space_below: bool,
2542 cx: &mut Context<Self>,
2543 ) -> Point {
2544 let mut position = position.to_point(self);
2545
2546 self.start_transaction();
2547
2548 self.edit(
2549 [(position..position, "\n")],
2550 Some(AutoindentMode::EachLine),
2551 cx,
2552 );
2553
2554 if position.column > 0 {
2555 position += Point::new(1, 0);
2556 }
2557
2558 if !self.is_line_blank(position.row) {
2559 self.edit(
2560 [(position..position, "\n")],
2561 Some(AutoindentMode::EachLine),
2562 cx,
2563 );
2564 }
2565
2566 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2567 self.edit(
2568 [(position..position, "\n")],
2569 Some(AutoindentMode::EachLine),
2570 cx,
2571 );
2572 position.row += 1;
2573 }
2574
2575 if space_below
2576 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2577 {
2578 self.edit(
2579 [(position..position, "\n")],
2580 Some(AutoindentMode::EachLine),
2581 cx,
2582 );
2583 }
2584
2585 self.end_transaction(cx);
2586
2587 position
2588 }
2589
2590 /// Applies the given remote operations to the buffer.
2591 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2592 self.pending_autoindent.take();
2593 let was_dirty = self.is_dirty();
2594 let old_version = self.version.clone();
2595 let mut deferred_ops = Vec::new();
2596 let buffer_ops = ops
2597 .into_iter()
2598 .filter_map(|op| match op {
2599 Operation::Buffer(op) => Some(op),
2600 _ => {
2601 if self.can_apply_op(&op) {
2602 self.apply_op(op, cx);
2603 } else {
2604 deferred_ops.push(op);
2605 }
2606 None
2607 }
2608 })
2609 .collect::<Vec<_>>();
2610 for operation in buffer_ops.iter() {
2611 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2612 }
2613 self.text.apply_ops(buffer_ops);
2614 self.deferred_ops.insert(deferred_ops);
2615 self.flush_deferred_ops(cx);
2616 self.did_edit(&old_version, was_dirty, cx);
2617 // Notify independently of whether the buffer was edited as the operations could include a
2618 // selection update.
2619 cx.notify();
2620 }
2621
2622 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2623 let mut deferred_ops = Vec::new();
2624 for op in self.deferred_ops.drain().iter().cloned() {
2625 if self.can_apply_op(&op) {
2626 self.apply_op(op, cx);
2627 } else {
2628 deferred_ops.push(op);
2629 }
2630 }
2631 self.deferred_ops.insert(deferred_ops);
2632 }
2633
2634 pub fn has_deferred_ops(&self) -> bool {
2635 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2636 }
2637
2638 fn can_apply_op(&self, operation: &Operation) -> bool {
2639 match operation {
2640 Operation::Buffer(_) => {
2641 unreachable!("buffer operations should never be applied at this layer")
2642 }
2643 Operation::UpdateDiagnostics {
2644 diagnostics: diagnostic_set,
2645 ..
2646 } => diagnostic_set.iter().all(|diagnostic| {
2647 self.text.can_resolve(&diagnostic.range.start)
2648 && self.text.can_resolve(&diagnostic.range.end)
2649 }),
2650 Operation::UpdateSelections { selections, .. } => selections
2651 .iter()
2652 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2653 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2654 }
2655 }
2656
2657 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2658 match operation {
2659 Operation::Buffer(_) => {
2660 unreachable!("buffer operations should never be applied at this layer")
2661 }
2662 Operation::UpdateDiagnostics {
2663 server_id,
2664 diagnostics: diagnostic_set,
2665 lamport_timestamp,
2666 } => {
2667 let snapshot = self.snapshot();
2668 self.apply_diagnostic_update(
2669 server_id,
2670 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2671 lamport_timestamp,
2672 cx,
2673 );
2674 }
2675 Operation::UpdateSelections {
2676 selections,
2677 lamport_timestamp,
2678 line_mode,
2679 cursor_shape,
2680 } => {
2681 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2682 && set.lamport_timestamp > lamport_timestamp
2683 {
2684 return;
2685 }
2686
2687 self.remote_selections.insert(
2688 lamport_timestamp.replica_id,
2689 SelectionSet {
2690 selections,
2691 lamport_timestamp,
2692 line_mode,
2693 cursor_shape,
2694 },
2695 );
2696 self.text.lamport_clock.observe(lamport_timestamp);
2697 self.non_text_state_update_count += 1;
2698 }
2699 Operation::UpdateCompletionTriggers {
2700 triggers,
2701 lamport_timestamp,
2702 server_id,
2703 } => {
2704 if triggers.is_empty() {
2705 self.completion_triggers_per_language_server
2706 .remove(&server_id);
2707 self.completion_triggers = self
2708 .completion_triggers_per_language_server
2709 .values()
2710 .flat_map(|triggers| triggers.iter().cloned())
2711 .collect();
2712 } else {
2713 self.completion_triggers_per_language_server
2714 .insert(server_id, triggers.iter().cloned().collect());
2715 self.completion_triggers.extend(triggers);
2716 }
2717 self.text.lamport_clock.observe(lamport_timestamp);
2718 }
2719 Operation::UpdateLineEnding {
2720 line_ending,
2721 lamport_timestamp,
2722 } => {
2723 self.text.set_line_ending(line_ending);
2724 self.text.lamport_clock.observe(lamport_timestamp);
2725 }
2726 }
2727 }
2728
2729 fn apply_diagnostic_update(
2730 &mut self,
2731 server_id: LanguageServerId,
2732 diagnostics: DiagnosticSet,
2733 lamport_timestamp: clock::Lamport,
2734 cx: &mut Context<Self>,
2735 ) {
2736 if lamport_timestamp > self.diagnostics_timestamp {
2737 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2738 if diagnostics.is_empty() {
2739 if let Ok(ix) = ix {
2740 self.diagnostics.remove(ix);
2741 }
2742 } else {
2743 match ix {
2744 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2745 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2746 };
2747 }
2748 self.diagnostics_timestamp = lamport_timestamp;
2749 self.non_text_state_update_count += 1;
2750 self.text.lamport_clock.observe(lamport_timestamp);
2751 cx.notify();
2752 cx.emit(BufferEvent::DiagnosticsUpdated);
2753 }
2754 }
2755
2756 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2757 self.was_changed();
2758 cx.emit(BufferEvent::Operation {
2759 operation,
2760 is_local,
2761 });
2762 }
2763
2764 /// Removes the selections for a given peer.
2765 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2766 self.remote_selections.remove(&replica_id);
2767 cx.notify();
2768 }
2769
2770 /// Undoes the most recent transaction.
2771 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2772 let was_dirty = self.is_dirty();
2773 let old_version = self.version.clone();
2774
2775 if let Some((transaction_id, operation)) = self.text.undo() {
2776 self.send_operation(Operation::Buffer(operation), true, cx);
2777 self.did_edit(&old_version, was_dirty, cx);
2778 Some(transaction_id)
2779 } else {
2780 None
2781 }
2782 }
2783
2784 /// Manually undoes a specific transaction in the buffer's undo history.
2785 pub fn undo_transaction(
2786 &mut self,
2787 transaction_id: TransactionId,
2788 cx: &mut Context<Self>,
2789 ) -> bool {
2790 let was_dirty = self.is_dirty();
2791 let old_version = self.version.clone();
2792 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2793 self.send_operation(Operation::Buffer(operation), true, cx);
2794 self.did_edit(&old_version, was_dirty, cx);
2795 true
2796 } else {
2797 false
2798 }
2799 }
2800
2801 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2802 pub fn undo_to_transaction(
2803 &mut self,
2804 transaction_id: TransactionId,
2805 cx: &mut Context<Self>,
2806 ) -> bool {
2807 let was_dirty = self.is_dirty();
2808 let old_version = self.version.clone();
2809
2810 let operations = self.text.undo_to_transaction(transaction_id);
2811 let undone = !operations.is_empty();
2812 for operation in operations {
2813 self.send_operation(Operation::Buffer(operation), true, cx);
2814 }
2815 if undone {
2816 self.did_edit(&old_version, was_dirty, cx)
2817 }
2818 undone
2819 }
2820
2821 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2822 let was_dirty = self.is_dirty();
2823 let operation = self.text.undo_operations(counts);
2824 let old_version = self.version.clone();
2825 self.send_operation(Operation::Buffer(operation), true, cx);
2826 self.did_edit(&old_version, was_dirty, cx);
2827 }
2828
2829 /// Redoes the most recently undone transaction.
2830 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2831 let was_dirty = self.is_dirty();
2832 let old_version = self.version.clone();
2833
2834 if let Some((transaction_id, operation)) = self.text.redo() {
2835 self.send_operation(Operation::Buffer(operation), true, cx);
2836 self.did_edit(&old_version, was_dirty, cx);
2837 Some(transaction_id)
2838 } else {
2839 None
2840 }
2841 }
2842
2843 /// Manually redoes all changes until a given transaction in the buffer's redo history.
2844 pub fn redo_to_transaction(
2845 &mut self,
2846 transaction_id: TransactionId,
2847 cx: &mut Context<Self>,
2848 ) -> bool {
2849 let was_dirty = self.is_dirty();
2850 let old_version = self.version.clone();
2851
2852 let operations = self.text.redo_to_transaction(transaction_id);
2853 let redone = !operations.is_empty();
2854 for operation in operations {
2855 self.send_operation(Operation::Buffer(operation), true, cx);
2856 }
2857 if redone {
2858 self.did_edit(&old_version, was_dirty, cx)
2859 }
2860 redone
2861 }
2862
2863 /// Override current completion triggers with the user-provided completion triggers.
2864 pub fn set_completion_triggers(
2865 &mut self,
2866 server_id: LanguageServerId,
2867 triggers: BTreeSet<String>,
2868 cx: &mut Context<Self>,
2869 ) {
2870 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2871 if triggers.is_empty() {
2872 self.completion_triggers_per_language_server
2873 .remove(&server_id);
2874 self.completion_triggers = self
2875 .completion_triggers_per_language_server
2876 .values()
2877 .flat_map(|triggers| triggers.iter().cloned())
2878 .collect();
2879 } else {
2880 self.completion_triggers_per_language_server
2881 .insert(server_id, triggers.clone());
2882 self.completion_triggers.extend(triggers.iter().cloned());
2883 }
2884 self.send_operation(
2885 Operation::UpdateCompletionTriggers {
2886 triggers: triggers.into_iter().collect(),
2887 lamport_timestamp: self.completion_triggers_timestamp,
2888 server_id,
2889 },
2890 true,
2891 cx,
2892 );
2893 cx.notify();
2894 }
2895
2896 /// Returns a list of strings which trigger a completion menu for this language.
2897 /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2898 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2899 &self.completion_triggers
2900 }
2901
2902 /// Call this directly after performing edits to prevent the preview tab
2903 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2904 /// to return false until there are additional edits.
2905 pub fn refresh_preview(&mut self) {
2906 self.preview_version = self.version.clone();
2907 }
2908
2909 /// Whether we should preserve the preview status of a tab containing this buffer.
2910 pub fn preserve_preview(&self) -> bool {
2911 !self.has_edits_since(&self.preview_version)
2912 }
2913}
2914
2915#[doc(hidden)]
2916#[cfg(any(test, feature = "test-support"))]
2917impl Buffer {
2918 pub fn edit_via_marked_text(
2919 &mut self,
2920 marked_string: &str,
2921 autoindent_mode: Option<AutoindentMode>,
2922 cx: &mut Context<Self>,
2923 ) {
2924 let edits = self.edits_for_marked_text(marked_string);
2925 self.edit(edits, autoindent_mode, cx);
2926 }
2927
2928 pub fn set_group_interval(&mut self, group_interval: Duration) {
2929 self.text.set_group_interval(group_interval);
2930 }
2931
2932 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2933 where
2934 T: rand::Rng,
2935 {
2936 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2937 let mut last_end = None;
2938 for _ in 0..old_range_count {
2939 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2940 break;
2941 }
2942
2943 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2944 let mut range = self.random_byte_range(new_start, rng);
2945 if rng.random_bool(0.2) {
2946 mem::swap(&mut range.start, &mut range.end);
2947 }
2948 last_end = Some(range.end);
2949
2950 let new_text_len = rng.random_range(0..10);
2951 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2952 new_text = new_text.to_uppercase();
2953
2954 edits.push((range, new_text));
2955 }
2956 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2957 self.edit(edits, None, cx);
2958 }
2959
2960 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2961 let was_dirty = self.is_dirty();
2962 let old_version = self.version.clone();
2963
2964 let ops = self.text.randomly_undo_redo(rng);
2965 if !ops.is_empty() {
2966 for op in ops {
2967 self.send_operation(Operation::Buffer(op), true, cx);
2968 self.did_edit(&old_version, was_dirty, cx);
2969 }
2970 }
2971 }
2972}
2973
2974impl EventEmitter<BufferEvent> for Buffer {}
2975
2976impl Deref for Buffer {
2977 type Target = TextBuffer;
2978
2979 fn deref(&self) -> &Self::Target {
2980 &self.text
2981 }
2982}
2983
2984impl BufferSnapshot {
2985 /// Returns [`IndentSize`] for a given line that respects user settings and
2986 /// language preferences.
2987 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2988 indent_size_for_line(self, row)
2989 }
2990
2991 /// Returns [`IndentSize`] for a given position that respects user settings
2992 /// and language preferences.
2993 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2994 let settings = language_settings(
2995 self.language_at(position).map(|l| l.name()),
2996 self.file(),
2997 cx,
2998 );
2999 if settings.hard_tabs {
3000 IndentSize::tab()
3001 } else {
3002 IndentSize::spaces(settings.tab_size.get())
3003 }
3004 }
3005
3006 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3007 /// is passed in as `single_indent_size`.
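///
/// # Example
///
/// A sketch of computing suggestions for the first five rows, assuming a
/// 4-space indentation unit:
///
/// ```ignore
/// let suggestions = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
/// for (row, indent) in suggestions {
///     println!("row {row}: indent by {} columns", indent.len);
/// }
/// ```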
3008 pub fn suggested_indents(
3009 &self,
3010 rows: impl Iterator<Item = u32>,
3011 single_indent_size: IndentSize,
3012 ) -> BTreeMap<u32, IndentSize> {
3013 let mut result = BTreeMap::new();
3014
3015 for row_range in contiguous_ranges(rows, 10) {
3016 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3017 Some(suggestions) => suggestions,
3018 _ => break,
3019 };
3020
3021 for (row, suggestion) in row_range.zip(suggestions) {
3022 let indent_size = if let Some(suggestion) = suggestion {
3023 result
3024 .get(&suggestion.basis_row)
3025 .copied()
3026 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3027 .with_delta(suggestion.delta, single_indent_size)
3028 } else {
3029 self.indent_size_for_line(row)
3030 };
3031
3032 result.insert(row, indent_size);
3033 }
3034 }
3035
3036 result
3037 }
3038
3039 fn suggest_autoindents(
3040 &self,
3041 row_range: Range<u32>,
3042 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3043 let config = &self.language.as_ref()?.config;
3044 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3045
3046 #[derive(Debug, Clone)]
3047 struct StartPosition {
3048 start: Point,
3049 suffix: SharedString,
3050 }
3051
3052 // Find the suggested indentation ranges based on the syntax tree.
3053 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3054 let end = Point::new(row_range.end, 0);
3055 let range = (start..end).to_offset(&self.text);
3056 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3057 Some(&grammar.indents_config.as_ref()?.query)
3058 });
3059 let indent_configs = matches
3060 .grammars()
3061 .iter()
3062 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3063 .collect::<Vec<_>>();
3064
3065 let mut indent_ranges = Vec::<Range<Point>>::new();
3066 let mut start_positions = Vec::<StartPosition>::new();
3067 let mut outdent_positions = Vec::<Point>::new();
3068 while let Some(mat) = matches.peek() {
3069 let mut start: Option<Point> = None;
3070 let mut end: Option<Point> = None;
3071
3072 let config = indent_configs[mat.grammar_index];
3073 for capture in mat.captures {
3074 if capture.index == config.indent_capture_ix {
3075 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3076 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3077 } else if Some(capture.index) == config.start_capture_ix {
3078 start = Some(Point::from_ts_point(capture.node.end_position()));
3079 } else if Some(capture.index) == config.end_capture_ix {
3080 end = Some(Point::from_ts_point(capture.node.start_position()));
3081 } else if Some(capture.index) == config.outdent_capture_ix {
3082 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3083 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3084 start_positions.push(StartPosition {
3085 start: Point::from_ts_point(capture.node.start_position()),
3086 suffix: suffix.clone(),
3087 });
3088 }
3089 }
3090
3091 matches.advance();
3092 if let Some((start, end)) = start.zip(end) {
3093 if start.row == end.row {
3094 continue;
3095 }
3096 let range = start..end;
3097 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3098 Err(ix) => indent_ranges.insert(ix, range),
3099 Ok(ix) => {
3100 let prev_range = &mut indent_ranges[ix];
3101 prev_range.end = prev_range.end.max(range.end);
3102 }
3103 }
3104 }
3105 }
3106
3107 let mut error_ranges = Vec::<Range<Point>>::new();
3108 let mut matches = self
3109 .syntax
3110 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3111 while let Some(mat) = matches.peek() {
3112 let node = mat.captures[0].node;
3113 let start = Point::from_ts_point(node.start_position());
3114 let end = Point::from_ts_point(node.end_position());
3115 let range = start..end;
3116 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3117 Ok(ix) | Err(ix) => ix,
3118 };
3119 let mut end_ix = ix;
3120 while let Some(existing_range) = error_ranges.get(end_ix) {
3121 if existing_range.end < end {
3122 end_ix += 1;
3123 } else {
3124 break;
3125 }
3126 }
3127 error_ranges.splice(ix..end_ix, [range]);
3128 matches.advance();
3129 }
3130
3131 outdent_positions.sort();
3132 for outdent_position in outdent_positions {
3133 // find the innermost indent range containing this outdent_position
3134 // set its end to the outdent position
3135 if let Some(range_to_truncate) = indent_ranges
3136 .iter_mut()
3137 .filter(|indent_range| indent_range.contains(&outdent_position))
3138 .next_back()
3139 {
3140 range_to_truncate.end = outdent_position;
3141 }
3142 }
3143
3144 start_positions.sort_by_key(|b| b.start);
3145
3146 // Find the suggested indentation increases and decreases based on regexes.
3147 let mut regex_outdent_map = HashMap::default();
3148 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3149 let mut start_positions_iter = start_positions.iter().peekable();
3150
3151 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3152 self.for_each_line(
3153 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3154 ..Point::new(row_range.end, 0),
3155 |row, line| {
3156 if config
3157 .decrease_indent_pattern
3158 .as_ref()
3159 .is_some_and(|regex| regex.is_match(line))
3160 {
3161 indent_change_rows.push((row, Ordering::Less));
3162 }
3163 if config
3164 .increase_indent_pattern
3165 .as_ref()
3166 .is_some_and(|regex| regex.is_match(line))
3167 {
3168 indent_change_rows.push((row + 1, Ordering::Greater));
3169 }
3170 while let Some(pos) = start_positions_iter.peek() {
3171 if pos.start.row < row {
3172 let pos = start_positions_iter.next().unwrap();
3173 last_seen_suffix
3174 .entry(pos.suffix.to_string())
3175 .or_default()
3176 .push(pos.start);
3177 } else {
3178 break;
3179 }
3180 }
3181 for rule in &config.decrease_indent_patterns {
3182 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3183 let row_start_column = self.indent_size_for_line(row).len;
3184 let basis_row = rule
3185 .valid_after
3186 .iter()
3187 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3188 .flatten()
3189 .filter(|start_point| start_point.column <= row_start_column)
3190 .max_by_key(|start_point| start_point.row);
3191 if let Some(outdent_to_row) = basis_row {
3192 regex_outdent_map.insert(row, outdent_to_row.row);
3193 }
3194 break;
3195 }
3196 }
3197 },
3198 );
3199
3200 let mut indent_changes = indent_change_rows.into_iter().peekable();
3201 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3202 prev_non_blank_row.unwrap_or(0)
3203 } else {
3204 row_range.start.saturating_sub(1)
3205 };
3206
3207 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3208 Some(row_range.map(move |row| {
3209 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3210
3211 let mut indent_from_prev_row = false;
3212 let mut outdent_from_prev_row = false;
3213 let mut outdent_to_row = u32::MAX;
3214 let mut from_regex = false;
3215
3216 while let Some((indent_row, delta)) = indent_changes.peek() {
3217 match indent_row.cmp(&row) {
3218 Ordering::Equal => match delta {
3219 Ordering::Less => {
3220 from_regex = true;
3221 outdent_from_prev_row = true
3222 }
3223 Ordering::Greater => {
3224 indent_from_prev_row = true;
3225 from_regex = true
3226 }
3227 _ => {}
3228 },
3229
3230 Ordering::Greater => break,
3231 Ordering::Less => {}
3232 }
3233
3234 indent_changes.next();
3235 }
3236
3237 for range in &indent_ranges {
3238 if range.start.row >= row {
3239 break;
3240 }
3241 if range.start.row == prev_row && range.end > row_start {
3242 indent_from_prev_row = true;
3243 }
3244 if range.end > prev_row_start && range.end <= row_start {
3245 outdent_to_row = outdent_to_row.min(range.start.row);
3246 }
3247 }
3248
3249 if let Some(basis_row) = regex_outdent_map.get(&row) {
3250 indent_from_prev_row = false;
3251 outdent_to_row = *basis_row;
3252 from_regex = true;
3253 }
3254
3255 let within_error = error_ranges
3256 .iter()
3257 .any(|e| e.start.row < row && e.end > row_start);
3258
3259 let suggestion = if outdent_to_row == prev_row
3260 || (outdent_from_prev_row && indent_from_prev_row)
3261 {
3262 Some(IndentSuggestion {
3263 basis_row: prev_row,
3264 delta: Ordering::Equal,
3265 within_error: within_error && !from_regex,
3266 })
3267 } else if indent_from_prev_row {
3268 Some(IndentSuggestion {
3269 basis_row: prev_row,
3270 delta: Ordering::Greater,
3271 within_error: within_error && !from_regex,
3272 })
3273 } else if outdent_to_row < prev_row {
3274 Some(IndentSuggestion {
3275 basis_row: outdent_to_row,
3276 delta: Ordering::Equal,
3277 within_error: within_error && !from_regex,
3278 })
3279 } else if outdent_from_prev_row {
3280 Some(IndentSuggestion {
3281 basis_row: prev_row,
3282 delta: Ordering::Less,
3283 within_error: within_error && !from_regex,
3284 })
3285 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3286 {
3287 Some(IndentSuggestion {
3288 basis_row: prev_row,
3289 delta: Ordering::Equal,
3290 within_error: within_error && !from_regex,
3291 })
3292 } else {
3293 None
3294 };
3295
3296 prev_row = row;
3297 prev_row_start = row_start;
3298 suggestion
3299 }))
3300 }
3301
3302 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3303 while row > 0 {
3304 row -= 1;
3305 if !self.is_line_blank(row) {
3306 return Some(row);
3307 }
3308 }
3309 None
3310 }
3311
3312 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3313 let captures = self.syntax.captures(range, &self.text, |grammar| {
3314 grammar
3315 .highlights_config
3316 .as_ref()
3317 .map(|config| &config.query)
3318 });
3319 let highlight_maps = captures
3320 .grammars()
3321 .iter()
3322 .map(|grammar| grammar.highlight_map())
3323 .collect();
3324 (captures, highlight_maps)
3325 }
3326
3327 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3328 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3329 /// returned in chunks where each chunk has a single syntax highlighting style and
3330 /// diagnostic status.
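///
/// # Example
///
/// A sketch that reassembles the text of a range from its language-aware
/// chunks (the `chunk.text` field name is assumed here):
///
/// ```ignore
/// let text: String = snapshot
///     .chunks(0..snapshot.len(), true)
///     .map(|chunk| chunk.text)
///     .collect();
/// ```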
3331 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3332 let range = range.start.to_offset(self)..range.end.to_offset(self);
3333
3334 let mut syntax = None;
3335 if language_aware {
3336 syntax = Some(self.get_highlights(range.clone()));
3337 }
3338 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3339 let diagnostics = language_aware;
3340 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3341 }
3342
3343 pub fn highlighted_text_for_range<T: ToOffset>(
3344 &self,
3345 range: Range<T>,
3346 override_style: Option<HighlightStyle>,
3347 syntax_theme: &SyntaxTheme,
3348 ) -> HighlightedText {
3349 HighlightedText::from_buffer_range(
3350 range,
3351 &self.text,
3352 &self.syntax,
3353 override_style,
3354 syntax_theme,
3355 )
3356 }
3357
3358 /// Invokes the given callback for each line of text in the given range of the buffer.
3359 /// Uses a callback to avoid allocating a string for each line.
3360 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3361 let mut line = String::new();
3362 let mut row = range.start.row;
3363 for chunk in self
3364 .as_rope()
3365 .chunks_in_range(range.to_offset(self))
3366 .chain(["\n"])
3367 {
3368 for (newline_ix, text) in chunk.split('\n').enumerate() {
3369 if newline_ix > 0 {
3370 callback(row, &line);
3371 row += 1;
3372 line.clear();
3373 }
3374 line.push_str(text);
3375 }
3376 }
3377 }
3378
3379 /// Iterates over every [`SyntaxLayer`] in the buffer.
3380 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3381 self.syntax_layers_for_range(0..self.len(), true)
3382 }
3383
3384 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3385 let offset = position.to_offset(self);
3386 self.syntax_layers_for_range(offset..offset, false)
3387 .filter(|l| {
3388 if let Some(ranges) = l.included_sub_ranges {
3389 ranges.iter().any(|range| {
3390 let start = range.start.to_offset(self);
3391 start <= offset && {
3392 let end = range.end.to_offset(self);
3393 offset < end
3394 }
3395 })
3396 } else {
3397 l.node().start_byte() <= offset && l.node().end_byte() > offset
3398 }
3399 })
3400 .last()
3401 }
3402
3403 pub fn syntax_layers_for_range<D: ToOffset>(
3404 &self,
3405 range: Range<D>,
3406 include_hidden: bool,
3407 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3408 self.syntax
3409 .layers_for_range(range, &self.text, include_hidden)
3410 }
3411
3412 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3413 &self,
3414 range: Range<D>,
3415 ) -> Option<SyntaxLayer<'_>> {
3416 let range = range.to_offset(self);
3417 self.syntax
3418 .layers_for_range(range, &self.text, false)
3419 .max_by(|a, b| {
3420 if a.depth != b.depth {
3421 a.depth.cmp(&b.depth)
3422 } else if a.offset.0 != b.offset.0 {
3423 a.offset.0.cmp(&b.offset.0)
3424 } else {
3425 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3426 }
3427 })
3428 }
3429
3430 /// Returns the main [`Language`].
3431 pub fn language(&self) -> Option<&Arc<Language>> {
3432 self.language.as_ref()
3433 }
3434
3435 /// Returns the [`Language`] at the given location.
3436 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3437 self.syntax_layer_at(position)
3438 .map(|info| info.language)
3439 .or(self.language.as_ref())
3440 }
3441
3442 /// Returns the settings for the language at the given location.
3443 pub fn settings_at<'a, D: ToOffset>(
3444 &'a self,
3445 position: D,
3446 cx: &'a App,
3447 ) -> Cow<'a, LanguageSettings> {
3448 language_settings(
3449 self.language_at(position).map(|l| l.name()),
3450 self.file.as_ref(),
3451 cx,
3452 )
3453 }
3454
3455 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3456 CharClassifier::new(self.language_scope_at(point))
3457 }
3458
3459 /// Returns the [`LanguageScope`] at the given location.
3460 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3461 let offset = position.to_offset(self);
3462 let mut scope = None;
3463 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3464
3465 // Use the layer that has the smallest node intersecting the given point.
3466 for layer in self
3467 .syntax
3468 .layers_for_range(offset..offset, &self.text, false)
3469 {
3470 let mut cursor = layer.node().walk();
3471
3472 let mut range = None;
3473 loop {
3474 let child_range = cursor.node().byte_range();
3475 if !child_range.contains(&offset) {
3476 break;
3477 }
3478
3479 range = Some(child_range);
3480 if cursor.goto_first_child_for_byte(offset).is_none() {
3481 break;
3482 }
3483 }
3484
3485 if let Some(range) = range
3486 && smallest_range_and_depth.as_ref().is_none_or(
3487 |(smallest_range, smallest_range_depth)| {
3488 if layer.depth > *smallest_range_depth {
3489 true
3490 } else if layer.depth == *smallest_range_depth {
3491 range.len() < smallest_range.len()
3492 } else {
3493 false
3494 }
3495 },
3496 )
3497 {
3498 smallest_range_and_depth = Some((range, layer.depth));
3499 scope = Some(LanguageScope {
3500 language: layer.language.clone(),
3501 override_id: layer.override_id(offset, &self.text),
3502 });
3503 }
3504 }
3505
3506 scope.or_else(|| {
3507 self.language.clone().map(|language| LanguageScope {
3508 language,
3509 override_id: None,
3510 })
3511 })
3512 }
3513
3514 /// Returns a tuple of the range and character kind of the word
3515 /// surrounding the given position.
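    ///
    /// A minimal usage sketch (not compiled; assumes a `snapshot: BufferSnapshot` and a byte
    /// offset `cursor` are in scope):
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(cursor, None);
    /// if kind == Some(CharKind::Word) {
    ///     let word: String = snapshot.text_for_range(range).collect();
    /// }
    /// ```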
3516 pub fn surrounding_word<T: ToOffset>(
3517 &self,
3518 start: T,
3519 scope_context: Option<CharScopeContext>,
3520 ) -> (Range<usize>, Option<CharKind>) {
3521 let mut start = start.to_offset(self);
3522 let mut end = start;
3523 let mut next_chars = self.chars_at(start).take(128).peekable();
3524 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3525
3526 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3527 let word_kind = cmp::max(
3528 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3529 next_chars.peek().copied().map(|c| classifier.kind(c)),
3530 );
3531
3532 for ch in prev_chars {
3533 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3534 start -= ch.len_utf8();
3535 } else {
3536 break;
3537 }
3538 }
3539
3540 for ch in next_chars {
3541 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3542 end += ch.len_utf8();
3543 } else {
3544 break;
3545 }
3546 }
3547
3548 (start..end, word_kind)
3549 }
3550
3551 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3552 /// range. When `require_larger` is true, the node found must be larger than the query range.
3553 ///
    /// Returns `true` if a node was found, and `false` otherwise. In the `false` case the cursor will
3555 /// be moved to the root of the tree.
3556 fn goto_node_enclosing_range(
3557 cursor: &mut tree_sitter::TreeCursor,
3558 query_range: &Range<usize>,
3559 require_larger: bool,
3560 ) -> bool {
3561 let mut ascending = false;
3562 loop {
3563 let mut range = cursor.node().byte_range();
3564 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3567 if range.start > query_range.start {
3568 cursor.goto_previous_sibling();
3569 range = cursor.node().byte_range();
3570 }
3571 } else {
3572 // When the query range is non-empty and the current node ends exactly at the start,
3573 // move to the next sibling to find a node that extends beyond the start.
3574 if range.end == query_range.start {
3575 cursor.goto_next_sibling();
3576 range = cursor.node().byte_range();
3577 }
3578 }
3579
3580 let encloses = range.contains_inclusive(query_range)
3581 && (!require_larger || range.len() > query_range.len());
3582 if !encloses {
3583 ascending = true;
3584 if !cursor.goto_parent() {
3585 return false;
3586 }
3587 continue;
3588 } else if ascending {
3589 return true;
3590 }
3591
3592 // Descend into the current node.
3593 if cursor
3594 .goto_first_child_for_byte(query_range.start)
3595 .is_none()
3596 {
3597 return true;
3598 }
3599 }
3600 }
3601
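    /// Returns the smallest syntax node that contains the given range and is larger than it,
    /// considering every syntax layer intersecting the range.
    ///
    /// A minimal usage sketch (not compiled; assumes a `snapshot: BufferSnapshot` and a byte
    /// range `selection` are in scope), e.g. for a "select larger syntax node" action:
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_ancestor(selection.clone()) {
    ///     let expanded_selection = node.byte_range();
    /// }
    /// ```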
3602 pub fn syntax_ancestor<'a, T: ToOffset>(
3603 &'a self,
3604 range: Range<T>,
3605 ) -> Option<tree_sitter::Node<'a>> {
3606 let range = range.start.to_offset(self)..range.end.to_offset(self);
3607 let mut result: Option<tree_sitter::Node<'a>> = None;
3608 for layer in self
3609 .syntax
3610 .layers_for_range(range.clone(), &self.text, true)
3611 {
3612 let mut cursor = layer.node().walk();
3613
3614 // Find the node that both contains the range and is larger than it.
3615 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3616 continue;
3617 }
3618
3619 let left_node = cursor.node();
3620 let mut layer_result = left_node;
3621
3622 // For an empty range, try to find another node immediately to the right of the range.
3623 if left_node.end_byte() == range.start {
3624 let mut right_node = None;
3625 while !cursor.goto_next_sibling() {
3626 if !cursor.goto_parent() {
3627 break;
3628 }
3629 }
3630
3631 while cursor.node().start_byte() == range.start {
3632 right_node = Some(cursor.node());
3633 if !cursor.goto_first_child() {
3634 break;
3635 }
3636 }
3637
3638 // If there is a candidate node on both sides of the (empty) range, then
3639 // decide between the two by favoring a named node over an anonymous token.
3640 // If both nodes are the same in that regard, favor the right one.
3641 if let Some(right_node) = right_node
3642 && (right_node.is_named() || !left_node.is_named())
3643 {
3644 layer_result = right_node;
3645 }
3646 }
3647
3648 if let Some(previous_result) = &result
3649 && previous_result.byte_range().len() < layer_result.byte_range().len()
3650 {
3651 continue;
3652 }
3653 result = Some(layer_result);
3654 }
3655
3656 result
3657 }
3658
3659 /// Find the previous sibling syntax node at the given range.
3660 ///
3661 /// This function locates the syntax node that precedes the node containing
3662 /// the given range. It searches hierarchically by:
3663 /// 1. Finding the node that contains the given range
3664 /// 2. Looking for the previous sibling at the same tree level
3665 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3666 ///
3667 /// Returns `None` if there is no previous sibling at any ancestor level.
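    ///
    /// A minimal usage sketch (not compiled; assumes a `snapshot: BufferSnapshot` and a byte
    /// range `range` are in scope):
    ///
    /// ```ignore
    /// // Move to the start of the syntax node preceding the current one, if there is one.
    /// if let Some(node) = snapshot.syntax_prev_sibling(range.clone()) {
    ///     let target_offset = node.start_byte();
    /// }
    /// ```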
3668 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3669 &'a self,
3670 range: Range<T>,
3671 ) -> Option<tree_sitter::Node<'a>> {
3672 let range = range.start.to_offset(self)..range.end.to_offset(self);
3673 let mut result: Option<tree_sitter::Node<'a>> = None;
3674
3675 for layer in self
3676 .syntax
3677 .layers_for_range(range.clone(), &self.text, true)
3678 {
3679 let mut cursor = layer.node().walk();
3680
3681 // Find the node that contains the range
3682 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3683 continue;
3684 }
3685
3686 // Look for the previous sibling, moving up ancestor levels if needed
3687 loop {
3688 if cursor.goto_previous_sibling() {
3689 let layer_result = cursor.node();
3690
                    if let Some(previous_result) = &result
                        && previous_result.byte_range().end < layer_result.byte_range().end
                    {
                        continue;
                    }
3696 result = Some(layer_result);
3697 break;
3698 }
3699
3700 // No sibling found at this level, try moving up to parent
3701 if !cursor.goto_parent() {
3702 break;
3703 }
3704 }
3705 }
3706
3707 result
3708 }
3709
3710 /// Find the next sibling syntax node at the given range.
3711 ///
3712 /// This function locates the syntax node that follows the node containing
3713 /// the given range. It searches hierarchically by:
3714 /// 1. Finding the node that contains the given range
3715 /// 2. Looking for the next sibling at the same tree level
3716 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3717 ///
3718 /// Returns `None` if there is no next sibling at any ancestor level.
3719 pub fn syntax_next_sibling<'a, T: ToOffset>(
3720 &'a self,
3721 range: Range<T>,
3722 ) -> Option<tree_sitter::Node<'a>> {
3723 let range = range.start.to_offset(self)..range.end.to_offset(self);
3724 let mut result: Option<tree_sitter::Node<'a>> = None;
3725
3726 for layer in self
3727 .syntax
3728 .layers_for_range(range.clone(), &self.text, true)
3729 {
3730 let mut cursor = layer.node().walk();
3731
3732 // Find the node that contains the range
3733 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3734 continue;
3735 }
3736
3737 // Look for the next sibling, moving up ancestor levels if needed
3738 loop {
3739 if cursor.goto_next_sibling() {
3740 let layer_result = cursor.node();
3741
                    if let Some(previous_result) = &result
                        && previous_result.byte_range().start > layer_result.byte_range().start
                    {
                        continue;
                    }
3747 result = Some(layer_result);
3748 break;
3749 }
3750
3751 // No sibling found at this level, try moving up to parent
3752 if !cursor.goto_parent() {
3753 break;
3754 }
3755 }
3756 }
3757
3758 result
3759 }
3760
    /// Returns the root syntax node within the row containing the given position.
3762 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3763 let start_offset = position.to_offset(self);
3764
3765 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3766
3767 let layer = self
3768 .syntax
3769 .layers_for_range(start_offset..start_offset, &self.text, true)
3770 .next()?;
3771
3772 let mut cursor = layer.node().walk();
3773
3774 // Descend to the first leaf that touches the start of the range.
3775 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3776 if cursor.node().end_byte() == start_offset {
3777 cursor.goto_next_sibling();
3778 }
3779 }
3780
3781 // Ascend to the root node within the same row.
3782 while cursor.goto_parent() {
3783 if cursor.node().start_position().row != row {
3784 break;
3785 }
3786 }
3787
3788 Some(cursor.node())
3789 }
3790
3791 /// Returns the outline for the buffer.
3792 ///
3793 /// This method allows passing an optional [`SyntaxTheme`] to
3794 /// syntax-highlight the returned symbols.
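    ///
    /// A minimal sketch of inspecting the underlying items directly (not compiled; assumes a
    /// `snapshot: BufferSnapshot` is in scope):
    ///
    /// ```ignore
    /// // Print every outline item, indented by its nesting depth.
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```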
3795 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3796 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3797 }
3798
3799 /// Returns all the symbols that contain the given position.
3800 ///
3801 /// This method allows passing an optional [`SyntaxTheme`] to
3802 /// syntax-highlight the returned symbols.
3803 pub fn symbols_containing<T: ToOffset>(
3804 &self,
3805 position: T,
3806 theme: Option<&SyntaxTheme>,
3807 ) -> Vec<OutlineItem<Anchor>> {
3808 let position = position.to_offset(self);
3809 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3810 let end = self.clip_offset(position + 1, Bias::Right);
3811 let mut items = self.outline_items_containing(start..end, false, theme);
3812 let mut prev_depth = None;
3813 items.retain(|item| {
3814 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3815 prev_depth = Some(item.depth);
3816 result
3817 });
3818 items
3819 }
3820
3821 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3822 let range = range.to_offset(self);
3823 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3824 grammar.outline_config.as_ref().map(|c| &c.query)
3825 });
3826 let configs = matches
3827 .grammars()
3828 .iter()
3829 .map(|g| g.outline_config.as_ref().unwrap())
3830 .collect::<Vec<_>>();
3831
3832 while let Some(mat) = matches.peek() {
3833 let config = &configs[mat.grammar_index];
3834 let containing_item_node = maybe!({
3835 let item_node = mat.captures.iter().find_map(|cap| {
3836 if cap.index == config.item_capture_ix {
3837 Some(cap.node)
3838 } else {
3839 None
3840 }
3841 })?;
3842
3843 let item_byte_range = item_node.byte_range();
3844 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3845 None
3846 } else {
3847 Some(item_node)
3848 }
3849 });
3850
3851 if let Some(item_node) = containing_item_node {
3852 return Some(
3853 Point::from_ts_point(item_node.start_position())
3854 ..Point::from_ts_point(item_node.end_position()),
3855 );
3856 }
3857
3858 matches.advance();
3859 }
3860 None
3861 }
3862
3863 pub fn outline_items_containing<T: ToOffset>(
3864 &self,
3865 range: Range<T>,
3866 include_extra_context: bool,
3867 theme: Option<&SyntaxTheme>,
3868 ) -> Vec<OutlineItem<Anchor>> {
3869 self.outline_items_containing_internal(
3870 range,
3871 include_extra_context,
3872 theme,
3873 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3874 )
3875 }
3876
3877 pub fn outline_items_as_points_containing<T: ToOffset>(
3878 &self,
3879 range: Range<T>,
3880 include_extra_context: bool,
3881 theme: Option<&SyntaxTheme>,
3882 ) -> Vec<OutlineItem<Point>> {
3883 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3884 range
3885 })
3886 }
3887
3888 fn outline_items_containing_internal<T: ToOffset, U>(
3889 &self,
3890 range: Range<T>,
3891 include_extra_context: bool,
3892 theme: Option<&SyntaxTheme>,
3893 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3894 ) -> Vec<OutlineItem<U>> {
3895 let range = range.to_offset(self);
3896 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3897 grammar.outline_config.as_ref().map(|c| &c.query)
3898 });
3899
3900 let mut items = Vec::new();
3901 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3902 while let Some(mat) = matches.peek() {
3903 let config = matches.grammars()[mat.grammar_index]
3904 .outline_config
3905 .as_ref()
3906 .unwrap();
3907 if let Some(item) =
3908 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3909 {
3910 items.push(item);
3911 } else if let Some(capture) = mat
3912 .captures
3913 .iter()
3914 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3915 {
3916 let capture_range = capture.node.start_position()..capture.node.end_position();
3917 let mut capture_row_range =
3918 capture_range.start.row as u32..capture_range.end.row as u32;
3919 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3920 {
3921 capture_row_range.end -= 1;
3922 }
3923 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3924 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3925 last_row_range.end = capture_row_range.end;
3926 } else {
3927 annotation_row_ranges.push(capture_row_range);
3928 }
3929 } else {
3930 annotation_row_ranges.push(capture_row_range);
3931 }
3932 }
3933 matches.advance();
3934 }
3935
3936 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3937
        // Assign depths based on containment relationships and convert the ranges using `range_callback`.
3939 let mut item_ends_stack = Vec::<Point>::new();
3940 let mut anchor_items = Vec::new();
3941 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3942 for item in items {
3943 while let Some(last_end) = item_ends_stack.last().copied() {
3944 if last_end < item.range.end {
3945 item_ends_stack.pop();
3946 } else {
3947 break;
3948 }
3949 }
3950
3951 let mut annotation_row_range = None;
3952 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3953 let row_preceding_item = item.range.start.row.saturating_sub(1);
3954 if next_annotation_row_range.end < row_preceding_item {
3955 annotation_row_ranges.next();
3956 } else {
3957 if next_annotation_row_range.end == row_preceding_item {
3958 annotation_row_range = Some(next_annotation_row_range.clone());
3959 annotation_row_ranges.next();
3960 }
3961 break;
3962 }
3963 }
3964
3965 anchor_items.push(OutlineItem {
3966 depth: item_ends_stack.len(),
3967 range: range_callback(self, item.range.clone()),
3968 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3969 text: item.text,
3970 highlight_ranges: item.highlight_ranges,
3971 name_ranges: item.name_ranges,
3972 body_range: item.body_range.map(|r| range_callback(self, r)),
3973 annotation_range: annotation_row_range.map(|annotation_range| {
3974 let point_range = Point::new(annotation_range.start, 0)
3975 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3976 range_callback(self, point_range)
3977 }),
3978 });
3979 item_ends_stack.push(item.range.end);
3980 }
3981
3982 anchor_items
3983 }
3984
3985 fn next_outline_item(
3986 &self,
3987 config: &OutlineConfig,
3988 mat: &SyntaxMapMatch,
3989 range: &Range<usize>,
3990 include_extra_context: bool,
3991 theme: Option<&SyntaxTheme>,
3992 ) -> Option<OutlineItem<Point>> {
3993 let item_node = mat.captures.iter().find_map(|cap| {
3994 if cap.index == config.item_capture_ix {
3995 Some(cap.node)
3996 } else {
3997 None
3998 }
3999 })?;
4000
4001 let item_byte_range = item_node.byte_range();
4002 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4003 return None;
4004 }
4005 let item_point_range = Point::from_ts_point(item_node.start_position())
4006 ..Point::from_ts_point(item_node.end_position());
4007
4008 let mut open_point = None;
4009 let mut close_point = None;
4010
4011 let mut buffer_ranges = Vec::new();
4012 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4013 let mut range = node.start_byte()..node.end_byte();
4014 let start = node.start_position();
4015 if node.end_position().row > start.row {
4016 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4017 }
4018
4019 if !range.is_empty() {
4020 buffer_ranges.push((range, node_is_name));
4021 }
4022 };
4023
4024 for capture in mat.captures {
4025 if capture.index == config.name_capture_ix {
4026 add_to_buffer_ranges(capture.node, true);
4027 } else if Some(capture.index) == config.context_capture_ix
4028 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4029 {
4030 add_to_buffer_ranges(capture.node, false);
4031 } else {
4032 if Some(capture.index) == config.open_capture_ix {
4033 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4034 } else if Some(capture.index) == config.close_capture_ix {
4035 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4036 }
4037 }
4038 }
4039
4040 if buffer_ranges.is_empty() {
4041 return None;
4042 }
4043 let source_range_for_text =
4044 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4045
4046 let mut text = String::new();
4047 let mut highlight_ranges = Vec::new();
4048 let mut name_ranges = Vec::new();
4049 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4050 let mut last_buffer_range_end = 0;
4051 for (buffer_range, is_name) in buffer_ranges {
4052 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4053 if space_added {
4054 text.push(' ');
4055 }
4056 let before_append_len = text.len();
4057 let mut offset = buffer_range.start;
4058 chunks.seek(buffer_range.clone());
4059 for mut chunk in chunks.by_ref() {
4060 if chunk.text.len() > buffer_range.end - offset {
4061 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4062 offset = buffer_range.end;
4063 } else {
4064 offset += chunk.text.len();
4065 }
4066 let style = chunk
4067 .syntax_highlight_id
4068 .zip(theme)
4069 .and_then(|(highlight, theme)| highlight.style(theme));
4070 if let Some(style) = style {
4071 let start = text.len();
4072 let end = start + chunk.text.len();
4073 highlight_ranges.push((start..end, style));
4074 }
4075 text.push_str(chunk.text);
4076 if offset >= buffer_range.end {
4077 break;
4078 }
4079 }
4080 if is_name {
4081 let after_append_len = text.len();
4082 let start = if space_added && !name_ranges.is_empty() {
4083 before_append_len - 1
4084 } else {
4085 before_append_len
4086 };
4087 name_ranges.push(start..after_append_len);
4088 }
4089 last_buffer_range_end = buffer_range.end;
4090 }
4091
4092 Some(OutlineItem {
4093 depth: 0, // We'll calculate the depth later
4094 range: item_point_range,
4095 source_range_for_text: source_range_for_text.to_point(self),
4096 text,
4097 highlight_ranges,
4098 name_ranges,
4099 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4100 annotation_range: None,
4101 })
4102 }
4103
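    /// Returns the byte ranges of function bodies intersecting the given range, e.g. for folding
    /// them in the editor.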
4104 pub fn function_body_fold_ranges<T: ToOffset>(
4105 &self,
4106 within: Range<T>,
4107 ) -> impl Iterator<Item = Range<usize>> + '_ {
4108 self.text_object_ranges(within, TreeSitterOptions::default())
4109 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4110 }
4111
4112 /// For each grammar in the language, runs the provided
4113 /// [`tree_sitter::Query`] against the given range.
4114 pub fn matches(
4115 &self,
4116 range: Range<usize>,
4117 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4118 ) -> SyntaxMapMatches<'_> {
4119 self.syntax.matches(range, self, query)
4120 }
4121
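    /// Returns every bracket pair whose combined range (from the start of the open bracket to the
    /// end of the close bracket) overlaps the given range, including `newline_only` pairs.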
4122 pub fn all_bracket_ranges(
4123 &self,
4124 range: Range<usize>,
4125 ) -> impl Iterator<Item = BracketMatch> + '_ {
4126 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4127 grammar.brackets_config.as_ref().map(|c| &c.query)
4128 });
4129 let configs = matches
4130 .grammars()
4131 .iter()
4132 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4133 .collect::<Vec<_>>();
4134
4135 iter::from_fn(move || {
4136 while let Some(mat) = matches.peek() {
4137 let mut open = None;
4138 let mut close = None;
4139 let config = &configs[mat.grammar_index];
4140 let pattern = &config.patterns[mat.pattern_index];
4141 for capture in mat.captures {
4142 if capture.index == config.open_capture_ix {
4143 open = Some(capture.node.byte_range());
4144 } else if capture.index == config.close_capture_ix {
4145 close = Some(capture.node.byte_range());
4146 }
4147 }
4148
4149 matches.advance();
4150
4151 let Some((open_range, close_range)) = open.zip(close) else {
4152 continue;
4153 };
4154
4155 let bracket_range = open_range.start..=close_range.end;
4156 if !bracket_range.overlaps(&range) {
4157 continue;
4158 }
4159
4160 return Some(BracketMatch {
4161 open_range,
4162 close_range,
4163 newline_only: pattern.newline_only,
4164 });
4165 }
4166 None
4167 })
4168 }
4169
    /// Returns bracket range pairs overlapping or adjacent to `range`.
4171 pub fn bracket_ranges<T: ToOffset>(
4172 &self,
4173 range: Range<T>,
4174 ) -> impl Iterator<Item = BracketMatch> + '_ {
4175 // Find bracket pairs that *inclusively* contain the given range.
4176 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4177 self.all_bracket_ranges(range)
4178 .filter(|pair| !pair.newline_only)
4179 }
4180
4181 pub fn debug_variables_query<T: ToOffset>(
4182 &self,
4183 range: Range<T>,
4184 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4185 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4186
4187 let mut matches = self.syntax.matches_with_options(
4188 range.clone(),
4189 &self.text,
4190 TreeSitterOptions::default(),
4191 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4192 );
4193
4194 let configs = matches
4195 .grammars()
4196 .iter()
4197 .map(|grammar| grammar.debug_variables_config.as_ref())
4198 .collect::<Vec<_>>();
4199
4200 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4201
4202 iter::from_fn(move || {
4203 loop {
4204 while let Some(capture) = captures.pop() {
4205 if capture.0.overlaps(&range) {
4206 return Some(capture);
4207 }
4208 }
4209
4210 let mat = matches.peek()?;
4211
4212 let Some(config) = configs[mat.grammar_index].as_ref() else {
4213 matches.advance();
4214 continue;
4215 };
4216
4217 for capture in mat.captures {
4218 let Some(ix) = config
4219 .objects_by_capture_ix
4220 .binary_search_by_key(&capture.index, |e| e.0)
4221 .ok()
4222 else {
4223 continue;
4224 };
4225 let text_object = config.objects_by_capture_ix[ix].1;
4226 let byte_range = capture.node.byte_range();
4227
4228 let mut found = false;
4229 for (range, existing) in captures.iter_mut() {
4230 if existing == &text_object {
4231 range.start = range.start.min(byte_range.start);
4232 range.end = range.end.max(byte_range.end);
4233 found = true;
4234 break;
4235 }
4236 }
4237
4238 if !found {
4239 captures.push((byte_range, text_object));
4240 }
4241 }
4242
4243 matches.advance();
4244 }
4245 })
4246 }
4247
4248 pub fn text_object_ranges<T: ToOffset>(
4249 &self,
4250 range: Range<T>,
4251 options: TreeSitterOptions,
4252 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4253 let range =
4254 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4255
4256 let mut matches =
4257 self.syntax
4258 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4259 grammar.text_object_config.as_ref().map(|c| &c.query)
4260 });
4261
4262 let configs = matches
4263 .grammars()
4264 .iter()
4265 .map(|grammar| grammar.text_object_config.as_ref())
4266 .collect::<Vec<_>>();
4267
4268 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4269
4270 iter::from_fn(move || {
4271 loop {
4272 while let Some(capture) = captures.pop() {
4273 if capture.0.overlaps(&range) {
4274 return Some(capture);
4275 }
4276 }
4277
4278 let mat = matches.peek()?;
4279
4280 let Some(config) = configs[mat.grammar_index].as_ref() else {
4281 matches.advance();
4282 continue;
4283 };
4284
4285 for capture in mat.captures {
4286 let Some(ix) = config
4287 .text_objects_by_capture_ix
4288 .binary_search_by_key(&capture.index, |e| e.0)
4289 .ok()
4290 else {
4291 continue;
4292 };
4293 let text_object = config.text_objects_by_capture_ix[ix].1;
4294 let byte_range = capture.node.byte_range();
4295
4296 let mut found = false;
4297 for (range, existing) in captures.iter_mut() {
4298 if existing == &text_object {
4299 range.start = range.start.min(byte_range.start);
4300 range.end = range.end.max(byte_range.end);
4301 found = true;
4302 break;
4303 }
4304 }
4305
4306 if !found {
4307 captures.push((byte_range, text_object));
4308 }
4309 }
4310
4311 matches.advance();
4312 }
4313 })
4314 }
4315
4316 /// Returns enclosing bracket ranges containing the given range
4317 pub fn enclosing_bracket_ranges<T: ToOffset>(
4318 &self,
4319 range: Range<T>,
4320 ) -> impl Iterator<Item = BracketMatch> + '_ {
4321 let range = range.start.to_offset(self)..range.end.to_offset(self);
4322
4323 self.bracket_ranges(range.clone()).filter(move |pair| {
4324 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4325 })
4326 }
4327
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no
    /// brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
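    ///
    /// A minimal usage sketch (not compiled; assumes a `snapshot: BufferSnapshot` and a byte
    /// offset `cursor` are in scope):
    ///
    /// ```ignore
    /// // Find the nearest pair of brackets surrounding the cursor.
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(cursor..cursor, None) {
    ///     // `open` and `close` are the byte ranges of the opening and closing brackets.
    /// }
    /// ```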
4331 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4332 &self,
4333 range: Range<T>,
4334 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4335 ) -> Option<(Range<usize>, Range<usize>)> {
4336 let range = range.start.to_offset(self)..range.end.to_offset(self);
4337
4338 // Get the ranges of the innermost pair of brackets.
4339 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4340
4341 for pair in self.enclosing_bracket_ranges(range) {
4342 if let Some(range_filter) = range_filter
4343 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4344 {
4345 continue;
4346 }
4347
4348 let len = pair.close_range.end - pair.open_range.start;
4349
4350 if let Some((existing_open, existing_close)) = &result {
4351 let existing_len = existing_close.end - existing_open.start;
4352 if len > existing_len {
4353 continue;
4354 }
4355 }
4356
4357 result = Some((pair.open_range, pair.close_range));
4358 }
4359
4360 result
4361 }
4362
    /// Returns the byte ranges of any matches of the redaction query.
4364 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4365 /// will be run on the relevant section of the buffer.
4366 pub fn redacted_ranges<T: ToOffset>(
4367 &self,
4368 range: Range<T>,
4369 ) -> impl Iterator<Item = Range<usize>> + '_ {
4370 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4371 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4372 grammar
4373 .redactions_config
4374 .as_ref()
4375 .map(|config| &config.query)
4376 });
4377
4378 let configs = syntax_matches
4379 .grammars()
4380 .iter()
4381 .map(|grammar| grammar.redactions_config.as_ref())
4382 .collect::<Vec<_>>();
4383
4384 iter::from_fn(move || {
4385 let redacted_range = syntax_matches
4386 .peek()
4387 .and_then(|mat| {
4388 configs[mat.grammar_index].and_then(|config| {
4389 mat.captures
4390 .iter()
4391 .find(|capture| capture.index == config.redaction_capture_ix)
4392 })
4393 })
4394 .map(|mat| mat.node.byte_range());
4395 syntax_matches.advance();
4396 redacted_range
4397 })
4398 }
4399
4400 pub fn injections_intersecting_range<T: ToOffset>(
4401 &self,
4402 range: Range<T>,
4403 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4404 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4405
4406 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4407 grammar
4408 .injection_config
4409 .as_ref()
4410 .map(|config| &config.query)
4411 });
4412
4413 let configs = syntax_matches
4414 .grammars()
4415 .iter()
4416 .map(|grammar| grammar.injection_config.as_ref())
4417 .collect::<Vec<_>>();
4418
4419 iter::from_fn(move || {
4420 let ranges = syntax_matches.peek().and_then(|mat| {
4421 let config = &configs[mat.grammar_index]?;
4422 let content_capture_range = mat.captures.iter().find_map(|capture| {
4423 if capture.index == config.content_capture_ix {
4424 Some(capture.node.byte_range())
4425 } else {
4426 None
4427 }
4428 })?;
4429 let language = self.language_at(content_capture_range.start)?;
4430 Some((content_capture_range, language))
4431 });
4432 syntax_matches.advance();
4433 ranges
4434 })
4435 }
4436
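    /// Returns the runnables detected by each grammar's runnables query whose ranges intersect
    /// the given offset range, together with their captured variables and tags.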
4437 pub fn runnable_ranges(
4438 &self,
4439 offset_range: Range<usize>,
4440 ) -> impl Iterator<Item = RunnableRange> + '_ {
4441 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4442 grammar.runnable_config.as_ref().map(|config| &config.query)
4443 });
4444
4445 let test_configs = syntax_matches
4446 .grammars()
4447 .iter()
4448 .map(|grammar| grammar.runnable_config.as_ref())
4449 .collect::<Vec<_>>();
4450
4451 iter::from_fn(move || {
4452 loop {
4453 let mat = syntax_matches.peek()?;
4454
4455 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4456 let mut run_range = None;
4457 let full_range = mat.captures.iter().fold(
4458 Range {
4459 start: usize::MAX,
4460 end: 0,
4461 },
4462 |mut acc, next| {
4463 let byte_range = next.node.byte_range();
4464 if acc.start > byte_range.start {
4465 acc.start = byte_range.start;
4466 }
4467 if acc.end < byte_range.end {
4468 acc.end = byte_range.end;
4469 }
4470 acc
4471 },
4472 );
4473 if full_range.start > full_range.end {
4474 // We did not find a full spanning range of this match.
4475 return None;
4476 }
4477 let extra_captures: SmallVec<[_; 1]> =
4478 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4479 test_configs
4480 .extra_captures
4481 .get(capture.index as usize)
4482 .cloned()
4483 .and_then(|tag_name| match tag_name {
4484 RunnableCapture::Named(name) => {
4485 Some((capture.node.byte_range(), name))
4486 }
4487 RunnableCapture::Run => {
4488 let _ = run_range.insert(capture.node.byte_range());
4489 None
4490 }
4491 })
4492 }));
4493 let run_range = run_range?;
4494 let tags = test_configs
4495 .query
4496 .property_settings(mat.pattern_index)
4497 .iter()
4498 .filter_map(|property| {
4499 if *property.key == *"tag" {
4500 property
4501 .value
4502 .as_ref()
4503 .map(|value| RunnableTag(value.to_string().into()))
4504 } else {
4505 None
4506 }
4507 })
4508 .collect();
4509 let extra_captures = extra_captures
4510 .into_iter()
4511 .map(|(range, name)| {
4512 (
4513 name.to_string(),
4514 self.text_for_range(range).collect::<String>(),
4515 )
4516 })
4517 .collect();
4518 // All tags should have the same range.
4519 Some(RunnableRange {
4520 run_range,
4521 full_range,
4522 runnable: Runnable {
4523 tags,
4524 language: mat.language,
4525 buffer: self.remote_id(),
4526 },
4527 extra_captures,
4528 buffer_id: self.remote_id(),
4529 })
4530 });
4531
4532 syntax_matches.advance();
4533 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()` returns `None`. But if a match lacked a run
                    // marker, we don't want to end this iterator; we just loop around to the next match.
4536 return test_range;
4537 }
4538 }
4539 })
4540 }
4541
    /// Returns the selections of peers that intersect the given range, optionally including the local replica.
4543 #[allow(clippy::type_complexity)]
4544 pub fn selections_in_range(
4545 &self,
4546 range: Range<Anchor>,
4547 include_local: bool,
4548 ) -> impl Iterator<
4549 Item = (
4550 ReplicaId,
4551 bool,
4552 CursorShape,
4553 impl Iterator<Item = &Selection<Anchor>> + '_,
4554 ),
4555 > + '_ {
4556 self.remote_selections
4557 .iter()
4558 .filter(move |(replica_id, set)| {
4559 (include_local || **replica_id != self.text.replica_id())
4560 && !set.selections.is_empty()
4561 })
4562 .map(move |(replica_id, set)| {
4563 let start_ix = match set.selections.binary_search_by(|probe| {
4564 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4565 }) {
4566 Ok(ix) | Err(ix) => ix,
4567 };
4568 let end_ix = match set.selections.binary_search_by(|probe| {
4569 probe.start.cmp(&range.end, self).then(Ordering::Less)
4570 }) {
4571 Ok(ix) | Err(ix) => ix,
4572 };
4573
4574 (
4575 *replica_id,
4576 set.line_mode,
4577 set.cursor_shape,
4578 set.selections[start_ix..end_ix].iter(),
4579 )
4580 })
4581 }
4582
    /// Returns whether the buffer contains any diagnostics.
4584 pub fn has_diagnostics(&self) -> bool {
4585 !self.diagnostics.is_empty()
4586 }
4587
4588 /// Returns all the diagnostics intersecting the given range.
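    ///
    /// A minimal usage sketch (not compiled; assumes a `snapshot: BufferSnapshot` is in scope):
    ///
    /// ```ignore
    /// // Walk the diagnostics in the first 100 bytes, with their ranges resolved to offsets.
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..100, false) {
    ///     let severity = entry.diagnostic.severity;
    ///     let range = entry.range.clone();
    /// }
    /// ```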
4589 pub fn diagnostics_in_range<'a, T, O>(
4590 &'a self,
4591 search_range: Range<T>,
4592 reversed: bool,
4593 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4594 where
4595 T: 'a + Clone + ToOffset,
4596 O: 'a + FromAnchor,
4597 {
4598 let mut iterators: Vec<_> = self
4599 .diagnostics
4600 .iter()
4601 .map(|(_, collection)| {
4602 collection
4603 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4604 .peekable()
4605 })
4606 .collect();
4607
4608 std::iter::from_fn(move || {
4609 let (next_ix, _) = iterators
4610 .iter_mut()
4611 .enumerate()
4612 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4613 .min_by(|(_, a), (_, b)| {
4614 let cmp = a
4615 .range
4616 .start
4617 .cmp(&b.range.start, self)
4618 // when range is equal, sort by diagnostic severity
4619 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4620 // and stabilize order with group_id
4621 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4622 if reversed { cmp.reverse() } else { cmp }
4623 })?;
4624 iterators[next_ix]
4625 .next()
4626 .map(
4627 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4628 diagnostic,
4629 range: FromAnchor::from_anchor(&range.start, self)
4630 ..FromAnchor::from_anchor(&range.end, self),
4631 },
4632 )
4633 })
4634 }
4635
4636 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4637 /// should be used instead.
4638 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4639 &self.diagnostics
4640 }
4641
4642 /// Returns all the diagnostic groups associated with the given
4643 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4645 pub fn diagnostic_groups(
4646 &self,
4647 language_server_id: Option<LanguageServerId>,
4648 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4649 let mut groups = Vec::new();
4650
4651 if let Some(language_server_id) = language_server_id {
4652 if let Ok(ix) = self
4653 .diagnostics
4654 .binary_search_by_key(&language_server_id, |e| e.0)
4655 {
4656 self.diagnostics[ix]
4657 .1
4658 .groups(language_server_id, &mut groups, self);
4659 }
4660 } else {
4661 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4662 diagnostics.groups(*language_server_id, &mut groups, self);
4663 }
4664 }
4665
4666 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4667 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4668 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4669 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4670 });
4671
4672 groups
4673 }
4674
4675 /// Returns an iterator over the diagnostics for the given group.
4676 pub fn diagnostic_group<O>(
4677 &self,
4678 group_id: usize,
4679 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4680 where
4681 O: FromAnchor + 'static,
4682 {
4683 self.diagnostics
4684 .iter()
4685 .flat_map(move |(_, set)| set.group(group_id, self))
4686 }
4687
4688 /// An integer version number that accounts for all updates besides
4689 /// the buffer's text itself (which is versioned via a version vector).
4690 pub fn non_text_state_update_count(&self) -> usize {
4691 self.non_text_state_update_count
4692 }
4693
4694 /// An integer version that changes when the buffer's syntax changes.
4695 pub fn syntax_update_count(&self) -> usize {
4696 self.syntax.update_count()
4697 }
4698
    /// Returns a snapshot of the underlying file.
4700 pub fn file(&self) -> Option<&Arc<dyn File>> {
4701 self.file.as_ref()
4702 }
4703
4704 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4705 if let Some(file) = self.file() {
4706 if file.path().file_name().is_none() || include_root {
4707 Some(file.full_path(cx).to_string_lossy().into_owned())
4708 } else {
4709 Some(file.path().display(file.path_style(cx)).to_string())
4710 }
4711 } else {
4712 None
4713 }
4714 }
4715
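    /// Returns the words in the given range, keyed by their text and mapped to their anchor
    /// ranges, filtered according to the given [`WordsQuery`].
    ///
    /// A minimal usage sketch (not compiled; assumes a `snapshot: BufferSnapshot` is in scope):
    ///
    /// ```ignore
    /// // Collect words matching the fuzzy query "foo", skipping words that start with a digit.
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("foo"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```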
4716 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4717 let query_str = query.fuzzy_contents;
4718 if query_str.is_some_and(|query| query.is_empty()) {
4719 return BTreeMap::default();
4720 }
4721
4722 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4723 language,
4724 override_id: None,
4725 }));
4726
4727 let mut query_ix = 0;
4728 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4729 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4730
4731 let mut words = BTreeMap::default();
4732 let mut current_word_start_ix = None;
4733 let mut chunk_ix = query.range.start;
4734 for chunk in self.chunks(query.range, false) {
4735 for (i, c) in chunk.text.char_indices() {
4736 let ix = chunk_ix + i;
4737 if classifier.is_word(c) {
4738 if current_word_start_ix.is_none() {
4739 current_word_start_ix = Some(ix);
4740 }
4741
4742 if let Some(query_chars) = &query_chars
4743 && query_ix < query_len
4744 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4745 {
4746 query_ix += 1;
4747 }
4748 continue;
4749 } else if let Some(word_start) = current_word_start_ix.take()
4750 && query_ix == query_len
4751 {
4752 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4753 let mut word_text = self.text_for_range(word_start..ix).peekable();
4754 let first_char = word_text
4755 .peek()
4756 .and_then(|first_chunk| first_chunk.chars().next());
                        // As a heuristic to reduce useless completions, skip empty "words" and words that start with a digit.
4758 if !query.skip_digits
4759 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4760 {
4761 words.insert(word_text.collect(), word_range);
4762 }
4763 }
4764 query_ix = 0;
4765 }
4766 chunk_ix += chunk.text.len();
4767 }
4768
4769 words
4770 }
4771}
4772
4773pub struct WordsQuery<'a> {
    /// Only return words that contain all of the characters of the fuzzy string, in order.
4775 pub fuzzy_contents: Option<&'a str>,
4776 /// Skips words that start with a digit.
4777 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4779 pub range: Range<usize>,
4780}
4781
4782fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4783 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4784}
4785
4786fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4787 let mut result = IndentSize::spaces(0);
4788 for c in text {
4789 let kind = match c {
4790 ' ' => IndentKind::Space,
4791 '\t' => IndentKind::Tab,
4792 _ => break,
4793 };
4794 if result.len == 0 {
4795 result.kind = kind;
4796 }
4797 result.len += 1;
4798 }
4799 result
4800}
4801
4802impl Clone for BufferSnapshot {
4803 fn clone(&self) -> Self {
4804 Self {
4805 text: self.text.clone(),
4806 syntax: self.syntax.clone(),
4807 file: self.file.clone(),
4808 remote_selections: self.remote_selections.clone(),
4809 diagnostics: self.diagnostics.clone(),
4810 language: self.language.clone(),
4811 non_text_state_update_count: self.non_text_state_update_count,
4812 }
4813 }
4814}
4815
4816impl Deref for BufferSnapshot {
4817 type Target = text::BufferSnapshot;
4818
4819 fn deref(&self) -> &Self::Target {
4820 &self.text
4821 }
4822}
4823
4824unsafe impl Send for BufferChunks<'_> {}
4825
4826impl<'a> BufferChunks<'a> {
4827 pub(crate) fn new(
4828 text: &'a Rope,
4829 range: Range<usize>,
4830 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4831 diagnostics: bool,
4832 buffer_snapshot: Option<&'a BufferSnapshot>,
4833 ) -> Self {
4834 let mut highlights = None;
4835 if let Some((captures, highlight_maps)) = syntax {
4836 highlights = Some(BufferChunkHighlights {
4837 captures,
4838 next_capture: None,
4839 stack: Default::default(),
4840 highlight_maps,
4841 })
4842 }
4843
4844 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4845 let chunks = text.chunks_in_range(range.clone());
4846
4847 let mut this = BufferChunks {
4848 range,
4849 buffer_snapshot,
4850 chunks,
4851 diagnostic_endpoints,
4852 error_depth: 0,
4853 warning_depth: 0,
4854 information_depth: 0,
4855 hint_depth: 0,
4856 unnecessary_depth: 0,
4857 underline: true,
4858 highlights,
4859 };
4860 this.initialize_diagnostic_endpoints();
4861 this
4862 }
4863
    /// Seeks to the given byte range in the buffer.
4865 pub fn seek(&mut self, range: Range<usize>) {
4866 let old_range = std::mem::replace(&mut self.range, range.clone());
4867 self.chunks.set_range(self.range.clone());
4868 if let Some(highlights) = self.highlights.as_mut() {
4869 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4870 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4871 highlights
4872 .stack
4873 .retain(|(end_offset, _)| *end_offset > range.start);
4874 if let Some(capture) = &highlights.next_capture
4875 && range.start >= capture.node.start_byte()
4876 {
4877 let next_capture_end = capture.node.end_byte();
4878 if range.start < next_capture_end {
4879 highlights.stack.push((
4880 next_capture_end,
4881 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4882 ));
4883 }
4884 highlights.next_capture.take();
4885 }
4886 } else if let Some(snapshot) = self.buffer_snapshot {
4887 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4888 *highlights = BufferChunkHighlights {
4889 captures,
4890 next_capture: None,
4891 stack: Default::default(),
4892 highlight_maps,
4893 };
4894 } else {
4895 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4896 // Seeking such BufferChunks is not supported.
4897 debug_assert!(
4898 false,
4899 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4900 );
4901 }
4902
4903 highlights.captures.set_byte_range(self.range.clone());
4904 self.initialize_diagnostic_endpoints();
4905 }
4906 }
4907
4908 fn initialize_diagnostic_endpoints(&mut self) {
4909 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4910 && let Some(buffer) = self.buffer_snapshot
4911 {
4912 let mut diagnostic_endpoints = Vec::new();
4913 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4914 diagnostic_endpoints.push(DiagnosticEndpoint {
4915 offset: entry.range.start,
4916 is_start: true,
4917 severity: entry.diagnostic.severity,
4918 is_unnecessary: entry.diagnostic.is_unnecessary,
4919 underline: entry.diagnostic.underline,
4920 });
4921 diagnostic_endpoints.push(DiagnosticEndpoint {
4922 offset: entry.range.end,
4923 is_start: false,
4924 severity: entry.diagnostic.severity,
4925 is_unnecessary: entry.diagnostic.is_unnecessary,
4926 underline: entry.diagnostic.underline,
4927 });
4928 }
4929 diagnostic_endpoints
4930 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4931 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4932 self.hint_depth = 0;
4933 self.error_depth = 0;
4934 self.warning_depth = 0;
4935 self.information_depth = 0;
4936 }
4937 }
4938
4939 /// The current byte offset in the buffer.
4940 pub fn offset(&self) -> usize {
4941 self.range.start
4942 }
4943
4944 pub fn range(&self) -> Range<usize> {
4945 self.range.clone()
4946 }
4947
4948 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4949 let depth = match endpoint.severity {
4950 DiagnosticSeverity::ERROR => &mut self.error_depth,
4951 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4952 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4953 DiagnosticSeverity::HINT => &mut self.hint_depth,
4954 _ => return,
4955 };
4956 if endpoint.is_start {
4957 *depth += 1;
4958 } else {
4959 *depth -= 1;
4960 }
4961
4962 if endpoint.is_unnecessary {
4963 if endpoint.is_start {
4964 self.unnecessary_depth += 1;
4965 } else {
4966 self.unnecessary_depth -= 1;
4967 }
4968 }
4969 }
4970
4971 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4972 if self.error_depth > 0 {
4973 Some(DiagnosticSeverity::ERROR)
4974 } else if self.warning_depth > 0 {
4975 Some(DiagnosticSeverity::WARNING)
4976 } else if self.information_depth > 0 {
4977 Some(DiagnosticSeverity::INFORMATION)
4978 } else if self.hint_depth > 0 {
4979 Some(DiagnosticSeverity::HINT)
4980 } else {
4981 None
4982 }
4983 }
4984
4985 fn current_code_is_unnecessary(&self) -> bool {
4986 self.unnecessary_depth > 0
4987 }
4988}
4989
4990impl<'a> Iterator for BufferChunks<'a> {
4991 type Item = Chunk<'a>;
4992
4993 fn next(&mut self) -> Option<Self::Item> {
4994 let mut next_capture_start = usize::MAX;
4995 let mut next_diagnostic_endpoint = usize::MAX;
4996
4997 if let Some(highlights) = self.highlights.as_mut() {
4998 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4999 if *parent_capture_end <= self.range.start {
5000 highlights.stack.pop();
5001 } else {
5002 break;
5003 }
5004 }
5005
5006 if highlights.next_capture.is_none() {
5007 highlights.next_capture = highlights.captures.next();
5008 }
5009
5010 while let Some(capture) = highlights.next_capture.as_ref() {
5011 if self.range.start < capture.node.start_byte() {
5012 next_capture_start = capture.node.start_byte();
5013 break;
5014 } else {
5015 let highlight_id =
5016 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5017 highlights
5018 .stack
5019 .push((capture.node.end_byte(), highlight_id));
5020 highlights.next_capture = highlights.captures.next();
5021 }
5022 }
5023 }
5024
5025 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5026 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5027 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5028 if endpoint.offset <= self.range.start {
5029 self.update_diagnostic_depths(endpoint);
5030 diagnostic_endpoints.next();
5031 self.underline = endpoint.underline;
5032 } else {
5033 next_diagnostic_endpoint = endpoint.offset;
5034 break;
5035 }
5036 }
5037 }
5038 self.diagnostic_endpoints = diagnostic_endpoints;
5039
5040 if let Some(ChunkBitmaps {
5041 text: chunk,
5042 chars: chars_map,
5043 tabs,
5044 }) = self.chunks.peek_with_bitmaps()
5045 {
5046 let chunk_start = self.range.start;
5047 let mut chunk_end = (self.chunks.offset() + chunk.len())
5048 .min(next_capture_start)
5049 .min(next_diagnostic_endpoint);
5050 let mut highlight_id = None;
5051 if let Some(highlights) = self.highlights.as_ref()
5052 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5053 {
5054 chunk_end = chunk_end.min(*parent_capture_end);
5055 highlight_id = Some(*parent_highlight_id);
5056 }
5057 let bit_start = chunk_start - self.chunks.offset();
5058 let bit_end = chunk_end - self.chunks.offset();
5059
5060 let slice = &chunk[bit_start..bit_end];
5061
5062 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5063 let tabs = (tabs >> bit_start) & mask;
5064 let chars = (chars_map >> bit_start) & mask;
5065
5066 self.range.start = chunk_end;
5067 if self.range.start == self.chunks.offset() + chunk.len() {
5068 self.chunks.next().unwrap();
5069 }
5070
5071 Some(Chunk {
5072 text: slice,
5073 syntax_highlight_id: highlight_id,
5074 underline: self.underline,
5075 diagnostic_severity: self.current_diagnostic_severity(),
5076 is_unnecessary: self.current_code_is_unnecessary(),
5077 tabs,
5078 chars,
5079 ..Chunk::default()
5080 })
5081 } else {
5082 None
5083 }
5084 }
5085}
5086
5087impl operation_queue::Operation for Operation {
5088 fn lamport_timestamp(&self) -> clock::Lamport {
5089 match self {
5090 Operation::Buffer(_) => {
5091 unreachable!("buffer operations should never be deferred at this layer")
5092 }
5093 Operation::UpdateDiagnostics {
5094 lamport_timestamp, ..
5095 }
5096 | Operation::UpdateSelections {
5097 lamport_timestamp, ..
5098 }
5099 | Operation::UpdateCompletionTriggers {
5100 lamport_timestamp, ..
5101 }
5102 | Operation::UpdateLineEnding {
5103 lamport_timestamp, ..
5104 } => *lamport_timestamp,
5105 }
5106 }
5107}
5108
5109impl Default for Diagnostic {
5110 fn default() -> Self {
5111 Self {
5112 source: Default::default(),
5113 source_kind: DiagnosticSourceKind::Other,
5114 code: None,
5115 code_description: None,
5116 severity: DiagnosticSeverity::ERROR,
5117 message: Default::default(),
5118 markdown: None,
5119 group_id: 0,
5120 is_primary: false,
5121 is_disk_based: false,
5122 is_unnecessary: false,
5123 underline: true,
5124 data: None,
5125 }
5126 }
5127}
5128
5129impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5131 pub fn spaces(len: u32) -> Self {
5132 Self {
5133 len,
5134 kind: IndentKind::Space,
5135 }
5136 }
5137
5138 /// Returns an [`IndentSize`] representing a tab.
5139 pub fn tab() -> Self {
5140 Self {
5141 len: 1,
5142 kind: IndentKind::Tab,
5143 }
5144 }
5145
5146 /// An iterator over the characters represented by this [`IndentSize`].
5147 pub fn chars(&self) -> impl Iterator<Item = char> {
5148 iter::repeat(self.char()).take(self.len as usize)
5149 }
5150
5151 /// The character representation of this [`IndentSize`].
5152 pub fn char(&self) -> char {
5153 match self.kind {
5154 IndentKind::Space => ' ',
5155 IndentKind::Tab => '\t',
5156 }
5157 }
5158
5159 /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been shrunk or enlarged by the given size, depending on the given direction.
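    ///
    /// A minimal sketch of the expected behavior (not compiled): growing a four-space indent by
    /// another four spaces yields eight spaces.
    ///
    /// ```ignore
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.chars().count(), 8);
    /// ```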
5161 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5162 match direction {
5163 Ordering::Less => {
5164 if self.kind == size.kind && self.len >= size.len {
5165 self.len -= size.len;
5166 }
5167 }
5168 Ordering::Equal => {}
5169 Ordering::Greater => {
5170 if self.len == 0 {
5171 self = size;
5172 } else if self.kind == size.kind {
5173 self.len += size.len;
5174 }
5175 }
5176 }
5177 self
5178 }
5179
5180 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5181 match self.kind {
5182 IndentKind::Space => self.len as usize,
5183 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5184 }
5185 }
5186}
5187
5188#[cfg(any(test, feature = "test-support"))]
5189pub struct TestFile {
5190 pub path: Arc<RelPath>,
5191 pub root_name: String,
5192 pub local_root: Option<PathBuf>,
5193}
5194
5195#[cfg(any(test, feature = "test-support"))]
5196impl File for TestFile {
5197 fn path(&self) -> &Arc<RelPath> {
5198 &self.path
5199 }
5200
5201 fn full_path(&self, _: &gpui::App) -> PathBuf {
5202 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5203 }
5204
5205 fn as_local(&self) -> Option<&dyn LocalFile> {
5206 if self.local_root.is_some() {
5207 Some(self)
5208 } else {
5209 None
5210 }
5211 }
5212
5213 fn disk_state(&self) -> DiskState {
5214 unimplemented!()
5215 }
5216
5217 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5218 self.path().file_name().unwrap_or(self.root_name.as_ref())
5219 }
5220
5221 fn worktree_id(&self, _: &App) -> WorktreeId {
5222 WorktreeId::from_usize(0)
5223 }
5224
5225 fn to_proto(&self, _: &App) -> rpc::proto::File {
5226 unimplemented!()
5227 }
5228
5229 fn is_private(&self) -> bool {
5230 false
5231 }
5232
5233 fn path_style(&self, _cx: &App) -> PathStyle {
5234 PathStyle::local()
5235 }
5236}
5237
5238#[cfg(any(test, feature = "test-support"))]
5239impl LocalFile for TestFile {
5240 fn abs_path(&self, _cx: &App) -> PathBuf {
5241 PathBuf::from(self.local_root.as_ref().unwrap())
5242 .join(&self.root_name)
5243 .join(self.path.as_std_path())
5244 }
5245
5246 fn load(&self, _cx: &App) -> Task<Result<String>> {
5247 unimplemented!()
5248 }
5249
5250 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5251 unimplemented!()
5252 }
5253}
5254
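/// Groups an ascending sequence of values into contiguous ranges, capping each range at
/// `max_len` values.
///
/// A minimal sketch of the expected behavior (not compiled):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```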
5255pub(crate) fn contiguous_ranges(
5256 values: impl Iterator<Item = u32>,
5257 max_len: usize,
5258) -> impl Iterator<Item = Range<u32>> {
5259 let mut values = values;
5260 let mut current_range: Option<Range<u32>> = None;
5261 std::iter::from_fn(move || {
5262 loop {
5263 if let Some(value) = values.next() {
5264 if let Some(range) = &mut current_range
5265 && value == range.end
5266 && range.len() < max_len
5267 {
5268 range.end += 1;
5269 continue;
5270 }
5271
5272 let prev_range = current_range.clone();
5273 current_range = Some(value..(value + 1));
5274 if prev_range.is_some() {
5275 return prev_range;
5276 }
5277 } else {
5278 return current_range.take();
5279 }
5280 }
5281 })
5282}
5283
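/// Classifies characters as word, whitespace, or punctuation characters, honoring any extra
/// word characters configured by the [`LanguageScope`], if one is present.
///
/// A minimal sketch of the expected behavior without a language scope (not compiled):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('.'), CharKind::Punctuation);
/// ```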
5284#[derive(Default, Debug)]
5285pub struct CharClassifier {
5286 scope: Option<LanguageScope>,
5287 scope_context: Option<CharScopeContext>,
5288 ignore_punctuation: bool,
5289}
5290
5291impl CharClassifier {
5292 pub fn new(scope: Option<LanguageScope>) -> Self {
5293 Self {
5294 scope,
5295 scope_context: None,
5296 ignore_punctuation: false,
5297 }
5298 }
5299
5300 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5301 Self {
5302 scope_context,
5303 ..self
5304 }
5305 }
5306
5307 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5308 Self {
5309 ignore_punctuation,
5310 ..self
5311 }
5312 }
5313
5314 pub fn is_whitespace(&self, c: char) -> bool {
5315 self.kind(c) == CharKind::Whitespace
5316 }
5317
5318 pub fn is_word(&self, c: char) -> bool {
5319 self.kind(c) == CharKind::Word
5320 }
5321
5322 pub fn is_punctuation(&self, c: char) -> bool {
5323 self.kind(c) == CharKind::Punctuation
5324 }
5325
5326 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5327 if c.is_alphanumeric() || c == '_' {
5328 return CharKind::Word;
5329 }
5330
5331 if let Some(scope) = &self.scope {
5332 let characters = match self.scope_context {
5333 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5334 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5335 None => scope.word_characters(),
5336 };
5337 if let Some(characters) = characters
5338 && characters.contains(&c)
5339 {
5340 return CharKind::Word;
5341 }
5342 }
5343
5344 if c.is_whitespace() {
5345 return CharKind::Whitespace;
5346 }
5347
5348 if ignore_punctuation {
5349 CharKind::Word
5350 } else {
5351 CharKind::Punctuation
5352 }
5353 }
5354
5355 pub fn kind(&self, c: char) -> CharKind {
5356 self.kind_with(c, self.ignore_punctuation)
5357 }
5358}
5359
5360/// Find all of the ranges of whitespace that occur at the ends of lines
5361/// in the given rope.
5362///
5363/// This could also be done with a regex search, but this implementation
5364/// avoids copying text.
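///
/// A minimal sketch of the expected behavior (not compiled):
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n}\t\t\n");
/// // One range for the spaces after `{`, one for the tabs after `}`.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..14, 16..18]);
/// ```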
5365pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5366 let mut ranges = Vec::new();
5367
5368 let mut offset = 0;
5369 let mut prev_chunk_trailing_whitespace_range = 0..0;
5370 for chunk in rope.chunks() {
5371 let mut prev_line_trailing_whitespace_range = 0..0;
5372 for (i, line) in chunk.split('\n').enumerate() {
5373 let line_end_offset = offset + line.len();
5374 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5375 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5376
5377 if i == 0 && trimmed_line_len == 0 {
5378 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5379 }
5380 if !prev_line_trailing_whitespace_range.is_empty() {
5381 ranges.push(prev_line_trailing_whitespace_range);
5382 }
5383
5384 offset = line_end_offset + 1;
5385 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5386 }
5387
5388 offset -= 1;
5389 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5390 }
5391
5392 if !prev_chunk_trailing_whitespace_range.is_empty() {
5393 ranges.push(prev_chunk_trailing_whitespace_range);
5394 }
5395
5396 ranges
5397}