use crate::{
    DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
    TextObject, TreeSitterOptions,
    diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
    language_settings::{LanguageSettings, language_settings},
    outline::OutlineItem,
    syntax_map::{
        SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
        SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
    },
    task_context::RunnableRange,
    text_diff::text_diff,
};
pub use crate::{
    Grammar, Language, LanguageRegistry,
    diagnostic_set::DiagnosticSet,
    highlight_map::{HighlightId, HighlightMap},
    proto,
};
use anyhow::{Context as _, Result};
use clock::Lamport;
pub use clock::ReplicaId;
use collections::HashMap;
use fs::MTime;
use futures::channel::oneshot;
use gpui::{
    App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
    Task, TaskLabel, TextStyle,
};

use lsp::{LanguageServerId, NumberOrString};
use parking_lot::Mutex;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use settings::WorktreeId;
use smallvec::SmallVec;
use smol::future::yield_now;
use std::{
    any::Any,
    borrow::Cow,
    cell::Cell,
    cmp::{self, Ordering, Reverse},
    collections::{BTreeMap, BTreeSet},
    future::Future,
    iter::{self, Iterator, Peekable},
    mem,
    num::NonZeroU32,
    ops::{Deref, Range},
    path::PathBuf,
    rc,
    sync::{Arc, LazyLock},
    time::{Duration, Instant},
    vec,
};
use sum_tree::TreeMap;
use text::operation_queue::OperationQueue;
use text::*;
pub use text::{
    Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
    LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
    SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
    ToPointUtf16, Transaction, TransactionId, Unclipped,
};
use theme::{ActiveTheme as _, SyntaxTheme};
#[cfg(any(test, feature = "test-support"))]
use util::RandomCharIter;
use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};

#[cfg(any(test, feature = "test-support"))]
pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};

pub use lsp::DiagnosticSeverity;

/// A label for the background task spawned by the buffer to compute
/// a diff against the contents of its file.
pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);

/// Indicates whether a [`Buffer`] has permission to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {
    /// The buffer is a mutable replica.
    ReadWrite,
    /// The buffer is a read-only replica.
    ReadOnly,
}

pub type BufferRow = u32;

/// An in-memory representation of a source code file, including its text,
/// syntax trees, git status, and diagnostics.
pub struct Buffer {
    text: TextBuffer,
    branch_state: Option<BufferBranchState>,
    /// Filesystem state, `None` when there is no path.
    file: Option<Arc<dyn File>>,
    /// The mtime of the file when this buffer was last loaded from
    /// or saved to disk.
    saved_mtime: Option<MTime>,
    /// The version vector when this buffer was last loaded from
    /// or saved to disk.
    saved_version: clock::Global,
    preview_version: clock::Global,
    transaction_depth: usize,
    was_dirty_before_starting_transaction: Option<bool>,
    reload_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
    pending_autoindent: Option<Task<()>>,
    sync_parse_timeout: Duration,
    syntax_map: Mutex<SyntaxMap>,
    reparse: Option<Task<()>>,
    parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
    non_text_state_update_count: usize,
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    diagnostics_timestamp: clock::Lamport,
    completion_triggers: BTreeSet<String>,
    completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
    completion_triggers_timestamp: clock::Lamport,
    deferred_ops: OperationQueue<Operation>,
    capability: Capability,
    has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The contents of the cell are `(self.version, has_changes)` at the time of the last call.
    has_unsaved_edits: Cell<(clock::Global, bool)>,
    change_bits: Vec<rc::Weak<Cell<bool>>>,
    _subscriptions: Vec<gpui::Subscription>,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ParseStatus {
    Idle,
    Parsing,
}

struct BufferBranchState {
    base_buffer: Entity<Buffer>,
    merged_operations: Vec<Lamport>,
}

/// An immutable, cheaply cloneable representation of a fixed
/// state of a buffer.
pub struct BufferSnapshot {
    pub text: text::BufferSnapshot,
    pub syntax: SyntaxSnapshot,
    file: Option<Arc<dyn File>>,
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    language: Option<Arc<Language>>,
    non_text_state_update_count: usize,
}

/// The kind and amount of indentation in a particular line. For now,
/// assumes that indentation is all the same character.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub struct IndentSize {
    /// The number of bytes that comprise the indentation.
    pub len: u32,
    /// The kind of whitespace used for indentation.
    pub kind: IndentKind,
}

/// A whitespace character that's used for indentation.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub enum IndentKind {
    /// An ASCII space character.
    #[default]
    Space,
    /// An ASCII tab character.
    Tab,
}

/// The shape of a selection cursor.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum CursorShape {
    /// A vertical bar
    #[default]
    Bar,
    /// A block that surrounds the following character
    Block,
    /// An underline that runs along the following character
    Underline,
    /// A box drawn around the following character
    Hollow,
}

impl From<settings::CursorShape> for CursorShape {
    fn from(shape: settings::CursorShape) -> Self {
        match shape {
            settings::CursorShape::Bar => CursorShape::Bar,
            settings::CursorShape::Block => CursorShape::Block,
            settings::CursorShape::Underline => CursorShape::Underline,
            settings::CursorShape::Hollow => CursorShape::Hollow,
        }
    }
}

#[derive(Clone, Debug)]
struct SelectionSet {
    line_mode: bool,
    cursor_shape: CursorShape,
    selections: Arc<[Selection<Anchor>]>,
    lamport_timestamp: clock::Lamport,
}

/// A diagnostic associated with a certain range of a buffer.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Diagnostic {
    /// The name of the service that produced this diagnostic.
    pub source: Option<String>,
    /// A machine-readable code that identifies this diagnostic.
    pub code: Option<NumberOrString>,
    pub code_description: Option<lsp::Uri>,
    /// Whether this diagnostic is a hint, warning, or error.
    pub severity: DiagnosticSeverity,
    /// The human-readable message associated with this diagnostic.
    pub message: String,
    /// The human-readable message, in Markdown format.
    pub markdown: Option<String>,
    /// An id that identifies the group to which this diagnostic belongs.
    ///
    /// When a language server produces a diagnostic with
    /// one or more associated diagnostics, those diagnostics are all
    /// assigned a single group ID.
    pub group_id: usize,
    /// Whether this diagnostic is the primary diagnostic for its group.
    ///
    /// In a given group, the primary diagnostic is the top-level diagnostic
    /// returned by the language server. The non-primary diagnostics are the
    /// associated diagnostics.
    pub is_primary: bool,
    /// Whether this diagnostic is considered to originate from an analysis of
    /// files on disk, as opposed to any unsaved buffer contents. This is a
    /// property of a given diagnostic source, and is configured for a given
    /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
    /// for the language server.
    pub is_disk_based: bool,
    /// Whether this diagnostic marks unnecessary code.
    pub is_unnecessary: bool,
    /// Allows quick grouping of diagnostics by their source.
    pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the language server when we request code actions for this diagnostic.
    pub data: Option<Value>,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum DiagnosticSourceKind {
    Pulled,
    Pushed,
    Other,
}

/// An operation used to synchronize this buffer with its other replicas.
#[derive(Clone, Debug, PartialEq)]
pub enum Operation {
    /// A text operation.
    Buffer(text::Operation),

    /// An update to the buffer's diagnostics.
    UpdateDiagnostics {
        /// The id of the language server that produced the new diagnostics.
        server_id: LanguageServerId,
        /// The diagnostics.
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },

    /// An update to the most recent selections in this buffer.
    UpdateSelections {
        /// The selections.
        selections: Arc<[Selection<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// Whether the selections are in 'line mode'.
        line_mode: bool,
        /// The [`CursorShape`] associated with these selections.
        cursor_shape: CursorShape,
    },

    /// An update to the characters that should trigger autocompletion
    /// for this buffer.
    UpdateCompletionTriggers {
        /// The characters that trigger autocompletion.
        triggers: Vec<String>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// The language server ID.
        server_id: LanguageServerId,
    },

    /// An update to the line ending type of this buffer.
    UpdateLineEnding {
        /// The line ending type.
        line_ending: LineEnding,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },
}

/// An event that occurs in a buffer.
#[derive(Clone, Debug, PartialEq)]
pub enum BufferEvent {
    /// The buffer was changed in a way that must be
    /// propagated to its other replicas.
    Operation {
        operation: Operation,
        is_local: bool,
    },
    /// The buffer was edited.
    Edited,
    /// The buffer's `dirty` bit changed.
    DirtyChanged,
    /// The buffer was saved.
    Saved,
    /// The buffer's file was changed on disk.
    FileHandleChanged,
    /// The buffer was reloaded.
    Reloaded,
    /// The buffer needs to be reloaded.
    ReloadNeeded,
    /// The buffer's language was changed.
    LanguageChanged,
    /// The buffer's syntax trees were updated.
    Reparsed,
    /// The buffer's diagnostics were updated.
    DiagnosticsUpdated,
    /// The buffer gained or lost editing capabilities.
    CapabilityChanged,
}

/// The file associated with a buffer.
pub trait File: Send + Sync + Any {
    /// Returns the [`LocalFile`] associated with this file, if the
    /// file is local.
    fn as_local(&self) -> Option<&dyn LocalFile>;

    /// Returns whether this file is local.
    fn is_local(&self) -> bool {
        self.as_local().is_some()
    }

    /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
    /// only available in some states, such as modification time.
    fn disk_state(&self) -> DiskState;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<RelPath>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
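    ///
    /// For example, if the worktree root is a directory named `my-project`, a file whose
    /// [`path`](File::path) is `src/lib.rs` has the full path `my-project/src/lib.rs`
    /// (illustrative names, not taken from the original documentation).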
    fn full_path(&self, cx: &App) -> PathBuf;

    /// Returns the path style of this file.
    fn path_style(&self, cx: &App) -> PathStyle;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;

    /// Returns the id of the worktree to which this file belongs.
    ///
    /// This is needed for looking up project-specific settings.
    fn worktree_id(&self, cx: &App) -> WorktreeId;

    /// Converts this file into a protobuf message.
    fn to_proto(&self, cx: &App) -> rpc::proto::File;

    /// Returns whether Zed considers this to be a private file.
    fn is_private(&self) -> bool;
}

/// The file's storage status - whether it's stored (`Present`), and if so when it was last
/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
/// indicator for new files.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum DiskState {
    /// File created in Zed that has not been saved.
    New,
    /// File present on the filesystem.
    Present { mtime: MTime },
    /// Deleted file that was previously present.
    Deleted,
}

impl DiskState {
    /// Returns the file's last known modification time on disk.
    pub fn mtime(self) -> Option<MTime> {
        match self {
            DiskState::New => None,
            DiskState::Present { mtime } => Some(mtime),
            DiskState::Deleted => None,
        }
    }

    pub fn exists(&self) -> bool {
        match self {
            DiskState::New => false,
            DiskState::Present { .. } => true,
            DiskState::Deleted => false,
        }
    }
}

/// The file associated with a buffer, in the case where the file is on the local disk.
pub trait LocalFile: File {
    /// Returns the absolute path of this file.
    fn abs_path(&self, cx: &App) -> PathBuf;

    /// Loads the file's contents from disk and returns them as a UTF-8 encoded string.
    fn load(&self, cx: &App) -> Task<Result<String>>;

    /// Loads the file's contents from disk as raw bytes.
    fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
}

/// The auto-indent behavior associated with an editing operation.
/// For some editing operations, each affected line of text has its
/// indentation recomputed. For other operations, the entire block
/// of edited text is adjusted uniformly.
#[derive(Clone, Debug)]
pub enum AutoindentMode {
    /// Indent each line of inserted text.
    EachLine,
    /// Apply the same indentation adjustment to all of the lines
    /// in a given insertion.
    Block {
        /// The original indentation column of the first line of each
        /// insertion, if it has been copied.
        ///
        /// Knowing this makes it possible to preserve the relative indentation
        /// of every line in the insertion from when it was copied.
        ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is then auto-indented to column `b`, then every other line of
        /// the insertion is shifted by `b - a` columns.
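        ///
        /// For example, if a block was copied from an original indent column of 4
        /// (`a = 4`) and its first line is auto-indented to column 8 (`b = 8`), every
        /// subsequent line of the insertion is shifted right by 4 columns, preserving
        /// the block's internal shape (illustrative numbers).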
        original_indent_columns: Vec<Option<u32>>,
    },
}

#[derive(Clone)]
struct AutoindentRequest {
    before_edit: BufferSnapshot,
    entries: Vec<AutoindentRequestEntry>,
    is_block_mode: bool,
    ignore_empty_lines: bool,
}

#[derive(Debug, Clone)]
struct AutoindentRequestEntry {
    /// A range of the buffer whose indentation should be adjusted.
    range: Range<Anchor>,
    /// Whether or not these lines should be considered brand new, for the
    /// purpose of auto-indent. When text is not new, its indentation will
    /// only be adjusted if the suggested indentation level has *changed*
    /// since the edit was made.
    first_line_is_new: bool,
    indent_size: IndentSize,
    original_indent_column: Option<u32>,
}

#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    delta: Ordering,
    within_error: bool,
}

struct BufferChunkHighlights<'a> {
    captures: SyntaxMapCaptures<'a>,
    next_capture: Option<SyntaxMapCapture<'a>>,
    stack: Vec<(usize, HighlightId)>,
    highlight_maps: Vec<HighlightMap>,
}

/// An iterator that yields chunks of a buffer's text, along with their
/// syntax highlights and diagnostic status.
pub struct BufferChunks<'a> {
    buffer_snapshot: Option<&'a BufferSnapshot>,
    range: Range<usize>,
    chunks: text::Chunks<'a>,
    diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    unnecessary_depth: usize,
    underline: bool,
    highlights: Option<BufferChunkHighlights<'a>>,
}

/// A chunk of a buffer's text, along with its syntax highlight and
/// diagnostic status.
#[derive(Clone, Debug, Default)]
pub struct Chunk<'a> {
    /// The text of the chunk.
    pub text: &'a str,
    /// The syntax highlighting style of the chunk.
    pub syntax_highlight_id: Option<HighlightId>,
    /// The highlight style that has been applied to this chunk in
    /// the editor.
    pub highlight_style: Option<HighlightStyle>,
    /// The severity of the diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset marking which characters in this chunk are tabs.
    pub tabs: u128,
    /// A bitmap of the character positions in this chunk.
    pub chars: u128,
    /// Whether this chunk of text is marked as unnecessary.
    pub is_unnecessary: bool,
    /// Whether this chunk of text was originally a tab character.
    pub is_tab: bool,
    /// Whether this chunk of text was originally an inlay.
    pub is_inlay: bool,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}

/// A set of edits to a given version of a buffer, computed asynchronously.
#[derive(Debug)]
pub struct Diff {
    pub base_version: clock::Global,
    pub line_ending: LineEnding,
    pub edits: Vec<(Range<usize>, Arc<str>)>,
}

#[derive(Debug, Clone, Copy)]
pub(crate) struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    underline: bool,
    severity: DiagnosticSeverity,
    is_unnecessary: bool,
}

/// A class of characters, used for characterizing a run of text.
#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
pub enum CharKind {
    /// Whitespace.
    Whitespace,
    /// Punctuation.
    Punctuation,
    /// Word.
    Word,
}

/// Context for character classification within a specific scope.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum CharScopeContext {
    /// Character classification for completion queries.
    ///
    /// This context treats certain characters as word constituents that would
    /// normally be considered punctuation, such as '-' in Tailwind classes
    /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
    Completion,
    /// Character classification for linked edits.
    ///
    /// This context handles characters that should be treated as part of
    /// identifiers during linked editing operations, such as '.' in JSX
    /// component names like `<Animated.View>`.
    LinkedEdit,
}

/// A runnable is a set of data about a region that could be resolved into a task.
pub struct Runnable {
    pub tags: SmallVec<[RunnableTag; 1]>,
    pub language: Arc<Language>,
    pub buffer: BufferId,
}

#[derive(Default, Clone, Debug)]
pub struct HighlightedText {
    pub text: SharedString,
    pub highlights: Vec<(Range<usize>, HighlightStyle)>,
}

#[derive(Default, Debug)]
struct HighlightedTextBuilder {
    pub text: String,
    highlights: Vec<(Range<usize>, HighlightStyle)>,
}

impl HighlightedText {
    pub fn from_buffer_range<T: ToOffset>(
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) -> Self {
        let mut highlighted_text = HighlightedTextBuilder::default();
        highlighted_text.add_text_from_buffer_range(
            range,
            snapshot,
            syntax_snapshot,
            override_style,
            syntax_theme,
        );
        highlighted_text.build()
    }

    pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
        gpui::StyledText::new(self.text.clone())
            .with_default_highlights(default_style, self.highlights.iter().cloned())
    }

    /// Returns the first line, with its leading whitespace trimmed unless a highlight
    /// starts within that whitespace, along with a boolean indicating whether more lines follow.
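    ///
    /// For example, for the text `"    let x = 1;\nlet y = 2;"` with no highlights,
    /// the preview text is `"let x = 1;"` and the returned boolean is `true`
    /// (illustrative input).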
    pub fn first_line_preview(self) -> (Self, bool) {
        let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
        let first_line = &self.text[..newline_ix];

        // Trim leading whitespace, unless an edit starts prior to it.
        let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
        if let Some((first_highlight_range, _)) = self.highlights.first() {
            preview_start_ix = preview_start_ix.min(first_highlight_range.start);
        }

        let preview_text = &first_line[preview_start_ix..];
        let preview_highlights = self
            .highlights
            .into_iter()
            .skip_while(|(range, _)| range.end <= preview_start_ix)
            .take_while(|(range, _)| range.start < newline_ix)
            .filter_map(|(mut range, highlight)| {
                range.start = range.start.saturating_sub(preview_start_ix);
                range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
                if range.is_empty() {
                    None
                } else {
                    Some((range, highlight))
                }
            });

        let preview = Self {
            text: SharedString::new(preview_text),
            highlights: preview_highlights.collect(),
        };

        (preview, self.text.len() > newline_ix)
    }
}

impl HighlightedTextBuilder {
    pub fn build(self) -> HighlightedText {
        HighlightedText {
            text: self.text.into(),
            highlights: self.highlights,
        }
    }

    pub fn add_text_from_buffer_range<T: ToOffset>(
        &mut self,
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) {
        let range = range.to_offset(snapshot);
        for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
            let start = self.text.len();
            self.text.push_str(chunk.text);
            let end = self.text.len();

            if let Some(highlight_style) = chunk
                .syntax_highlight_id
                .and_then(|id| id.style(syntax_theme))
            {
                let highlight_style = override_style.map_or(highlight_style, |override_style| {
                    highlight_style.highlight(override_style)
                });
                self.highlights.push((start..end, highlight_style));
            } else if let Some(override_style) = override_style {
                self.highlights.push((start..end, override_style));
            }
        }
    }

    fn highlighted_chunks<'a>(
        range: Range<usize>,
        snapshot: &'a text::BufferSnapshot,
        syntax_snapshot: &'a SyntaxSnapshot,
    ) -> BufferChunks<'a> {
        let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
            grammar
                .highlights_config
                .as_ref()
                .map(|config| &config.query)
        });

        let highlight_maps = captures
            .grammars()
            .iter()
            .map(|grammar| grammar.highlight_map())
            .collect();

        BufferChunks::new(
            snapshot.as_rope(),
            range,
            Some((captures, highlight_maps)),
            false,
            None,
        )
    }
}

#[derive(Clone)]
pub struct EditPreview {
    old_snapshot: text::BufferSnapshot,
    applied_edits_snapshot: text::BufferSnapshot,
    syntax_snapshot: SyntaxSnapshot,
}

impl EditPreview {
    pub fn highlight_edits(
        &self,
        current_snapshot: &BufferSnapshot,
        edits: &[(Range<Anchor>, String)],
        include_deletions: bool,
        cx: &App,
    ) -> HighlightedText {
        let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
            return HighlightedText::default();
        };

        let mut highlighted_text = HighlightedTextBuilder::default();

        let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;

        let insertion_highlight_style = HighlightStyle {
            background_color: Some(cx.theme().status().created_background),
            ..Default::default()
        };
        let deletion_highlight_style = HighlightStyle {
            background_color: Some(cx.theme().status().deleted_background),
            ..Default::default()
        };
        let syntax_theme = cx.theme().syntax();

        for (range, edit_text) in edits {
            let edit_new_end_in_preview_snapshot = range
                .end
                .bias_right(&self.old_snapshot)
                .to_offset(&self.applied_edits_snapshot);
            let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();

            let unchanged_range_in_preview_snapshot =
                offset_in_preview_snapshot..edit_start_in_preview_snapshot;
            if !unchanged_range_in_preview_snapshot.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    unchanged_range_in_preview_snapshot,
                    &self.applied_edits_snapshot,
                    &self.syntax_snapshot,
                    None,
                    syntax_theme,
                );
            }

            let range_in_current_snapshot = range.to_offset(current_snapshot);
            if include_deletions && !range_in_current_snapshot.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
                    Some(deletion_highlight_style),
                    syntax_theme,
                );
            }

            if !edit_text.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
                    &self.applied_edits_snapshot,
                    &self.syntax_snapshot,
                    Some(insertion_highlight_style),
                    syntax_theme,
                );
            }

            offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
        }

        highlighted_text.add_text_from_buffer_range(
            offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
            &self.applied_edits_snapshot,
            &self.syntax_snapshot,
            None,
            syntax_theme,
        );

        highlighted_text.build()
    }

    fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
        let (first, _) = edits.first()?;
        let (last, _) = edits.last()?;

        let start = first
            .start
            .bias_left(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);
        let end = last
            .end
            .bias_right(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);

        // Ensure that the first line of the first edit and the last line of the last edit are always fully visible.
        let range = Point::new(start.row, 0)
            ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));

        Some(range.to_offset(&self.applied_edits_snapshot))
    }
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct BracketMatch {
    pub open_range: Range<usize>,
    pub close_range: Range<usize>,
    pub newline_only: bool,
}

impl Buffer {
    /// Create a new buffer with the given base text.
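    ///
    /// A minimal usage sketch (assuming a gpui `App` context is available as `cx`;
    /// not taken from the original source):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```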
    pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
        Self::build(
            TextBuffer::new(
                ReplicaId::LOCAL,
                cx.entity_id().as_non_zero_u64().into(),
                base_text.into(),
            ),
            None,
            Capability::ReadWrite,
        )
    }

    /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
    pub fn local_normalized(
        base_text_normalized: Rope,
        line_ending: LineEnding,
        cx: &Context<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new_normalized(
                ReplicaId::LOCAL,
                cx.entity_id().as_non_zero_u64().into(),
                line_ending,
                base_text_normalized,
            ),
            None,
            Capability::ReadWrite,
        )
    }

    /// Create a new buffer that is a replica of a remote buffer.
    pub fn remote(
        remote_id: BufferId,
        replica_id: ReplicaId,
        capability: Capability,
        base_text: impl Into<String>,
    ) -> Self {
        Self::build(
            TextBuffer::new(replica_id, remote_id, base_text.into()),
            None,
            capability,
        )
    }

    /// Create a new buffer that is a replica of a remote buffer, populating its
    /// state from the given protobuf message.
    pub fn from_proto(
        replica_id: ReplicaId,
        capability: Capability,
        message: proto::BufferState,
        file: Option<Arc<dyn File>>,
    ) -> Result<Self> {
        let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
        let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
        let mut this = Self::build(buffer, file, capability);
        this.text.set_line_ending(proto::deserialize_line_ending(
            rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
        ));
        this.saved_version = proto::deserialize_version(&message.saved_version);
        this.saved_mtime = message.saved_mtime.map(|time| time.into());
        Ok(this)
    }

    /// Serialize the buffer's state to a protobuf message.
    pub fn to_proto(&self, cx: &App) -> proto::BufferState {
        proto::BufferState {
            id: self.remote_id().into(),
            file: self.file.as_ref().map(|f| f.to_proto(cx)),
            base_text: self.base_text().to_string(),
            line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
            saved_version: proto::serialize_version(&self.saved_version),
            saved_mtime: self.saved_mtime.map(|time| time.into()),
        }
    }

    /// Serialize as protobufs all of the changes to the buffer since the given version.
    pub fn serialize_ops(
        &self,
        since: Option<clock::Global>,
        cx: &App,
    ) -> Task<Vec<proto::Operation>> {
        let mut operations = Vec::new();
        operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));

        operations.extend(self.remote_selections.iter().map(|(_, set)| {
            proto::serialize_operation(&Operation::UpdateSelections {
                selections: set.selections.clone(),
                lamport_timestamp: set.lamport_timestamp,
                line_mode: set.line_mode,
                cursor_shape: set.cursor_shape,
            })
        }));

        for (server_id, diagnostics) in &self.diagnostics {
            operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
                lamport_timestamp: self.diagnostics_timestamp,
                server_id: *server_id,
                diagnostics: diagnostics.iter().cloned().collect(),
            }));
        }

        for (server_id, completions) in &self.completion_triggers_per_language_server {
            operations.push(proto::serialize_operation(
                &Operation::UpdateCompletionTriggers {
                    triggers: completions.iter().cloned().collect(),
                    lamport_timestamp: self.completion_triggers_timestamp,
                    server_id: *server_id,
                },
            ));
        }

        let text_operations = self.text.operations().clone();
        cx.background_spawn(async move {
            let since = since.unwrap_or_default();
            operations.extend(
                text_operations
                    .iter()
                    .filter(|(_, op)| !since.observed(op.timestamp()))
                    .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
            );
            operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
            operations
        })
    }

    /// Assign a language to the buffer, returning the buffer.
    pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
        self.set_language(Some(language), cx);
        self
    }

    /// Returns the [`Capability`] of this buffer.
    pub fn capability(&self) -> Capability {
        self.capability
    }

    /// Whether this buffer can only be read.
    pub fn read_only(&self) -> bool {
        self.capability == Capability::ReadOnly
    }

    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
    pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
        let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
        let snapshot = buffer.snapshot();
        let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
        Self {
            saved_mtime,
            saved_version: buffer.version(),
            preview_version: buffer.version(),
            reload_task: None,
            transaction_depth: 0,
            was_dirty_before_starting_transaction: None,
            has_unsaved_edits: Cell::new((buffer.version(), false)),
            text: buffer,
            branch_state: None,
            file,
            capability,
            syntax_map,
            reparse: None,
            non_text_state_update_count: 0,
            sync_parse_timeout: Duration::from_millis(1),
            parse_status: watch::channel(ParseStatus::Idle),
            autoindent_requests: Default::default(),
            wait_for_autoindent_txs: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            remote_selections: Default::default(),
            diagnostics: Default::default(),
            diagnostics_timestamp: Lamport::MIN,
            completion_triggers: Default::default(),
            completion_triggers_per_language_server: Default::default(),
            completion_triggers_timestamp: Lamport::MIN,
            deferred_ops: OperationQueue::new(),
            has_conflict: false,
            change_bits: Default::default(),
            _subscriptions: Vec::new(),
        }
    }

    pub fn build_snapshot(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        cx: &mut App,
    ) -> impl Future<Output = BufferSnapshot> + use<> {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        async move {
            let text =
                TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
                    .snapshot();
            let mut syntax = SyntaxMap::new(&text).snapshot();
            if let Some(language) = language.clone() {
                let language_registry = language_registry.clone();
                syntax.reparse(&text, language_registry, language);
            }
            BufferSnapshot {
                text,
                syntax,
                file: None,
                diagnostics: Default::default(),
                remote_selections: Default::default(),
                language,
                non_text_state_update_count: 0,
            }
        }
    }

    pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        let text = TextBuffer::new_normalized(
            ReplicaId::LOCAL,
            buffer_id,
            Default::default(),
            Rope::new(),
        )
        .snapshot();
        let syntax = SyntaxMap::new(&text).snapshot();
        BufferSnapshot {
            text,
            syntax,
            file: None,
            diagnostics: Default::default(),
            remote_selections: Default::default(),
            language: None,
            non_text_state_update_count: 0,
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn build_snapshot_sync(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        cx: &mut App,
    ) -> BufferSnapshot {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        let text =
            TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
                .snapshot();
        let mut syntax = SyntaxMap::new(&text).snapshot();
        if let Some(language) = language.clone() {
            syntax.reparse(&text, language_registry, language);
        }
        BufferSnapshot {
            text,
            syntax,
            file: None,
            diagnostics: Default::default(),
            remote_selections: Default::default(),
            language,
            non_text_state_update_count: 0,
        }
    }

    /// Retrieve a snapshot of the buffer's current state. This is computationally
    /// cheap, and allows reading from the buffer on a background thread.
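    ///
    /// A minimal sketch of reading a snapshot off the main thread (assumes a
    /// `buffer: Entity<Buffer>` and a gpui `cx`; not taken from the original source):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // Read-only queries are safe here; the snapshot is immutable.
    ///     let _len = snapshot.len();
    /// })
    /// .detach();
    /// ```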
    pub fn snapshot(&self) -> BufferSnapshot {
        let text = self.text.snapshot();
        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let syntax = syntax_map.snapshot();

        BufferSnapshot {
            text,
            syntax,
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
        }
    }

    pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
        let this = cx.entity();
        cx.new(|cx| {
            let mut branch = Self {
                branch_state: Some(BufferBranchState {
                    base_buffer: this.clone(),
                    merged_operations: Default::default(),
                }),
                language: self.language.clone(),
                has_conflict: self.has_conflict,
                has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
                _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
                ..Self::build(self.text.branch(), self.file.clone(), self.capability())
            };
            if let Some(language_registry) = self.language_registry() {
                branch.set_language_registry(language_registry);
            }

            // Reparse the branch buffer so that we get syntax highlighting immediately.
            branch.reparse(cx);

            branch
        })
    }

    pub fn preview_edits(
        &self,
        edits: Arc<[(Range<Anchor>, String)]>,
        cx: &App,
    ) -> Task<EditPreview> {
        let registry = self.language_registry();
        let language = self.language().cloned();
        let old_snapshot = self.text.snapshot();
        let mut branch_buffer = self.text.branch();
        let mut syntax_snapshot = self.syntax_map.lock().snapshot();
        cx.background_spawn(async move {
            if !edits.is_empty() {
                if let Some(language) = language.clone() {
                    syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
                }

                branch_buffer.edit(edits.iter().cloned());
                let snapshot = branch_buffer.snapshot();
                syntax_snapshot.interpolate(&snapshot);

                if let Some(language) = language {
                    syntax_snapshot.reparse(&snapshot, registry, language);
                }
            }
            EditPreview {
                old_snapshot,
                applied_edits_snapshot: branch_buffer.snapshot(),
                syntax_snapshot,
            }
        })
    }

    /// Applies all of the changes in this buffer that intersect any of the
    /// given `ranges` to its base buffer.
    ///
    /// If `ranges` is empty, then all changes will be applied. This buffer must
    /// be a branch buffer to call this method.
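    ///
    /// A minimal sketch of merging every change back into the base buffer (assumes
    /// `branch: Entity<Buffer>` was created via [`Buffer::branch`]; not taken from
    /// the original source):
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```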
    pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
        let Some(base_buffer) = self.base_buffer() else {
            debug_panic!("not a branch buffer");
            return;
        };

        let mut ranges = if ranges.is_empty() {
            &[0..usize::MAX]
        } else {
            ranges.as_slice()
        }
        .iter()
        .peekable();

        let mut edits = Vec::new();
        for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
            let mut is_included = false;
            while let Some(range) = ranges.peek() {
                if range.end < edit.new.start {
                    ranges.next().unwrap();
                } else {
                    if range.start <= edit.new.end {
                        is_included = true;
                    }
                    break;
                }
            }

            if is_included {
                edits.push((
                    edit.old.clone(),
                    self.text_for_range(edit.new.clone()).collect::<String>(),
                ));
            }
        }

        let operation = base_buffer.update(cx, |base_buffer, cx| {
            // cx.emit(BufferEvent::DiffBaseChanged);
            base_buffer.edit(edits, None, cx)
        });

        if let Some(operation) = operation
            && let Some(BufferBranchState {
                merged_operations, ..
            }) = &mut self.branch_state
        {
            merged_operations.push(operation);
        }
    }

    fn on_base_buffer_event(
        &mut self,
        _: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        let BufferEvent::Operation { operation, .. } = event else {
            return;
        };
        let Some(BufferBranchState {
            merged_operations, ..
        }) = &mut self.branch_state
        else {
            return;
        };

        let mut operation_to_undo = None;
        if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
            && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
        {
            merged_operations.remove(ix);
            operation_to_undo = Some(operation.timestamp);
        }

        self.apply_ops([operation.clone()], cx);

        if let Some(timestamp) = operation_to_undo {
            let counts = [(timestamp, u32::MAX)].into_iter().collect();
            self.undo_operations(counts, cx);
        }
    }

    #[cfg(test)]
    pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
        &self.text
    }

    /// Retrieve a snapshot of the buffer's raw text, without any
    /// language-related state like the syntax tree or diagnostics.
    pub fn text_snapshot(&self) -> text::BufferSnapshot {
        self.text.snapshot()
    }

    /// The file associated with the buffer, if any.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }

    /// The version of the buffer that was last saved or reloaded from disk.
    pub fn saved_version(&self) -> &clock::Global {
        &self.saved_version
    }

    /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
    pub fn saved_mtime(&self) -> Option<MTime> {
        self.saved_mtime
    }

    /// Assign a language to the buffer.
    pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().clear(&self.text);
        self.language = language;
        self.was_changed();
        self.reparse(cx);
        cx.emit(BufferEvent::LanguageChanged);
    }

    /// Assign a language registry to the buffer. This allows the buffer to retrieve
    /// other languages if parts of the buffer are written in different languages.
    pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
        self.syntax_map
            .lock()
            .set_language_registry(language_registry);
    }

    pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
        self.syntax_map.lock().language_registry()
    }

    /// Assign the line ending type to the buffer.
    pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
        self.text.set_line_ending(line_ending);

        let lamport_timestamp = self.text.lamport_clock.tick();
        self.send_operation(
            Operation::UpdateLineEnding {
                line_ending,
                lamport_timestamp,
            },
            true,
            cx,
        );
    }

    /// Assign the buffer a new [`Capability`].
    pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
        if self.capability != capability {
            self.capability = capability;
            cx.emit(BufferEvent::CapabilityChanged)
        }
    }

    /// This method is called to signal that the buffer has been saved.
    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version.clone();
        self.has_unsaved_edits.set((version, false));
        self.has_conflict = false;
        self.saved_mtime = mtime;
        self.was_changed();
        cx.emit(BufferEvent::Saved);
        cx.notify();
    }

    /// Reloads the contents of the buffer from disk.
    pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
        let (tx, rx) = futures::channel::oneshot::channel();
        let prev_version = self.text.version();
        self.reload_task = Some(cx.spawn(async move |this, cx| {
            let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
                let file = this.file.as_ref()?.as_local()?;

                Some((file.disk_state().mtime(), file.load(cx)))
            })?
            else {
                return Ok(());
            };

            let new_text = new_text.await?;
            let diff = this
                .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
                .await;
            this.update(cx, |this, cx| {
                if this.version() == diff.base_version {
                    this.finalize_last_transaction();
                    this.apply_diff(diff, cx);
                    tx.send(this.finalize_last_transaction().cloned()).ok();
                    this.has_conflict = false;
                    this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
                } else {
                    if !diff.edits.is_empty()
                        || this
                            .edits_since::<usize>(&diff.base_version)
                            .next()
                            .is_some()
                    {
                        this.has_conflict = true;
                    }

                    this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
                }

                this.reload_task.take();
            })
        }));
        rx
    }

    /// This method is called to signal that the buffer has been reloaded.
    pub fn did_reload(
        &mut self,
        version: clock::Global,
        line_ending: LineEnding,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version;
        self.has_unsaved_edits
            .set((self.saved_version.clone(), false));
        self.text.set_line_ending(line_ending);
        self.saved_mtime = mtime;
        cx.emit(BufferEvent::Reloaded);
        cx.notify();
    }

    /// Updates the [`File`] backing this buffer. This should be called when
    /// the file has changed or has been deleted.
    pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
        let was_dirty = self.is_dirty();
        let mut file_changed = false;

        if let Some(old_file) = self.file.as_ref() {
            if new_file.path() != old_file.path() {
                file_changed = true;
            }

            let old_state = old_file.disk_state();
            let new_state = new_file.disk_state();
            if old_state != new_state {
                file_changed = true;
                if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
                    cx.emit(BufferEvent::ReloadNeeded)
                }
            }
        } else {
            file_changed = true;
        };

        self.file = Some(new_file);
        if file_changed {
            self.was_changed();
            self.non_text_state_update_count += 1;
            if was_dirty != self.is_dirty() {
                cx.emit(BufferEvent::DirtyChanged);
            }
            cx.emit(BufferEvent::FileHandleChanged);
            cx.notify();
        }
    }

    pub fn base_buffer(&self) -> Option<Entity<Self>> {
        Some(self.branch_state.as_ref()?.base_buffer.clone())
    }

    /// Returns the primary [`Language`] assigned to this [`Buffer`].
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    /// Returns the [`Language`] at the given location.
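    ///
    /// When the buffer contains injected languages (for example, a script embedded in
    /// an HTML document), the innermost syntax layer containing the position wins, so
    /// the result can differ from [`Buffer::language`]. If no syntax layer covers the
    /// position, the buffer's primary language is returned. (The HTML/script pairing is
    /// an illustrative assumption, not taken from the original documentation.)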
    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
        let offset = position.to_offset(self);
        let mut is_first = true;
        let start_anchor = self.anchor_before(offset);
        let end_anchor = self.anchor_after(offset);
        self.syntax_map
            .lock()
            .layers_for_range(offset..offset, &self.text, false)
            .filter(|layer| {
                if is_first {
                    is_first = false;
                    return true;
                }

                layer
                    .included_sub_ranges
                    .map(|sub_ranges| {
                        sub_ranges.iter().any(|sub_range| {
                            let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
                            let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
                            !is_before_start && !is_after_end
                        })
                    })
                    .unwrap_or(true)
            })
            .last()
            .map(|info| info.language.clone())
            .or_else(|| self.language.clone())
    }

    /// Returns each [`Language`] for the active syntax layers at the given location.
    pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
        let offset = position.to_offset(self);
        let mut languages: Vec<Arc<Language>> = self
            .syntax_map
            .lock()
            .layers_for_range(offset..offset, &self.text, false)
            .map(|info| info.language.clone())
            .collect();

        if languages.is_empty()
            && let Some(buffer_language) = self.language()
        {
            languages.push(buffer_language.clone());
        }

        languages
    }

    /// An integer version number that accounts for all updates besides
    /// the buffer's text itself (which is versioned via a version vector).
    pub fn non_text_state_update_count(&self) -> usize {
        self.non_text_state_update_count
    }

    /// Whether the buffer is being parsed in the background.
    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.reparse.is_some()
    }

    /// Indicates whether the buffer contains any regions that may be
    /// written in a language that hasn't been loaded yet.
    pub fn contains_unknown_injections(&self) -> bool {
        self.syntax_map.lock().contains_unknown_injections()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
        self.sync_parse_timeout = timeout;
    }

    /// Called after an edit to synchronize the buffer's main parse tree with
    /// the buffer's new underlying state.
    ///
    /// Locks the syntax map and interpolates the edits since the last reparse
    /// into the foreground syntax tree.
    ///
    /// Then takes a stable snapshot of the syntax map before unlocking it.
    /// The snapshot with the interpolated edits is sent to a background thread,
    /// where we ask Tree-sitter to perform an incremental parse.
    ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting on the parse to complete. As soon as it completes, we proceed
    /// synchronously, unless the 1ms timeout elapses first.
    ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and return with the interpolated tree still
    /// in the foreground. When the background parse completes, we call back into
    /// the main thread and assign the result to the foreground parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
    pub fn reparse(&mut self, cx: &mut Context<Self>) {
        if self.reparse.is_some() {
            return;
        }
        let language = if let Some(language) = self.language.clone() {
            language
        } else {
            return;
        };

        let text = self.text_snapshot();
        let parsed_version = self.version();

        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let language_registry = syntax_map.language_registry();
        let mut syntax_snapshot = syntax_map.snapshot();
        drop(syntax_map);

        let parse_task = cx.background_spawn({
            let language = language.clone();
            let language_registry = language_registry.clone();
            async move {
                syntax_snapshot.reparse(&text, language_registry, language);
                syntax_snapshot
            }
        });

        self.parse_status.0.send(ParseStatus::Parsing).unwrap();
        match cx
            .background_executor()
            .block_with_timeout(self.sync_parse_timeout, parse_task)
        {
            Ok(new_syntax_snapshot) => {
                self.did_finish_parsing(new_syntax_snapshot, cx);
                self.reparse = None;
            }
            Err(parse_task) => {
                // todo(lw): hot foreground spawn
                self.reparse = Some(cx.spawn(async move |this, cx| {
                    let new_syntax_map = cx.background_spawn(parse_task).await;
                    this.update(cx, move |this, cx| {
                        let grammar_changed = || {
                            this.language.as_ref().is_none_or(|current_language| {
                                !Arc::ptr_eq(&language, current_language)
                            })
                        };
                        let language_registry_changed = || {
                            new_syntax_map.contains_unknown_injections()
                                && language_registry.is_some_and(|registry| {
                                    registry.version() != new_syntax_map.language_registry_version()
                                })
                        };
                        let parse_again = this.version.changed_since(&parsed_version)
                            || language_registry_changed()
                            || grammar_changed();
                        this.did_finish_parsing(new_syntax_map, cx);
                        this.reparse = None;
                        if parse_again {
                            this.reparse(cx);
                        }
                    })
                    .ok();
                }));
            }
        }
    }

    fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
        self.was_changed();
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().did_parse(syntax_snapshot);
        self.request_autoindent(cx);
        self.parse_status.0.send(ParseStatus::Idle).unwrap();
        cx.emit(BufferEvent::Reparsed);
        cx.notify();
    }

    pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
        self.parse_status.1.clone()
    }

    /// Assign to the buffer a set of diagnostics created by a given language server.
    pub fn update_diagnostics(
        &mut self,
        server_id: LanguageServerId,
        diagnostics: DiagnosticSet,
        cx: &mut Context<Self>,
    ) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        let op = Operation::UpdateDiagnostics {
            server_id,
            diagnostics: diagnostics.iter().cloned().collect(),
            lamport_timestamp,
        };

        self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
        self.send_operation(op, true, cx);
    }

    pub fn buffer_diagnostics(
        &self,
        for_server: Option<LanguageServerId>,
    ) -> Vec<&DiagnosticEntry<Anchor>> {
        match for_server {
            Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
                Ok(idx) => self.diagnostics[idx].1.iter().collect(),
                Err(_) => Vec::new(),
            },
            None => self
                .diagnostics
                .iter()
                .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
                .collect(),
        }
    }

    fn request_autoindent(&mut self, cx: &mut Context<Self>) {
        if let Some(indent_sizes) = self.compute_autoindents() {
            let indent_sizes = cx.background_spawn(indent_sizes);
            match cx
                .background_executor()
                .block_with_timeout(Duration::from_micros(500), indent_sizes)
            {
                Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
                Err(indent_sizes) => {
                    self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
                        let indent_sizes = indent_sizes.await;
                        this.update(cx, |this, cx| {
                            this.apply_autoindents(indent_sizes, cx);
                        })
                        .ok();
                    }));
                }
            }
        } else {
            self.autoindent_requests.clear();
            for tx in self.wait_for_autoindent_txs.drain(..) {
                tx.send(()).ok();
            }
        }
    }

    fn compute_autoindents(
        &self,
    ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
        let max_rows_between_yields = 100;
        let snapshot = self.snapshot();
        if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
            return None;
        }

        let autoindent_requests = self.autoindent_requests.clone();
        Some(async move {
            let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
            for request in autoindent_requests {
                // Resolve each edited range to its row in the current buffer and in the
                // buffer before this batch of edits.
                let mut row_ranges = Vec::new();
                let mut old_to_new_rows = BTreeMap::new();
                let mut language_indent_sizes_by_new_row = Vec::new();
                for entry in &request.entries {
                    let position = entry.range.start;
                    let new_row = position.to_point(&snapshot).row;
                    let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
                    language_indent_sizes_by_new_row.push((new_row, entry.indent_size));

                    if !entry.first_line_is_new {
                        let old_row = position.to_point(&request.before_edit).row;
                        old_to_new_rows.insert(old_row, new_row);
                    }
                    row_ranges.push((new_row..new_end_row, entry.original_indent_column));
                }

                // Build a map containing the suggested indentation for each of the edited lines
                // with respect to the state of the buffer before these edits. This map is keyed
                // by the rows for these lines in the current state of the buffer.
                let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
                let old_edited_ranges =
                    contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
                let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
                let mut language_indent_size = IndentSize::default();
                for old_edited_range in old_edited_ranges {
                    let suggestions = request
                        .before_edit
                        .suggest_autoindents(old_edited_range.clone())
                        .into_iter()
                        .flatten();
                    for (old_row, suggestion) in old_edited_range.zip(suggestions) {
                        if let Some(suggestion) = suggestion {
                            let new_row = *old_to_new_rows.get(&old_row).unwrap();

                            // Find the indent size based on the language for this row.
                            while let Some((row, size)) = language_indent_sizes.peek() {
                                if *row > new_row {
                                    break;
                                }
                                language_indent_size = *size;
                                language_indent_sizes.next();
                            }

                            let suggested_indent = old_to_new_rows
                                .get(&suggestion.basis_row)
                                .and_then(|from_row| {
                                    Some(old_suggestions.get(from_row).copied()?.0)
                                })
                                .unwrap_or_else(|| {
                                    request
                                        .before_edit
                                        .indent_size_for_line(suggestion.basis_row)
                                })
                                .with_delta(suggestion.delta, language_indent_size);
                            old_suggestions
                                .insert(new_row, (suggested_indent, suggestion.within_error));
                        }
                    }
                    yield_now().await;
                }

                // Compute new suggestions for each line, but only include them in the result
                // if they differ from the old suggestion for that line.
                let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
                let mut language_indent_size = IndentSize::default();
                for (row_range, original_indent_column) in row_ranges {
                    let new_edited_row_range = if request.is_block_mode {
                        row_range.start..row_range.start + 1
                    } else {
                        row_range.clone()
                    };

                    let suggestions = snapshot
                        .suggest_autoindents(new_edited_row_range.clone())
                        .into_iter()
                        .flatten();
                    for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
                        if let Some(suggestion) = suggestion {
                            // Find the indent size based on the language for this row.
                            while let Some((row, size)) = language_indent_sizes.peek() {
                                if *row > new_row {
                                    break;
                                }
                                language_indent_size = *size;
                                language_indent_sizes.next();
                            }

                            let suggested_indent = indent_sizes
                                .get(&suggestion.basis_row)
                                .copied()
                                .map(|e| e.0)
                                .unwrap_or_else(|| {
                                    snapshot.indent_size_for_line(suggestion.basis_row)
                                })
                                .with_delta(suggestion.delta, language_indent_size);

                            if old_suggestions.get(&new_row).is_none_or(
                                |(old_indentation, was_within_error)| {
                                    suggested_indent != *old_indentation
                                        && (!suggestion.within_error || *was_within_error)
                                },
                            ) {
                                indent_sizes.insert(
                                    new_row,
                                    (suggested_indent, request.ignore_empty_lines),
                                );
                            }
                        }
                    }

                    if let (true, Some(original_indent_column)) =
                        (request.is_block_mode, original_indent_column)
                    {
                        let new_indent =
                            if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
                                *indent
                            } else {
                                snapshot.indent_size_for_line(row_range.start)
                            };
                        let delta = new_indent.len as i64 - original_indent_column as i64;
                        if delta != 0 {
                            for row in row_range.skip(1) {
                                indent_sizes.entry(row).or_insert_with(|| {
                                    let mut size = snapshot.indent_size_for_line(row);
                                    if size.kind == new_indent.kind {
                                        match delta.cmp(&0) {
                                            Ordering::Greater => size.len += delta as u32,
1823 Ordering::Less => {
1824 size.len = size.len.saturating_sub(-delta as u32)
1825 }
1826 Ordering::Equal => {}
1827 }
1828 }
1829 (size, request.ignore_empty_lines)
1830 });
1831 }
1832 }
1833 }
1834
1835 yield_now().await;
1836 }
1837 }
1838
1839 indent_sizes
1840 .into_iter()
1841 .filter_map(|(row, (indent, ignore_empty_lines))| {
1842 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1843 None
1844 } else {
1845 Some((row, indent))
1846 }
1847 })
1848 .collect()
1849 })
1850 }
1851
1852 fn apply_autoindents(
1853 &mut self,
1854 indent_sizes: BTreeMap<u32, IndentSize>,
1855 cx: &mut Context<Self>,
1856 ) {
1857 self.autoindent_requests.clear();
1858 for tx in self.wait_for_autoindent_txs.drain(..) {
1859 tx.send(()).ok();
1860 }
1861
1862 let edits: Vec<_> = indent_sizes
1863 .into_iter()
1864 .filter_map(|(row, indent_size)| {
1865 let current_size = indent_size_for_line(self, row);
1866 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1867 })
1868 .collect();
1869
1870 let preserve_preview = self.preserve_preview();
1871 self.edit(edits, None, cx);
1872 if preserve_preview {
1873 self.refresh_preview();
1874 }
1875 }
1876
1877 /// Create a minimal edit that will cause the given row to be indented
1878 /// with the given size. After applying this edit, the length of the line
1879 /// will always be at least `new_size.len`.
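    ///
    /// An illustrative sketch (marked `ignore`, so it is not compiled as a doctest;
    /// `IndentSize::spaces` is assumed here based on its use elsewhere in this crate):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces yields an insertion of "  " at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert!(edit.is_some());
    /// ```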
1880 pub fn edit_for_indent_size_adjustment(
1881 row: u32,
1882 current_size: IndentSize,
1883 new_size: IndentSize,
1884 ) -> Option<(Range<Point>, String)> {
1885 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1887 Ordering::Greater => {
1888 let point = Point::new(row, 0);
1889 Some((
1890 point..point,
1891 iter::repeat(new_size.char())
1892 .take((new_size.len - current_size.len) as usize)
1893 .collect::<String>(),
1894 ))
1895 }
1896
1897 Ordering::Less => Some((
1898 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1899 String::new(),
1900 )),
1901
1902 Ordering::Equal => None,
1903 }
1904 } else {
1905 Some((
1906 Point::new(row, 0)..Point::new(row, current_size.len),
1907 iter::repeat(new_size.char())
1908 .take(new_size.len as usize)
1909 .collect::<String>(),
1910 ))
1911 }
1912 }
1913
1914 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1915 /// and the given new text.
1916 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1917 let old_text = self.as_rope().clone();
1918 let base_version = self.version();
1919 cx.background_executor()
1920 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1921 let old_text = old_text.to_string();
1922 let line_ending = LineEnding::detect(&new_text);
1923 LineEnding::normalize(&mut new_text);
1924 let edits = text_diff(&old_text, &new_text);
1925 Diff {
1926 base_version,
1927 line_ending,
1928 edits,
1929 }
1930 })
1931 }
1932
1933 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
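    ///
    /// A hedged usage sketch (not compiled; assumes a `buffer: &mut Buffer` and a gpui
    /// context `cx`, with the `await` happening inside a spawned task rather than while
    /// holding a mutable reference to the buffer):
    ///
    /// ```ignore
    /// let diff = buffer.remove_trailing_whitespace(cx).await;
    /// buffer.apply_diff(diff, cx);
    /// ```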
1935 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1936 let old_text = self.as_rope().clone();
1937 let line_ending = self.line_ending();
1938 let base_version = self.version();
1939 cx.background_spawn(async move {
1940 let ranges = trailing_whitespace_ranges(&old_text);
1941 let empty = Arc::<str>::from("");
1942 Diff {
1943 base_version,
1944 line_ending,
1945 edits: ranges
1946 .into_iter()
1947 .map(|range| (range, empty.clone()))
1948 .collect(),
1949 }
1950 })
1951 }
1952
1953 /// Ensures that the buffer ends with a single newline character, and
    /// no other whitespace. Does nothing if the buffer is empty.
1955 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1956 let len = self.len();
1957 if len == 0 {
1958 return;
1959 }
1960 let mut offset = len;
1961 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1962 let non_whitespace_len = chunk
1963 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1964 .len();
1965 offset -= chunk.len();
1966 offset += non_whitespace_len;
1967 if non_whitespace_len != 0 {
1968 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1969 return;
1970 }
1971 break;
1972 }
1973 }
1974 self.edit([(offset..len, "\n")], None, cx);
1975 }
1976
1977 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1978 /// calculated, then adjust the diff to account for those changes, and discard any
1979 /// parts of the diff that conflict with those changes.
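    ///
    /// A hedged sketch of the intended flow (not compiled; assumes a `buffer: &mut Buffer`,
    /// a gpui context `cx`, and some replacement `new_text: String`, with the `await`
    /// happening inside a spawned task):
    ///
    /// ```ignore
    /// // Compute a diff against the new text, then apply it. Any edits made to the
    /// // buffer in the meantime are reconciled using `diff.base_version`.
    /// let diff = buffer.diff(new_text, cx).await;
    /// let _transaction = buffer.apply_diff(diff, cx);
    /// ```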
1980 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1981 let snapshot = self.snapshot();
1982 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1983 let mut delta = 0;
1984 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1985 while let Some(edit_since) = edits_since.peek() {
1986 // If the edit occurs after a diff hunk, then it does not
1987 // affect that hunk.
1988 if edit_since.old.start > range.end {
1989 break;
1990 }
1991 // If the edit precedes the diff hunk, then adjust the hunk
1992 // to reflect the edit.
1993 else if edit_since.old.end < range.start {
1994 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1995 edits_since.next();
1996 }
1997 // If the edit intersects a diff hunk, then discard that hunk.
1998 else {
1999 return None;
2000 }
2001 }
2002
2003 let start = (range.start as i64 + delta) as usize;
2004 let end = (range.end as i64 + delta) as usize;
2005 Some((start..end, new_text))
2006 });
2007
2008 self.start_transaction();
2009 self.text.set_line_ending(diff.line_ending);
2010 self.edit(adjusted_edits, None, cx);
2011 self.end_transaction(cx)
2012 }
2013
2014 pub fn has_unsaved_edits(&self) -> bool {
2015 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2016
2017 if last_version == self.version {
2018 self.has_unsaved_edits
2019 .set((last_version, has_unsaved_edits));
2020 return has_unsaved_edits;
2021 }
2022
2023 let has_edits = self.has_edits_since(&self.saved_version);
2024 self.has_unsaved_edits
2025 .set((self.version.clone(), has_edits));
2026 has_edits
2027 }
2028
2029 /// Checks if the buffer has unsaved changes.
2030 pub fn is_dirty(&self) -> bool {
2031 if self.capability == Capability::ReadOnly {
2032 return false;
2033 }
2034 if self.has_conflict {
2035 return true;
2036 }
2037 match self.file.as_ref().map(|f| f.disk_state()) {
2038 Some(DiskState::New) | Some(DiskState::Deleted) => {
2039 !self.is_empty() && self.has_unsaved_edits()
2040 }
2041 _ => self.has_unsaved_edits(),
2042 }
2043 }
2044
2045 /// Checks if the buffer and its file have both changed since the buffer
2046 /// was last saved or reloaded.
2047 pub fn has_conflict(&self) -> bool {
2048 if self.has_conflict {
2049 return true;
2050 }
2051 let Some(file) = self.file.as_ref() else {
2052 return false;
2053 };
2054 match file.disk_state() {
2055 DiskState::New => false,
2056 DiskState::Present { mtime } => match self.saved_mtime {
2057 Some(saved_mtime) => {
2058 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2059 }
2060 None => true,
2061 },
2062 DiskState::Deleted => false,
2063 }
2064 }
2065
2066 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2067 pub fn subscribe(&mut self) -> Subscription {
2068 self.text.subscribe()
2069 }
2070
2071 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2072 ///
2073 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
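    ///
    /// A brief sketch (not compiled; assumes a `buffer: &mut Buffer`):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let bit = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&bit));
    /// // ...after any subsequent change to the buffer:
    /// assert!(bit.get());
    /// ```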
2075 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2076 if let Err(ix) = self
2077 .change_bits
2078 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2079 {
2080 self.change_bits.insert(ix, bit);
2081 }
2082 }
2083
2084 /// Set the change bit for all "listeners".
2085 fn was_changed(&mut self) {
2086 self.change_bits.retain(|change_bit| {
2087 change_bit
2088 .upgrade()
2089 .inspect(|bit| {
2090 _ = bit.replace(true);
2091 })
2092 .is_some()
2093 });
2094 }
2095
2096 /// Starts a transaction, if one is not already in-progress. When undoing or
2097 /// redoing edits, all of the edits performed within a transaction are undone
2098 /// or redone together.
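    ///
    /// A usage sketch (not compiled; assumes a `buffer: &mut Buffer` and a
    /// `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// buffer.end_transaction(cx);
    /// // A single undo now reverts both edits.
    /// buffer.undo(cx);
    /// ```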
2099 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2100 self.start_transaction_at(Instant::now())
2101 }
2102
2103 /// Starts a transaction, providing the current time. Subsequent transactions
2104 /// that occur within a short period of time will be grouped together. This
2105 /// is controlled by the buffer's undo grouping duration.
2106 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2107 self.transaction_depth += 1;
2108 if self.was_dirty_before_starting_transaction.is_none() {
2109 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2110 }
2111 self.text.start_transaction_at(now)
2112 }
2113
2114 /// Terminates the current transaction, if this is the outermost transaction.
2115 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2116 self.end_transaction_at(Instant::now(), cx)
2117 }
2118
2119 /// Terminates the current transaction, providing the current time. Subsequent transactions
2120 /// that occur within a short period of time will be grouped together. This
2121 /// is controlled by the buffer's undo grouping duration.
2122 pub fn end_transaction_at(
2123 &mut self,
2124 now: Instant,
2125 cx: &mut Context<Self>,
2126 ) -> Option<TransactionId> {
2127 assert!(self.transaction_depth > 0);
2128 self.transaction_depth -= 1;
2129 let was_dirty = if self.transaction_depth == 0 {
2130 self.was_dirty_before_starting_transaction.take().unwrap()
2131 } else {
2132 false
2133 };
2134 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2135 self.did_edit(&start_version, was_dirty, cx);
2136 Some(transaction_id)
2137 } else {
2138 None
2139 }
2140 }
2141
2142 /// Manually add a transaction to the buffer's undo history.
2143 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2144 self.text.push_transaction(transaction, now);
2145 }
2146
2147 /// Differs from `push_transaction` in that it does not clear the redo
2148 /// stack. Intended to be used to create a parent transaction to merge
2149 /// potential child transactions into.
2150 ///
2151 /// The caller is responsible for removing it from the undo history using
2152 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2153 /// are merged into this transaction, the caller is responsible for ensuring
2154 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2155 /// cleared is to create transactions with the usual `start_transaction` and
2156 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
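    ///
    /// A hedged sketch of the merge pattern described above (not compiled; assumes a
    /// `buffer: &mut Buffer` and a `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "x")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     // Fold the child edits into the parent transaction.
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     // Nothing was merged, so drop the empty parent from the undo history.
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```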
2158 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2159 self.text.push_empty_transaction(now)
2160 }
2161
2162 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2164 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2165 self.text.finalize_last_transaction()
2166 }
2167
2168 /// Manually group all changes since a given transaction.
2169 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2170 self.text.group_until_transaction(transaction_id);
2171 }
2172
    /// Manually remove a transaction from the buffer's undo history.
2174 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2175 self.text.forget_transaction(transaction_id)
2176 }
2177
    /// Retrieve a transaction from the buffer's undo history.
2179 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2180 self.text.get_transaction(transaction_id)
2181 }
2182
2183 /// Manually merge two transactions in the buffer's undo history.
2184 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2185 self.text.merge_transactions(transaction, destination);
2186 }
2187
2188 /// Waits for the buffer to receive operations with the given timestamps.
2189 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2190 &mut self,
2191 edit_ids: It,
2192 ) -> impl Future<Output = Result<()>> + use<It> {
2193 self.text.wait_for_edits(edit_ids)
2194 }
2195
2196 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2197 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2198 &mut self,
2199 anchors: It,
2200 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2201 self.text.wait_for_anchors(anchors)
2202 }
2203
2204 /// Waits for the buffer to receive operations up to the given version.
2205 pub fn wait_for_version(
2206 &mut self,
2207 version: clock::Global,
2208 ) -> impl Future<Output = Result<()>> + use<> {
2209 self.text.wait_for_version(version)
2210 }
2211
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2214 pub fn give_up_waiting(&mut self) {
2215 self.text.give_up_waiting();
2216 }
2217
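    /// Returns a receiver that resolves once the currently pending autoindent requests
    /// have been processed, or `None` if there are no pending autoindent requests.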
2218 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2219 let mut rx = None;
2220 if !self.autoindent_requests.is_empty() {
2221 let channel = oneshot::channel();
2222 self.wait_for_autoindent_txs.push(channel.0);
2223 rx = Some(channel.1);
2224 }
2225 rx
2226 }
2227
2228 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2229 pub fn set_active_selections(
2230 &mut self,
2231 selections: Arc<[Selection<Anchor>]>,
2232 line_mode: bool,
2233 cursor_shape: CursorShape,
2234 cx: &mut Context<Self>,
2235 ) {
2236 let lamport_timestamp = self.text.lamport_clock.tick();
2237 self.remote_selections.insert(
2238 self.text.replica_id(),
2239 SelectionSet {
2240 selections: selections.clone(),
2241 lamport_timestamp,
2242 line_mode,
2243 cursor_shape,
2244 },
2245 );
2246 self.send_operation(
2247 Operation::UpdateSelections {
2248 selections,
2249 line_mode,
2250 lamport_timestamp,
2251 cursor_shape,
2252 },
2253 true,
2254 cx,
2255 );
2256 self.non_text_state_update_count += 1;
2257 cx.notify();
2258 }
2259
2260 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2261 /// this replica.
2262 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2263 if self
2264 .remote_selections
2265 .get(&self.text.replica_id())
2266 .is_none_or(|set| !set.selections.is_empty())
2267 {
2268 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2269 }
2270 }
2271
2272 pub fn set_agent_selections(
2273 &mut self,
2274 selections: Arc<[Selection<Anchor>]>,
2275 line_mode: bool,
2276 cursor_shape: CursorShape,
2277 cx: &mut Context<Self>,
2278 ) {
2279 let lamport_timestamp = self.text.lamport_clock.tick();
2280 self.remote_selections.insert(
2281 ReplicaId::AGENT,
2282 SelectionSet {
2283 selections,
2284 lamport_timestamp,
2285 line_mode,
2286 cursor_shape,
2287 },
2288 );
2289 self.non_text_state_update_count += 1;
2290 cx.notify();
2291 }
2292
2293 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2294 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2295 }
2296
2297 /// Replaces the buffer's entire text.
2298 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2299 where
2300 T: Into<Arc<str>>,
2301 {
2302 self.autoindent_requests.clear();
2303 self.edit([(0..self.len(), text)], None, cx)
2304 }
2305
2306 /// Appends the given text to the end of the buffer.
2307 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2308 where
2309 T: Into<Arc<str>>,
2310 {
2311 self.edit([(self.len()..self.len(), text)], None, cx)
2312 }
2313
2314 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2315 /// delete, and a string of text to insert at that location.
2316 ///
2317 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2318 /// request for the edited ranges, which will be processed when the buffer finishes
2319 /// parsing.
2320 ///
2321 /// Parsing takes place at the end of a transaction, and may compute synchronously
2322 /// or asynchronously, depending on the changes.
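    ///
    /// A usage sketch (not compiled; assumes a `buffer: &mut Buffer` and a
    /// `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// // Replace the first three bytes and append a new line, auto-indenting the
    /// // edited lines once the buffer has reparsed.
    /// buffer.edit(
    ///     [(0..3, "fn "), (buffer.len()..buffer.len(), "\nbar()")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```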
2323 pub fn edit<I, S, T>(
2324 &mut self,
2325 edits_iter: I,
2326 autoindent_mode: Option<AutoindentMode>,
2327 cx: &mut Context<Self>,
2328 ) -> Option<clock::Lamport>
2329 where
2330 I: IntoIterator<Item = (Range<S>, T)>,
2331 S: ToOffset,
2332 T: Into<Arc<str>>,
2333 {
2334 // Skip invalid edits and coalesce contiguous ones.
2335 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2336
2337 for (range, new_text) in edits_iter {
2338 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2339
2340 if range.start > range.end {
2341 mem::swap(&mut range.start, &mut range.end);
2342 }
2343 let new_text = new_text.into();
2344 if !new_text.is_empty() || !range.is_empty() {
2345 if let Some((prev_range, prev_text)) = edits.last_mut()
2346 && prev_range.end >= range.start
2347 {
2348 prev_range.end = cmp::max(prev_range.end, range.end);
2349 *prev_text = format!("{prev_text}{new_text}").into();
2350 } else {
2351 edits.push((range, new_text));
2352 }
2353 }
2354 }
2355 if edits.is_empty() {
2356 return None;
2357 }
2358
2359 self.start_transaction();
2360 self.pending_autoindent.take();
2361 let autoindent_request = autoindent_mode
2362 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2363
2364 let edit_operation = self.text.edit(edits.iter().cloned());
2365 let edit_id = edit_operation.timestamp();
2366
2367 if let Some((before_edit, mode)) = autoindent_request {
2368 let mut delta = 0isize;
2369 let mut previous_setting = None;
2370 let entries: Vec<_> = edits
2371 .into_iter()
2372 .enumerate()
2373 .zip(&edit_operation.as_edit().unwrap().new_text)
2374 .filter(|((_, (range, _)), _)| {
2375 let language = before_edit.language_at(range.start);
2376 let language_id = language.map(|l| l.id());
2377 if let Some((cached_language_id, auto_indent)) = previous_setting
2378 && cached_language_id == language_id
2379 {
2380 auto_indent
2381 } else {
2382 // The auto-indent setting is not present in editorconfigs, hence
2383 // we can avoid passing the file here.
2384 let auto_indent =
2385 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2386 previous_setting = Some((language_id, auto_indent));
2387 auto_indent
2388 }
2389 })
2390 .map(|((ix, (range, _)), new_text)| {
2391 let new_text_length = new_text.len();
2392 let old_start = range.start.to_point(&before_edit);
2393 let new_start = (delta + range.start as isize) as usize;
2394 let range_len = range.end - range.start;
2395 delta += new_text_length as isize - range_len as isize;
2396
2397 // Decide what range of the insertion to auto-indent, and whether
2398 // the first line of the insertion should be considered a newly-inserted line
2399 // or an edit to an existing line.
2400 let mut range_of_insertion_to_indent = 0..new_text_length;
2401 let mut first_line_is_new = true;
2402
2403 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2404 let old_line_end = before_edit.line_len(old_start.row);
2405
2406 if old_start.column > old_line_start {
2407 first_line_is_new = false;
2408 }
2409
2410 if !new_text.contains('\n')
2411 && (old_start.column + (range_len as u32) < old_line_end
2412 || old_line_end == old_line_start)
2413 {
2414 first_line_is_new = false;
2415 }
2416
2417 // When inserting text starting with a newline, avoid auto-indenting the
2418 // previous line.
2419 if new_text.starts_with('\n') {
2420 range_of_insertion_to_indent.start += 1;
2421 first_line_is_new = true;
2422 }
2423
2424 let mut original_indent_column = None;
2425 if let AutoindentMode::Block {
2426 original_indent_columns,
2427 } = &mode
2428 {
2429 original_indent_column = Some(if new_text.starts_with('\n') {
2430 indent_size_for_text(
2431 new_text[range_of_insertion_to_indent.clone()].chars(),
2432 )
2433 .len
2434 } else {
2435 original_indent_columns
2436 .get(ix)
2437 .copied()
2438 .flatten()
2439 .unwrap_or_else(|| {
2440 indent_size_for_text(
2441 new_text[range_of_insertion_to_indent.clone()].chars(),
2442 )
2443 .len
2444 })
2445 });
2446
2447 // Avoid auto-indenting the line after the edit.
2448 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2449 range_of_insertion_to_indent.end -= 1;
2450 }
2451 }
2452
2453 AutoindentRequestEntry {
2454 first_line_is_new,
2455 original_indent_column,
2456 indent_size: before_edit.language_indent_size_at(range.start, cx),
2457 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2458 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2459 }
2460 })
2461 .collect();
2462
2463 if !entries.is_empty() {
2464 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2465 before_edit,
2466 entries,
2467 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2468 ignore_empty_lines: false,
2469 }));
2470 }
2471 }
2472
2473 self.end_transaction(cx);
2474 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2475 Some(edit_id)
2476 }
2477
2478 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2479 self.was_changed();
2480
2481 if self.edits_since::<usize>(old_version).next().is_none() {
2482 return;
2483 }
2484
2485 self.reparse(cx);
2486 cx.emit(BufferEvent::Edited);
2487 if was_dirty != self.is_dirty() {
2488 cx.emit(BufferEvent::DirtyChanged);
2489 }
2490 cx.notify();
2491 }
2492
2493 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2494 where
2495 I: IntoIterator<Item = Range<T>>,
2496 T: ToOffset + Copy,
2497 {
2498 let before_edit = self.snapshot();
2499 let entries = ranges
2500 .into_iter()
2501 .map(|range| AutoindentRequestEntry {
2502 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2503 first_line_is_new: true,
2504 indent_size: before_edit.language_indent_size_at(range.start, cx),
2505 original_indent_column: None,
2506 })
2507 .collect();
2508 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2509 before_edit,
2510 entries,
2511 is_block_mode: false,
2512 ignore_empty_lines: true,
2513 }));
2514 self.request_autoindent(cx);
2515 }
2516
2517 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2518 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2519 pub fn insert_empty_line(
2520 &mut self,
2521 position: impl ToPoint,
2522 space_above: bool,
2523 space_below: bool,
2524 cx: &mut Context<Self>,
2525 ) -> Point {
2526 let mut position = position.to_point(self);
2527
2528 self.start_transaction();
2529
2530 self.edit(
2531 [(position..position, "\n")],
2532 Some(AutoindentMode::EachLine),
2533 cx,
2534 );
2535
2536 if position.column > 0 {
2537 position += Point::new(1, 0);
2538 }
2539
2540 if !self.is_line_blank(position.row) {
2541 self.edit(
2542 [(position..position, "\n")],
2543 Some(AutoindentMode::EachLine),
2544 cx,
2545 );
2546 }
2547
2548 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2549 self.edit(
2550 [(position..position, "\n")],
2551 Some(AutoindentMode::EachLine),
2552 cx,
2553 );
2554 position.row += 1;
2555 }
2556
2557 if space_below
2558 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2559 {
2560 self.edit(
2561 [(position..position, "\n")],
2562 Some(AutoindentMode::EachLine),
2563 cx,
2564 );
2565 }
2566
2567 self.end_transaction(cx);
2568
2569 position
2570 }
2571
2572 /// Applies the given remote operations to the buffer.
2573 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2574 self.pending_autoindent.take();
2575 let was_dirty = self.is_dirty();
2576 let old_version = self.version.clone();
2577 let mut deferred_ops = Vec::new();
2578 let buffer_ops = ops
2579 .into_iter()
2580 .filter_map(|op| match op {
2581 Operation::Buffer(op) => Some(op),
2582 _ => {
2583 if self.can_apply_op(&op) {
2584 self.apply_op(op, cx);
2585 } else {
2586 deferred_ops.push(op);
2587 }
2588 None
2589 }
2590 })
2591 .collect::<Vec<_>>();
2592 for operation in buffer_ops.iter() {
2593 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2594 }
2595 self.text.apply_ops(buffer_ops);
2596 self.deferred_ops.insert(deferred_ops);
2597 self.flush_deferred_ops(cx);
2598 self.did_edit(&old_version, was_dirty, cx);
2599 // Notify independently of whether the buffer was edited as the operations could include a
2600 // selection update.
2601 cx.notify();
2602 }
2603
2604 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2605 let mut deferred_ops = Vec::new();
2606 for op in self.deferred_ops.drain().iter().cloned() {
2607 if self.can_apply_op(&op) {
2608 self.apply_op(op, cx);
2609 } else {
2610 deferred_ops.push(op);
2611 }
2612 }
2613 self.deferred_ops.insert(deferred_ops);
2614 }
2615
2616 pub fn has_deferred_ops(&self) -> bool {
2617 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2618 }
2619
2620 fn can_apply_op(&self, operation: &Operation) -> bool {
2621 match operation {
2622 Operation::Buffer(_) => {
2623 unreachable!("buffer operations should never be applied at this layer")
2624 }
2625 Operation::UpdateDiagnostics {
2626 diagnostics: diagnostic_set,
2627 ..
2628 } => diagnostic_set.iter().all(|diagnostic| {
2629 self.text.can_resolve(&diagnostic.range.start)
2630 && self.text.can_resolve(&diagnostic.range.end)
2631 }),
2632 Operation::UpdateSelections { selections, .. } => selections
2633 .iter()
2634 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2635 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2636 }
2637 }
2638
2639 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2640 match operation {
2641 Operation::Buffer(_) => {
2642 unreachable!("buffer operations should never be applied at this layer")
2643 }
2644 Operation::UpdateDiagnostics {
2645 server_id,
2646 diagnostics: diagnostic_set,
2647 lamport_timestamp,
2648 } => {
2649 let snapshot = self.snapshot();
2650 self.apply_diagnostic_update(
2651 server_id,
2652 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2653 lamport_timestamp,
2654 cx,
2655 );
2656 }
2657 Operation::UpdateSelections {
2658 selections,
2659 lamport_timestamp,
2660 line_mode,
2661 cursor_shape,
2662 } => {
2663 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2664 && set.lamport_timestamp > lamport_timestamp
2665 {
2666 return;
2667 }
2668
2669 self.remote_selections.insert(
2670 lamport_timestamp.replica_id,
2671 SelectionSet {
2672 selections,
2673 lamport_timestamp,
2674 line_mode,
2675 cursor_shape,
2676 },
2677 );
2678 self.text.lamport_clock.observe(lamport_timestamp);
2679 self.non_text_state_update_count += 1;
2680 }
2681 Operation::UpdateCompletionTriggers {
2682 triggers,
2683 lamport_timestamp,
2684 server_id,
2685 } => {
2686 if triggers.is_empty() {
2687 self.completion_triggers_per_language_server
2688 .remove(&server_id);
2689 self.completion_triggers = self
2690 .completion_triggers_per_language_server
2691 .values()
2692 .flat_map(|triggers| triggers.iter().cloned())
2693 .collect();
2694 } else {
2695 self.completion_triggers_per_language_server
2696 .insert(server_id, triggers.iter().cloned().collect());
2697 self.completion_triggers.extend(triggers);
2698 }
2699 self.text.lamport_clock.observe(lamport_timestamp);
2700 }
2701 Operation::UpdateLineEnding {
2702 line_ending,
2703 lamport_timestamp,
2704 } => {
2705 self.text.set_line_ending(line_ending);
2706 self.text.lamport_clock.observe(lamport_timestamp);
2707 }
2708 }
2709 }
2710
2711 fn apply_diagnostic_update(
2712 &mut self,
2713 server_id: LanguageServerId,
2714 diagnostics: DiagnosticSet,
2715 lamport_timestamp: clock::Lamport,
2716 cx: &mut Context<Self>,
2717 ) {
2718 if lamport_timestamp > self.diagnostics_timestamp {
2719 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2720 if diagnostics.is_empty() {
2721 if let Ok(ix) = ix {
2722 self.diagnostics.remove(ix);
2723 }
2724 } else {
2725 match ix {
2726 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2727 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2728 };
2729 }
2730 self.diagnostics_timestamp = lamport_timestamp;
2731 self.non_text_state_update_count += 1;
2732 self.text.lamport_clock.observe(lamport_timestamp);
2733 cx.notify();
2734 cx.emit(BufferEvent::DiagnosticsUpdated);
2735 }
2736 }
2737
2738 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2739 self.was_changed();
2740 cx.emit(BufferEvent::Operation {
2741 operation,
2742 is_local,
2743 });
2744 }
2745
2746 /// Removes the selections for a given peer.
2747 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2748 self.remote_selections.remove(&replica_id);
2749 cx.notify();
2750 }
2751
2752 /// Undoes the most recent transaction.
2753 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2754 let was_dirty = self.is_dirty();
2755 let old_version = self.version.clone();
2756
2757 if let Some((transaction_id, operation)) = self.text.undo() {
2758 self.send_operation(Operation::Buffer(operation), true, cx);
2759 self.did_edit(&old_version, was_dirty, cx);
2760 Some(transaction_id)
2761 } else {
2762 None
2763 }
2764 }
2765
2766 /// Manually undoes a specific transaction in the buffer's undo history.
2767 pub fn undo_transaction(
2768 &mut self,
2769 transaction_id: TransactionId,
2770 cx: &mut Context<Self>,
2771 ) -> bool {
2772 let was_dirty = self.is_dirty();
2773 let old_version = self.version.clone();
2774 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2775 self.send_operation(Operation::Buffer(operation), true, cx);
2776 self.did_edit(&old_version, was_dirty, cx);
2777 true
2778 } else {
2779 false
2780 }
2781 }
2782
2783 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2784 pub fn undo_to_transaction(
2785 &mut self,
2786 transaction_id: TransactionId,
2787 cx: &mut Context<Self>,
2788 ) -> bool {
2789 let was_dirty = self.is_dirty();
2790 let old_version = self.version.clone();
2791
2792 let operations = self.text.undo_to_transaction(transaction_id);
2793 let undone = !operations.is_empty();
2794 for operation in operations {
2795 self.send_operation(Operation::Buffer(operation), true, cx);
2796 }
2797 if undone {
2798 self.did_edit(&old_version, was_dirty, cx)
2799 }
2800 undone
2801 }
2802
2803 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2804 let was_dirty = self.is_dirty();
2805 let operation = self.text.undo_operations(counts);
2806 let old_version = self.version.clone();
2807 self.send_operation(Operation::Buffer(operation), true, cx);
2808 self.did_edit(&old_version, was_dirty, cx);
2809 }
2810
    /// Redoes the most recently undone transaction.
2812 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2813 let was_dirty = self.is_dirty();
2814 let old_version = self.version.clone();
2815
2816 if let Some((transaction_id, operation)) = self.text.redo() {
2817 self.send_operation(Operation::Buffer(operation), true, cx);
2818 self.did_edit(&old_version, was_dirty, cx);
2819 Some(transaction_id)
2820 } else {
2821 None
2822 }
2823 }
2824
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2826 pub fn redo_to_transaction(
2827 &mut self,
2828 transaction_id: TransactionId,
2829 cx: &mut Context<Self>,
2830 ) -> bool {
2831 let was_dirty = self.is_dirty();
2832 let old_version = self.version.clone();
2833
2834 let operations = self.text.redo_to_transaction(transaction_id);
2835 let redone = !operations.is_empty();
2836 for operation in operations {
2837 self.send_operation(Operation::Buffer(operation), true, cx);
2838 }
2839 if redone {
2840 self.did_edit(&old_version, was_dirty, cx)
2841 }
2842 redone
2843 }
2844
2845 /// Override current completion triggers with the user-provided completion triggers.
2846 pub fn set_completion_triggers(
2847 &mut self,
2848 server_id: LanguageServerId,
2849 triggers: BTreeSet<String>,
2850 cx: &mut Context<Self>,
2851 ) {
2852 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2853 if triggers.is_empty() {
2854 self.completion_triggers_per_language_server
2855 .remove(&server_id);
2856 self.completion_triggers = self
2857 .completion_triggers_per_language_server
2858 .values()
2859 .flat_map(|triggers| triggers.iter().cloned())
2860 .collect();
2861 } else {
2862 self.completion_triggers_per_language_server
2863 .insert(server_id, triggers.clone());
2864 self.completion_triggers.extend(triggers.iter().cloned());
2865 }
2866 self.send_operation(
2867 Operation::UpdateCompletionTriggers {
2868 triggers: triggers.into_iter().collect(),
2869 lamport_timestamp: self.completion_triggers_timestamp,
2870 server_id,
2871 },
2872 true,
2873 cx,
2874 );
2875 cx.notify();
2876 }
2877
2878 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
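    ///
    /// A brief sketch (not compiled; assumes a `buffer: &mut Buffer`, a
    /// `server_id: LanguageServerId`, and a `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// use std::collections::BTreeSet;
    ///
    /// buffer.set_completion_triggers(
    ///     server_id,
    ///     BTreeSet::from([".".to_string(), "::".to_string()]),
    ///     cx,
    /// );
    /// assert!(buffer.completion_triggers().contains("."));
    /// ```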
2880 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2881 &self.completion_triggers
2882 }
2883
2884 /// Call this directly after performing edits to prevent the preview tab
2885 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2886 /// to return false until there are additional edits.
2887 pub fn refresh_preview(&mut self) {
2888 self.preview_version = self.version.clone();
2889 }
2890
2891 /// Whether we should preserve the preview status of a tab containing this buffer.
2892 pub fn preserve_preview(&self) -> bool {
2893 !self.has_edits_since(&self.preview_version)
2894 }
2895}
2896
2897#[doc(hidden)]
2898#[cfg(any(test, feature = "test-support"))]
2899impl Buffer {
2900 pub fn edit_via_marked_text(
2901 &mut self,
2902 marked_string: &str,
2903 autoindent_mode: Option<AutoindentMode>,
2904 cx: &mut Context<Self>,
2905 ) {
2906 let edits = self.edits_for_marked_text(marked_string);
2907 self.edit(edits, autoindent_mode, cx);
2908 }
2909
2910 pub fn set_group_interval(&mut self, group_interval: Duration) {
2911 self.text.set_group_interval(group_interval);
2912 }
2913
2914 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2915 where
2916 T: rand::Rng,
2917 {
2918 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2919 let mut last_end = None;
2920 for _ in 0..old_range_count {
2921 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2922 break;
2923 }
2924
2925 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2926 let mut range = self.random_byte_range(new_start, rng);
2927 if rng.random_bool(0.2) {
2928 mem::swap(&mut range.start, &mut range.end);
2929 }
2930 last_end = Some(range.end);
2931
2932 let new_text_len = rng.random_range(0..10);
2933 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2934 new_text = new_text.to_uppercase();
2935
2936 edits.push((range, new_text));
2937 }
2938 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2939 self.edit(edits, None, cx);
2940 }
2941
2942 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2943 let was_dirty = self.is_dirty();
2944 let old_version = self.version.clone();
2945
2946 let ops = self.text.randomly_undo_redo(rng);
2947 if !ops.is_empty() {
2948 for op in ops {
2949 self.send_operation(Operation::Buffer(op), true, cx);
2950 self.did_edit(&old_version, was_dirty, cx);
2951 }
2952 }
2953 }
2954}
2955
2956impl EventEmitter<BufferEvent> for Buffer {}
2957
2958impl Deref for Buffer {
2959 type Target = TextBuffer;
2960
2961 fn deref(&self) -> &Self::Target {
2962 &self.text
2963 }
2964}
2965
2966impl BufferSnapshot {
2967 /// Returns [`IndentSize`] for a given line that respects user settings and
2968 /// language preferences.
2969 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2970 indent_size_for_line(self, row)
2971 }
2972
2973 /// Returns [`IndentSize`] for a given position that respects user settings
2974 /// and language preferences.
2975 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2976 let settings = language_settings(
2977 self.language_at(position).map(|l| l.name()),
2978 self.file(),
2979 cx,
2980 );
2981 if settings.hard_tabs {
2982 IndentSize::tab()
2983 } else {
2984 IndentSize::spaces(settings.tab_size.get())
2985 }
2986 }
2987
2988 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2989 /// is passed in as `single_indent_size`.
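    ///
    /// A small sketch (not compiled; assumes a `BufferSnapshot` named `snapshot`):
    ///
    /// ```ignore
    /// // Suggest indents for rows 0..10, using four spaces as one unit of indentation.
    /// let indents = snapshot.suggested_indents(0..10, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row} should be indented by {} columns", indent.len);
    /// }
    /// ```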
2990 pub fn suggested_indents(
2991 &self,
2992 rows: impl Iterator<Item = u32>,
2993 single_indent_size: IndentSize,
2994 ) -> BTreeMap<u32, IndentSize> {
2995 let mut result = BTreeMap::new();
2996
2997 for row_range in contiguous_ranges(rows, 10) {
2998 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2999 Some(suggestions) => suggestions,
3000 _ => break,
3001 };
3002
3003 for (row, suggestion) in row_range.zip(suggestions) {
3004 let indent_size = if let Some(suggestion) = suggestion {
3005 result
3006 .get(&suggestion.basis_row)
3007 .copied()
3008 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3009 .with_delta(suggestion.delta, single_indent_size)
3010 } else {
3011 self.indent_size_for_line(row)
3012 };
3013
3014 result.insert(row, indent_size);
3015 }
3016 }
3017
3018 result
3019 }
3020
3021 fn suggest_autoindents(
3022 &self,
3023 row_range: Range<u32>,
3024 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3025 let config = &self.language.as_ref()?.config;
3026 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3027
3028 #[derive(Debug, Clone)]
3029 struct StartPosition {
3030 start: Point,
3031 suffix: SharedString,
3032 }
3033
3034 // Find the suggested indentation ranges based on the syntax tree.
3035 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3036 let end = Point::new(row_range.end, 0);
3037 let range = (start..end).to_offset(&self.text);
3038 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3039 Some(&grammar.indents_config.as_ref()?.query)
3040 });
3041 let indent_configs = matches
3042 .grammars()
3043 .iter()
3044 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3045 .collect::<Vec<_>>();
3046
3047 let mut indent_ranges = Vec::<Range<Point>>::new();
3048 let mut start_positions = Vec::<StartPosition>::new();
3049 let mut outdent_positions = Vec::<Point>::new();
3050 while let Some(mat) = matches.peek() {
3051 let mut start: Option<Point> = None;
3052 let mut end: Option<Point> = None;
3053
3054 let config = indent_configs[mat.grammar_index];
3055 for capture in mat.captures {
3056 if capture.index == config.indent_capture_ix {
3057 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3058 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3059 } else if Some(capture.index) == config.start_capture_ix {
3060 start = Some(Point::from_ts_point(capture.node.end_position()));
3061 } else if Some(capture.index) == config.end_capture_ix {
3062 end = Some(Point::from_ts_point(capture.node.start_position()));
3063 } else if Some(capture.index) == config.outdent_capture_ix {
3064 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3065 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3066 start_positions.push(StartPosition {
3067 start: Point::from_ts_point(capture.node.start_position()),
3068 suffix: suffix.clone(),
3069 });
3070 }
3071 }
3072
3073 matches.advance();
3074 if let Some((start, end)) = start.zip(end) {
3075 if start.row == end.row {
3076 continue;
3077 }
3078 let range = start..end;
3079 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3080 Err(ix) => indent_ranges.insert(ix, range),
3081 Ok(ix) => {
3082 let prev_range = &mut indent_ranges[ix];
3083 prev_range.end = prev_range.end.max(range.end);
3084 }
3085 }
3086 }
3087 }
3088
3089 let mut error_ranges = Vec::<Range<Point>>::new();
3090 let mut matches = self
3091 .syntax
3092 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3093 while let Some(mat) = matches.peek() {
3094 let node = mat.captures[0].node;
3095 let start = Point::from_ts_point(node.start_position());
3096 let end = Point::from_ts_point(node.end_position());
3097 let range = start..end;
3098 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3099 Ok(ix) | Err(ix) => ix,
3100 };
3101 let mut end_ix = ix;
3102 while let Some(existing_range) = error_ranges.get(end_ix) {
3103 if existing_range.end < end {
3104 end_ix += 1;
3105 } else {
3106 break;
3107 }
3108 }
3109 error_ranges.splice(ix..end_ix, [range]);
3110 matches.advance();
3111 }
3112
3113 outdent_positions.sort();
3114 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and set its end to the outdent position.
3117 if let Some(range_to_truncate) = indent_ranges
3118 .iter_mut()
3119 .filter(|indent_range| indent_range.contains(&outdent_position))
3120 .next_back()
3121 {
3122 range_to_truncate.end = outdent_position;
3123 }
3124 }
3125
3126 start_positions.sort_by_key(|b| b.start);
3127
        // Find the suggested indentation increases and decreases based on regexes.
3129 let mut regex_outdent_map = HashMap::default();
3130 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3131 let mut start_positions_iter = start_positions.iter().peekable();
3132
3133 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3134 self.for_each_line(
3135 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3136 ..Point::new(row_range.end, 0),
3137 |row, line| {
3138 if config
3139 .decrease_indent_pattern
3140 .as_ref()
3141 .is_some_and(|regex| regex.is_match(line))
3142 {
3143 indent_change_rows.push((row, Ordering::Less));
3144 }
3145 if config
3146 .increase_indent_pattern
3147 .as_ref()
3148 .is_some_and(|regex| regex.is_match(line))
3149 {
3150 indent_change_rows.push((row + 1, Ordering::Greater));
3151 }
3152 while let Some(pos) = start_positions_iter.peek() {
3153 if pos.start.row < row {
3154 let pos = start_positions_iter.next().unwrap();
3155 last_seen_suffix
3156 .entry(pos.suffix.to_string())
3157 .or_default()
3158 .push(pos.start);
3159 } else {
3160 break;
3161 }
3162 }
3163 for rule in &config.decrease_indent_patterns {
3164 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3165 let row_start_column = self.indent_size_for_line(row).len;
3166 let basis_row = rule
3167 .valid_after
3168 .iter()
3169 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3170 .flatten()
3171 .filter(|start_point| start_point.column <= row_start_column)
3172 .max_by_key(|start_point| start_point.row);
3173 if let Some(outdent_to_row) = basis_row {
3174 regex_outdent_map.insert(row, outdent_to_row.row);
3175 }
3176 break;
3177 }
3178 }
3179 },
3180 );
3181
3182 let mut indent_changes = indent_change_rows.into_iter().peekable();
3183 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3184 prev_non_blank_row.unwrap_or(0)
3185 } else {
3186 row_range.start.saturating_sub(1)
3187 };
3188
3189 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3190 Some(row_range.map(move |row| {
3191 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3192
3193 let mut indent_from_prev_row = false;
3194 let mut outdent_from_prev_row = false;
3195 let mut outdent_to_row = u32::MAX;
3196 let mut from_regex = false;
3197
3198 while let Some((indent_row, delta)) = indent_changes.peek() {
3199 match indent_row.cmp(&row) {
3200 Ordering::Equal => match delta {
3201 Ordering::Less => {
3202 from_regex = true;
3203 outdent_from_prev_row = true
3204 }
3205 Ordering::Greater => {
3206 indent_from_prev_row = true;
3207 from_regex = true
3208 }
3209 _ => {}
3210 },
3211
3212 Ordering::Greater => break,
3213 Ordering::Less => {}
3214 }
3215
3216 indent_changes.next();
3217 }
3218
3219 for range in &indent_ranges {
3220 if range.start.row >= row {
3221 break;
3222 }
3223 if range.start.row == prev_row && range.end > row_start {
3224 indent_from_prev_row = true;
3225 }
3226 if range.end > prev_row_start && range.end <= row_start {
3227 outdent_to_row = outdent_to_row.min(range.start.row);
3228 }
3229 }
3230
3231 if let Some(basis_row) = regex_outdent_map.get(&row) {
3232 indent_from_prev_row = false;
3233 outdent_to_row = *basis_row;
3234 from_regex = true;
3235 }
3236
3237 let within_error = error_ranges
3238 .iter()
3239 .any(|e| e.start.row < row && e.end > row_start);
3240
3241 let suggestion = if outdent_to_row == prev_row
3242 || (outdent_from_prev_row && indent_from_prev_row)
3243 {
3244 Some(IndentSuggestion {
3245 basis_row: prev_row,
3246 delta: Ordering::Equal,
3247 within_error: within_error && !from_regex,
3248 })
3249 } else if indent_from_prev_row {
3250 Some(IndentSuggestion {
3251 basis_row: prev_row,
3252 delta: Ordering::Greater,
3253 within_error: within_error && !from_regex,
3254 })
3255 } else if outdent_to_row < prev_row {
3256 Some(IndentSuggestion {
3257 basis_row: outdent_to_row,
3258 delta: Ordering::Equal,
3259 within_error: within_error && !from_regex,
3260 })
3261 } else if outdent_from_prev_row {
3262 Some(IndentSuggestion {
3263 basis_row: prev_row,
3264 delta: Ordering::Less,
3265 within_error: within_error && !from_regex,
3266 })
3267 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3268 {
3269 Some(IndentSuggestion {
3270 basis_row: prev_row,
3271 delta: Ordering::Equal,
3272 within_error: within_error && !from_regex,
3273 })
3274 } else {
3275 None
3276 };
3277
3278 prev_row = row;
3279 prev_row_start = row_start;
3280 suggestion
3281 }))
3282 }
3283
3284 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3285 while row > 0 {
3286 row -= 1;
3287 if !self.is_line_blank(row) {
3288 return Some(row);
3289 }
3290 }
3291 None
3292 }
3293
3294 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3295 let captures = self.syntax.captures(range, &self.text, |grammar| {
3296 grammar
3297 .highlights_config
3298 .as_ref()
3299 .map(|config| &config.query)
3300 });
3301 let highlight_maps = captures
3302 .grammars()
3303 .iter()
3304 .map(|grammar| grammar.highlight_map())
3305 .collect();
3306 (captures, highlight_maps)
3307 }
3308
3309 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3310 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3311 /// returned in chunks where each chunk has a single syntax highlighting style and
3312 /// diagnostic status.
3313 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3314 let range = range.start.to_offset(self)..range.end.to_offset(self);
3315
3316 let mut syntax = None;
3317 if language_aware {
3318 syntax = Some(self.get_highlights(range.clone()));
3319 }
3320 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3321 let diagnostics = language_aware;
3322 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3323 }
3324
3325 pub fn highlighted_text_for_range<T: ToOffset>(
3326 &self,
3327 range: Range<T>,
3328 override_style: Option<HighlightStyle>,
3329 syntax_theme: &SyntaxTheme,
3330 ) -> HighlightedText {
3331 HighlightedText::from_buffer_range(
3332 range,
3333 &self.text,
3334 &self.syntax,
3335 override_style,
3336 syntax_theme,
3337 )
3338 }
3339
3340 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3342 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3343 let mut line = String::new();
3344 let mut row = range.start.row;
3345 for chunk in self
3346 .as_rope()
3347 .chunks_in_range(range.to_offset(self))
3348 .chain(["\n"])
3349 {
3350 for (newline_ix, text) in chunk.split('\n').enumerate() {
3351 if newline_ix > 0 {
3352 callback(row, &line);
3353 row += 1;
3354 line.clear();
3355 }
3356 line.push_str(text);
3357 }
3358 }
3359 }
3360
3361 /// Iterates over every [`SyntaxLayer`] in the buffer.
3362 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3363 self.syntax_layers_for_range(0..self.len(), true)
3364 }
3365
3366 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3367 let offset = position.to_offset(self);
3368 self.syntax_layers_for_range(offset..offset, false)
3369 .filter(|l| {
3370 if let Some(ranges) = l.included_sub_ranges {
3371 ranges.iter().any(|range| {
3372 let start = range.start.to_offset(self);
3373 start <= offset && {
3374 let end = range.end.to_offset(self);
3375 offset < end
3376 }
3377 })
3378 } else {
3379 l.node().start_byte() <= offset && l.node().end_byte() > offset
3380 }
3381 })
3382 .last()
3383 }
3384
3385 pub fn syntax_layers_for_range<D: ToOffset>(
3386 &self,
3387 range: Range<D>,
3388 include_hidden: bool,
3389 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3390 self.syntax
3391 .layers_for_range(range, &self.text, include_hidden)
3392 }
3393
3394 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3395 &self,
3396 range: Range<D>,
3397 ) -> Option<SyntaxLayer<'_>> {
3398 let range = range.to_offset(self);
3399 self.syntax
3400 .layers_for_range(range, &self.text, false)
3401 .max_by(|a, b| {
3402 if a.depth != b.depth {
3403 a.depth.cmp(&b.depth)
3404 } else if a.offset.0 != b.offset.0 {
3405 a.offset.0.cmp(&b.offset.0)
3406 } else {
3407 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3408 }
3409 })
3410 }
3411
3412 /// Returns the main [`Language`].
3413 pub fn language(&self) -> Option<&Arc<Language>> {
3414 self.language.as_ref()
3415 }
3416
3417 /// Returns the [`Language`] at the given location.
3418 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3419 self.syntax_layer_at(position)
3420 .map(|info| info.language)
3421 .or(self.language.as_ref())
3422 }
3423
3424 /// Returns the settings for the language at the given location.
3425 pub fn settings_at<'a, D: ToOffset>(
3426 &'a self,
3427 position: D,
3428 cx: &'a App,
3429 ) -> Cow<'a, LanguageSettings> {
3430 language_settings(
3431 self.language_at(position).map(|l| l.name()),
3432 self.file.as_ref(),
3433 cx,
3434 )
3435 }
3436
3437 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3438 CharClassifier::new(self.language_scope_at(point))
3439 }
3440
3441 /// Returns the [`LanguageScope`] at the given location.
3442 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3443 let offset = position.to_offset(self);
3444 let mut scope = None;
3445 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3446
3447 // Use the layer that has the smallest node intersecting the given point.
3448 for layer in self
3449 .syntax
3450 .layers_for_range(offset..offset, &self.text, false)
3451 {
3452 let mut cursor = layer.node().walk();
3453
3454 let mut range = None;
3455 loop {
3456 let child_range = cursor.node().byte_range();
3457 if !child_range.contains(&offset) {
3458 break;
3459 }
3460
3461 range = Some(child_range);
3462 if cursor.goto_first_child_for_byte(offset).is_none() {
3463 break;
3464 }
3465 }
3466
3467 if let Some(range) = range
3468 && smallest_range_and_depth.as_ref().is_none_or(
3469 |(smallest_range, smallest_range_depth)| {
3470 if layer.depth > *smallest_range_depth {
3471 true
3472 } else if layer.depth == *smallest_range_depth {
3473 range.len() < smallest_range.len()
3474 } else {
3475 false
3476 }
3477 },
3478 )
3479 {
3480 smallest_range_and_depth = Some((range, layer.depth));
3481 scope = Some(LanguageScope {
3482 language: layer.language.clone(),
3483 override_id: layer.override_id(offset, &self.text),
3484 });
3485 }
3486 }
3487
3488 scope.or_else(|| {
3489 self.language.clone().map(|language| LanguageScope {
3490 language,
3491 override_id: None,
3492 })
3493 })
3494 }
3495
3496 /// Returns a tuple of the range and character kind of the word
3497 /// surrounding the given position.
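    ///
    /// A brief sketch (not compiled; assumes a `BufferSnapshot` named `snapshot`):
    ///
    /// ```ignore
    /// // The returned range spans the run of same-kind characters around byte offset 12.
    /// let (word_range, char_kind) = snapshot.surrounding_word(12, None);
    /// ```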
3498 pub fn surrounding_word<T: ToOffset>(
3499 &self,
3500 start: T,
3501 scope_context: Option<CharScopeContext>,
3502 ) -> (Range<usize>, Option<CharKind>) {
3503 let mut start = start.to_offset(self);
3504 let mut end = start;
3505 let mut next_chars = self.chars_at(start).take(128).peekable();
3506 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3507
3508 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3509 let word_kind = cmp::max(
3510 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3511 next_chars.peek().copied().map(|c| classifier.kind(c)),
3512 );
3513
3514 for ch in prev_chars {
3515 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3516 start -= ch.len_utf8();
3517 } else {
3518 break;
3519 }
3520 }
3521
3522 for ch in next_chars {
3523 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3524 end += ch.len_utf8();
3525 } else {
3526 break;
3527 }
3528 }
3529
3530 (start..end, word_kind)
3531 }
3532
3533    /// Moves the `TreeCursor` to the smallest descendant or ancestor syntax node enclosing the given
3534 /// range. When `require_larger` is true, the node found must be larger than the query range.
3535 ///
3536    /// Returns `true` if a node was found, and `false` otherwise. In the `false` case the cursor will
3537 /// be moved to the root of the tree.
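    ///
    /// Illustrative sketch of the intended call pattern, mirroring
    /// [`Self::syntax_ancestor`] (`layer` is assumed to be a `SyntaxLayer`):
    ///
    /// ```ignore
    /// let mut cursor = layer.node().walk();
    /// if Self::goto_node_enclosing_range(&mut cursor, &(10..20), true) {
    ///     // The cursor now points at a node that encloses 10..20 and is larger than it.
    ///     let _node = cursor.node();
    /// }
    /// ```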
3538 fn goto_node_enclosing_range(
3539 cursor: &mut tree_sitter::TreeCursor,
3540 query_range: &Range<usize>,
3541 require_larger: bool,
3542 ) -> bool {
3543 let mut ascending = false;
3544 loop {
3545 let mut range = cursor.node().byte_range();
3546 if query_range.is_empty() {
3547 // When the query range is empty and the current node starts after it, move to the
3548                // previous sibling to find the containing node.
3549 if range.start > query_range.start {
3550 cursor.goto_previous_sibling();
3551 range = cursor.node().byte_range();
3552 }
3553 } else {
3554 // When the query range is non-empty and the current node ends exactly at the start,
3555 // move to the next sibling to find a node that extends beyond the start.
3556 if range.end == query_range.start {
3557 cursor.goto_next_sibling();
3558 range = cursor.node().byte_range();
3559 }
3560 }
3561
3562 let encloses = range.contains_inclusive(query_range)
3563 && (!require_larger || range.len() > query_range.len());
3564 if !encloses {
3565 ascending = true;
3566 if !cursor.goto_parent() {
3567 return false;
3568 }
3569 continue;
3570 } else if ascending {
3571 return true;
3572 }
3573
3574 // Descend into the current node.
3575 if cursor
3576 .goto_first_child_for_byte(query_range.start)
3577 .is_none()
3578 {
3579 return true;
3580 }
3581 }
3582 }
3583
3584 pub fn syntax_ancestor<'a, T: ToOffset>(
3585 &'a self,
3586 range: Range<T>,
3587 ) -> Option<tree_sitter::Node<'a>> {
3588 let range = range.start.to_offset(self)..range.end.to_offset(self);
3589 let mut result: Option<tree_sitter::Node<'a>> = None;
3590 for layer in self
3591 .syntax
3592 .layers_for_range(range.clone(), &self.text, true)
3593 {
3594 let mut cursor = layer.node().walk();
3595
3596 // Find the node that both contains the range and is larger than it.
3597 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3598 continue;
3599 }
3600
3601 let left_node = cursor.node();
3602 let mut layer_result = left_node;
3603
3604 // For an empty range, try to find another node immediately to the right of the range.
3605 if left_node.end_byte() == range.start {
3606 let mut right_node = None;
3607 while !cursor.goto_next_sibling() {
3608 if !cursor.goto_parent() {
3609 break;
3610 }
3611 }
3612
3613 while cursor.node().start_byte() == range.start {
3614 right_node = Some(cursor.node());
3615 if !cursor.goto_first_child() {
3616 break;
3617 }
3618 }
3619
3620 // If there is a candidate node on both sides of the (empty) range, then
3621 // decide between the two by favoring a named node over an anonymous token.
3622 // If both nodes are the same in that regard, favor the right one.
3623 if let Some(right_node) = right_node
3624 && (right_node.is_named() || !left_node.is_named())
3625 {
3626 layer_result = right_node;
3627 }
3628 }
3629
3630 if let Some(previous_result) = &result
3631 && previous_result.byte_range().len() < layer_result.byte_range().len()
3632 {
3633 continue;
3634 }
3635 result = Some(layer_result);
3636 }
3637
3638 result
3639 }
3640
3641 /// Find the previous sibling syntax node at the given range.
3642 ///
3643 /// This function locates the syntax node that precedes the node containing
3644 /// the given range. It searches hierarchically by:
3645 /// 1. Finding the node that contains the given range
3646 /// 2. Looking for the previous sibling at the same tree level
3647 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3648 ///
3649 /// Returns `None` if there is no previous sibling at any ancestor level.
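    ///
    /// A hedged sketch (not a doctest), assuming `snapshot` is a `BufferSnapshot`
    /// of Rust source in which a `use` item directly precedes the item containing
    /// the byte range `sel`:
    ///
    /// ```ignore
    /// if let Some(prev) = snapshot.syntax_prev_sibling(sel.clone()) {
    ///     assert_eq!(prev.kind(), "use_declaration");
    /// }
    /// ```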
3650 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3651 &'a self,
3652 range: Range<T>,
3653 ) -> Option<tree_sitter::Node<'a>> {
3654 let range = range.start.to_offset(self)..range.end.to_offset(self);
3655 let mut result: Option<tree_sitter::Node<'a>> = None;
3656
3657 for layer in self
3658 .syntax
3659 .layers_for_range(range.clone(), &self.text, true)
3660 {
3661 let mut cursor = layer.node().walk();
3662
3663 // Find the node that contains the range
3664 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3665 continue;
3666 }
3667
3668 // Look for the previous sibling, moving up ancestor levels if needed
3669 loop {
3670 if cursor.goto_previous_sibling() {
3671 let layer_result = cursor.node();
3672
3673 if let Some(previous_result) = &result {
3674 if previous_result.byte_range().end < layer_result.byte_range().end {
3675 continue;
3676 }
3677 }
3678 result = Some(layer_result);
3679 break;
3680 }
3681
3682 // No sibling found at this level, try moving up to parent
3683 if !cursor.goto_parent() {
3684 break;
3685 }
3686 }
3687 }
3688
3689 result
3690 }
3691
3692 /// Find the next sibling syntax node at the given range.
3693 ///
3694 /// This function locates the syntax node that follows the node containing
3695 /// the given range. It searches hierarchically by:
3696 /// 1. Finding the node that contains the given range
3697 /// 2. Looking for the next sibling at the same tree level
3698 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3699 ///
3700 /// Returns `None` if there is no next sibling at any ancestor level.
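    ///
    /// A hedged sketch (not a doctest), assuming `snapshot` is a `BufferSnapshot`
    /// and `sel` is a byte range inside some item:
    ///
    /// ```ignore
    /// if let Some(next) = snapshot.syntax_next_sibling(sel.clone()) {
    ///     println!("next sibling spans {:?}", next.byte_range());
    /// }
    /// ```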
3701 pub fn syntax_next_sibling<'a, T: ToOffset>(
3702 &'a self,
3703 range: Range<T>,
3704 ) -> Option<tree_sitter::Node<'a>> {
3705 let range = range.start.to_offset(self)..range.end.to_offset(self);
3706 let mut result: Option<tree_sitter::Node<'a>> = None;
3707
3708 for layer in self
3709 .syntax
3710 .layers_for_range(range.clone(), &self.text, true)
3711 {
3712 let mut cursor = layer.node().walk();
3713
3714 // Find the node that contains the range
3715 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3716 continue;
3717 }
3718
3719 // Look for the next sibling, moving up ancestor levels if needed
3720 loop {
3721 if cursor.goto_next_sibling() {
3722 let layer_result = cursor.node();
3723
3724 if let Some(previous_result) = &result {
3725 if previous_result.byte_range().start > layer_result.byte_range().start {
3726 continue;
3727 }
3728 }
3729 result = Some(layer_result);
3730 break;
3731 }
3732
3733 // No sibling found at this level, try moving up to parent
3734 if !cursor.goto_parent() {
3735 break;
3736 }
3737 }
3738 }
3739
3740 result
3741 }
3742
3743 /// Returns the root syntax node within the given row
3744 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3745 let start_offset = position.to_offset(self);
3746
3747 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3748
3749 let layer = self
3750 .syntax
3751 .layers_for_range(start_offset..start_offset, &self.text, true)
3752 .next()?;
3753
3754 let mut cursor = layer.node().walk();
3755
3756 // Descend to the first leaf that touches the start of the range.
3757 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3758 if cursor.node().end_byte() == start_offset {
3759 cursor.goto_next_sibling();
3760 }
3761 }
3762
3763 // Ascend to the root node within the same row.
3764 while cursor.goto_parent() {
3765 if cursor.node().start_position().row != row {
3766 break;
3767 }
3768 }
3769
3770 Some(cursor.node())
3771 }
3772
3773 /// Returns the outline for the buffer.
3774 ///
3775 /// This method allows passing an optional [`SyntaxTheme`] to
3776 /// syntax-highlight the returned symbols.
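    ///
    /// A hedged sketch (not a doctest), assuming `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// // Outline of the whole buffer, without syntax highlighting.
    /// let outline = snapshot.outline(None);
    /// // The same flattened items can also be fetched directly:
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```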
3777 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3778 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3779 }
3780
3781 /// Returns all the symbols that contain the given position.
3782 ///
3783 /// This method allows passing an optional [`SyntaxTheme`] to
3784 /// syntax-highlight the returned symbols.
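    ///
    /// A hedged sketch (not a doctest): listing the symbols that enclose an
    /// assumed `cursor_offset`, outermost first, with `snapshot` a `BufferSnapshot`:
    ///
    /// ```ignore
    /// for symbol in snapshot.symbols_containing(cursor_offset, None) {
    ///     println!("{}: {}", symbol.depth, symbol.text);
    /// }
    /// ```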
3785 pub fn symbols_containing<T: ToOffset>(
3786 &self,
3787 position: T,
3788 theme: Option<&SyntaxTheme>,
3789 ) -> Vec<OutlineItem<Anchor>> {
3790 let position = position.to_offset(self);
3791 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3792 let end = self.clip_offset(position + 1, Bias::Right);
3793 let mut items = self.outline_items_containing(start..end, false, theme);
3794 let mut prev_depth = None;
3795 items.retain(|item| {
3796 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3797 prev_depth = Some(item.depth);
3798 result
3799 });
3800 items
3801 }
3802
3803 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3804 let range = range.to_offset(self);
3805 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3806 grammar.outline_config.as_ref().map(|c| &c.query)
3807 });
3808 let configs = matches
3809 .grammars()
3810 .iter()
3811 .map(|g| g.outline_config.as_ref().unwrap())
3812 .collect::<Vec<_>>();
3813
3814 while let Some(mat) = matches.peek() {
3815 let config = &configs[mat.grammar_index];
3816 let containing_item_node = maybe!({
3817 let item_node = mat.captures.iter().find_map(|cap| {
3818 if cap.index == config.item_capture_ix {
3819 Some(cap.node)
3820 } else {
3821 None
3822 }
3823 })?;
3824
3825 let item_byte_range = item_node.byte_range();
3826 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3827 None
3828 } else {
3829 Some(item_node)
3830 }
3831 });
3832
3833 if let Some(item_node) = containing_item_node {
3834 return Some(
3835 Point::from_ts_point(item_node.start_position())
3836 ..Point::from_ts_point(item_node.end_position()),
3837 );
3838 }
3839
3840 matches.advance();
3841 }
3842 None
3843 }
3844
3845 pub fn outline_items_containing<T: ToOffset>(
3846 &self,
3847 range: Range<T>,
3848 include_extra_context: bool,
3849 theme: Option<&SyntaxTheme>,
3850 ) -> Vec<OutlineItem<Anchor>> {
3851 self.outline_items_containing_internal(
3852 range,
3853 include_extra_context,
3854 theme,
3855 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3856 )
3857 }
3858
3859 pub fn outline_items_as_points_containing<T: ToOffset>(
3860 &self,
3861 range: Range<T>,
3862 include_extra_context: bool,
3863 theme: Option<&SyntaxTheme>,
3864 ) -> Vec<OutlineItem<Point>> {
3865 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3866 range
3867 })
3868 }
3869
3870 fn outline_items_containing_internal<T: ToOffset, U>(
3871 &self,
3872 range: Range<T>,
3873 include_extra_context: bool,
3874 theme: Option<&SyntaxTheme>,
3875 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3876 ) -> Vec<OutlineItem<U>> {
3877 let range = range.to_offset(self);
3878 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3879 grammar.outline_config.as_ref().map(|c| &c.query)
3880 });
3881
3882 let mut items = Vec::new();
3883 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3884 while let Some(mat) = matches.peek() {
3885 let config = matches.grammars()[mat.grammar_index]
3886 .outline_config
3887 .as_ref()
3888 .unwrap();
3889 if let Some(item) =
3890 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3891 {
3892 items.push(item);
3893 } else if let Some(capture) = mat
3894 .captures
3895 .iter()
3896 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3897 {
3898 let capture_range = capture.node.start_position()..capture.node.end_position();
3899 let mut capture_row_range =
3900 capture_range.start.row as u32..capture_range.end.row as u32;
3901 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3902 {
3903 capture_row_range.end -= 1;
3904 }
3905 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3906 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3907 last_row_range.end = capture_row_range.end;
3908 } else {
3909 annotation_row_ranges.push(capture_row_range);
3910 }
3911 } else {
3912 annotation_row_ranges.push(capture_row_range);
3913 }
3914 }
3915 matches.advance();
3916 }
3917
3918 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3919
3920 // Assign depths based on containment relationships and convert to anchors.
3921 let mut item_ends_stack = Vec::<Point>::new();
3922 let mut anchor_items = Vec::new();
3923 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3924 for item in items {
3925 while let Some(last_end) = item_ends_stack.last().copied() {
3926 if last_end < item.range.end {
3927 item_ends_stack.pop();
3928 } else {
3929 break;
3930 }
3931 }
3932
3933 let mut annotation_row_range = None;
3934 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3935 let row_preceding_item = item.range.start.row.saturating_sub(1);
3936 if next_annotation_row_range.end < row_preceding_item {
3937 annotation_row_ranges.next();
3938 } else {
3939 if next_annotation_row_range.end == row_preceding_item {
3940 annotation_row_range = Some(next_annotation_row_range.clone());
3941 annotation_row_ranges.next();
3942 }
3943 break;
3944 }
3945 }
3946
3947 anchor_items.push(OutlineItem {
3948 depth: item_ends_stack.len(),
3949 range: range_callback(self, item.range.clone()),
3950 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3951 text: item.text,
3952 highlight_ranges: item.highlight_ranges,
3953 name_ranges: item.name_ranges,
3954 body_range: item.body_range.map(|r| range_callback(self, r)),
3955 annotation_range: annotation_row_range.map(|annotation_range| {
3956 let point_range = Point::new(annotation_range.start, 0)
3957 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3958 range_callback(self, point_range)
3959 }),
3960 });
3961 item_ends_stack.push(item.range.end);
3962 }
3963
3964 anchor_items
3965 }
3966
3967 fn next_outline_item(
3968 &self,
3969 config: &OutlineConfig,
3970 mat: &SyntaxMapMatch,
3971 range: &Range<usize>,
3972 include_extra_context: bool,
3973 theme: Option<&SyntaxTheme>,
3974 ) -> Option<OutlineItem<Point>> {
3975 let item_node = mat.captures.iter().find_map(|cap| {
3976 if cap.index == config.item_capture_ix {
3977 Some(cap.node)
3978 } else {
3979 None
3980 }
3981 })?;
3982
3983 let item_byte_range = item_node.byte_range();
3984 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3985 return None;
3986 }
3987 let item_point_range = Point::from_ts_point(item_node.start_position())
3988 ..Point::from_ts_point(item_node.end_position());
3989
3990 let mut open_point = None;
3991 let mut close_point = None;
3992
3993 let mut buffer_ranges = Vec::new();
3994 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
3995 let mut range = node.start_byte()..node.end_byte();
3996 let start = node.start_position();
3997 if node.end_position().row > start.row {
3998 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3999 }
4000
4001 if !range.is_empty() {
4002 buffer_ranges.push((range, node_is_name));
4003 }
4004 };
4005
4006 for capture in mat.captures {
4007 if capture.index == config.name_capture_ix {
4008 add_to_buffer_ranges(capture.node, true);
4009 } else if Some(capture.index) == config.context_capture_ix
4010 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4011 {
4012 add_to_buffer_ranges(capture.node, false);
4013 } else {
4014 if Some(capture.index) == config.open_capture_ix {
4015 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4016 } else if Some(capture.index) == config.close_capture_ix {
4017 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4018 }
4019 }
4020 }
4021
4022 if buffer_ranges.is_empty() {
4023 return None;
4024 }
4025 let source_range_for_text =
4026 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4027
4028 let mut text = String::new();
4029 let mut highlight_ranges = Vec::new();
4030 let mut name_ranges = Vec::new();
4031 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4032 let mut last_buffer_range_end = 0;
4033 for (buffer_range, is_name) in buffer_ranges {
4034 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4035 if space_added {
4036 text.push(' ');
4037 }
4038 let before_append_len = text.len();
4039 let mut offset = buffer_range.start;
4040 chunks.seek(buffer_range.clone());
4041 for mut chunk in chunks.by_ref() {
4042 if chunk.text.len() > buffer_range.end - offset {
4043 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4044 offset = buffer_range.end;
4045 } else {
4046 offset += chunk.text.len();
4047 }
4048 let style = chunk
4049 .syntax_highlight_id
4050 .zip(theme)
4051 .and_then(|(highlight, theme)| highlight.style(theme));
4052 if let Some(style) = style {
4053 let start = text.len();
4054 let end = start + chunk.text.len();
4055 highlight_ranges.push((start..end, style));
4056 }
4057 text.push_str(chunk.text);
4058 if offset >= buffer_range.end {
4059 break;
4060 }
4061 }
4062 if is_name {
4063 let after_append_len = text.len();
4064 let start = if space_added && !name_ranges.is_empty() {
4065 before_append_len - 1
4066 } else {
4067 before_append_len
4068 };
4069 name_ranges.push(start..after_append_len);
4070 }
4071 last_buffer_range_end = buffer_range.end;
4072 }
4073
4074 Some(OutlineItem {
4075 depth: 0, // We'll calculate the depth later
4076 range: item_point_range,
4077 source_range_for_text: source_range_for_text.to_point(self),
4078 text,
4079 highlight_ranges,
4080 name_ranges,
4081 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4082 annotation_range: None,
4083 })
4084 }
4085
4086 pub fn function_body_fold_ranges<T: ToOffset>(
4087 &self,
4088 within: Range<T>,
4089 ) -> impl Iterator<Item = Range<usize>> + '_ {
4090 self.text_object_ranges(within, TreeSitterOptions::default())
4091 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4092 }
4093
4094    /// For each grammar in the language, runs the [`tree_sitter::Query`] returned by
4095    /// the given callback against the given range.
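    ///
    /// A hedged sketch (not a doctest), mirroring how outline captures are
    /// queried elsewhere in this file:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here.
    ///     matches.advance();
    /// }
    /// ```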
4096 pub fn matches(
4097 &self,
4098 range: Range<usize>,
4099 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4100 ) -> SyntaxMapMatches<'_> {
4101 self.syntax.matches(range, self, query)
4102 }
4103
4104 pub fn all_bracket_ranges(
4105 &self,
4106 range: Range<usize>,
4107 ) -> impl Iterator<Item = BracketMatch> + '_ {
4108 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4109 grammar.brackets_config.as_ref().map(|c| &c.query)
4110 });
4111 let configs = matches
4112 .grammars()
4113 .iter()
4114 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4115 .collect::<Vec<_>>();
4116
4117 iter::from_fn(move || {
4118 while let Some(mat) = matches.peek() {
4119 let mut open = None;
4120 let mut close = None;
4121 let config = &configs[mat.grammar_index];
4122 let pattern = &config.patterns[mat.pattern_index];
4123 for capture in mat.captures {
4124 if capture.index == config.open_capture_ix {
4125 open = Some(capture.node.byte_range());
4126 } else if capture.index == config.close_capture_ix {
4127 close = Some(capture.node.byte_range());
4128 }
4129 }
4130
4131 matches.advance();
4132
4133 let Some((open_range, close_range)) = open.zip(close) else {
4134 continue;
4135 };
4136
4137 let bracket_range = open_range.start..=close_range.end;
4138 if !bracket_range.overlaps(&range) {
4139 continue;
4140 }
4141
4142 return Some(BracketMatch {
4143 open_range,
4144 close_range,
4145 newline_only: pattern.newline_only,
4146 });
4147 }
4148 None
4149 })
4150 }
4151
4152 /// Returns bracket range pairs overlapping or adjacent to `range`
4153 pub fn bracket_ranges<T: ToOffset>(
4154 &self,
4155 range: Range<T>,
4156 ) -> impl Iterator<Item = BracketMatch> + '_ {
4157 // Find bracket pairs that *inclusively* contain the given range.
4158 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4159 self.all_bracket_ranges(range)
4160 .filter(|pair| !pair.newline_only)
4161 }
4162
4163 pub fn debug_variables_query<T: ToOffset>(
4164 &self,
4165 range: Range<T>,
4166 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4167 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4168
4169 let mut matches = self.syntax.matches_with_options(
4170 range.clone(),
4171 &self.text,
4172 TreeSitterOptions::default(),
4173 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4174 );
4175
4176 let configs = matches
4177 .grammars()
4178 .iter()
4179 .map(|grammar| grammar.debug_variables_config.as_ref())
4180 .collect::<Vec<_>>();
4181
4182 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4183
4184 iter::from_fn(move || {
4185 loop {
4186 while let Some(capture) = captures.pop() {
4187 if capture.0.overlaps(&range) {
4188 return Some(capture);
4189 }
4190 }
4191
4192 let mat = matches.peek()?;
4193
4194 let Some(config) = configs[mat.grammar_index].as_ref() else {
4195 matches.advance();
4196 continue;
4197 };
4198
4199 for capture in mat.captures {
4200 let Some(ix) = config
4201 .objects_by_capture_ix
4202 .binary_search_by_key(&capture.index, |e| e.0)
4203 .ok()
4204 else {
4205 continue;
4206 };
4207 let text_object = config.objects_by_capture_ix[ix].1;
4208 let byte_range = capture.node.byte_range();
4209
4210 let mut found = false;
4211 for (range, existing) in captures.iter_mut() {
4212 if existing == &text_object {
4213 range.start = range.start.min(byte_range.start);
4214 range.end = range.end.max(byte_range.end);
4215 found = true;
4216 break;
4217 }
4218 }
4219
4220 if !found {
4221 captures.push((byte_range, text_object));
4222 }
4223 }
4224
4225 matches.advance();
4226 }
4227 })
4228 }
4229
4230 pub fn text_object_ranges<T: ToOffset>(
4231 &self,
4232 range: Range<T>,
4233 options: TreeSitterOptions,
4234 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4235 let range =
4236 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4237
4238 let mut matches =
4239 self.syntax
4240 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4241 grammar.text_object_config.as_ref().map(|c| &c.query)
4242 });
4243
4244 let configs = matches
4245 .grammars()
4246 .iter()
4247 .map(|grammar| grammar.text_object_config.as_ref())
4248 .collect::<Vec<_>>();
4249
4250 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4251
4252 iter::from_fn(move || {
4253 loop {
4254 while let Some(capture) = captures.pop() {
4255 if capture.0.overlaps(&range) {
4256 return Some(capture);
4257 }
4258 }
4259
4260 let mat = matches.peek()?;
4261
4262 let Some(config) = configs[mat.grammar_index].as_ref() else {
4263 matches.advance();
4264 continue;
4265 };
4266
4267 for capture in mat.captures {
4268 let Some(ix) = config
4269 .text_objects_by_capture_ix
4270 .binary_search_by_key(&capture.index, |e| e.0)
4271 .ok()
4272 else {
4273 continue;
4274 };
4275 let text_object = config.text_objects_by_capture_ix[ix].1;
4276 let byte_range = capture.node.byte_range();
4277
4278 let mut found = false;
4279 for (range, existing) in captures.iter_mut() {
4280 if existing == &text_object {
4281 range.start = range.start.min(byte_range.start);
4282 range.end = range.end.max(byte_range.end);
4283 found = true;
4284 break;
4285 }
4286 }
4287
4288 if !found {
4289 captures.push((byte_range, text_object));
4290 }
4291 }
4292
4293 matches.advance();
4294 }
4295 })
4296 }
4297
4298 /// Returns enclosing bracket ranges containing the given range
4299 pub fn enclosing_bracket_ranges<T: ToOffset>(
4300 &self,
4301 range: Range<T>,
4302 ) -> impl Iterator<Item = BracketMatch> + '_ {
4303 let range = range.start.to_offset(self)..range.end.to_offset(self);
4304
4305 self.bracket_ranges(range.clone()).filter(move |pair| {
4306 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4307 })
4308 }
4309
4310    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain it.
4311    ///
4312    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
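    ///
    /// A hedged sketch (not a doctest), assuming `snapshot` is a `BufferSnapshot`
    /// and `selection` is a byte range (the filter shown is illustrative):
    ///
    /// ```ignore
    /// // Only consider pairs with at least one byte between the brackets.
    /// let filter: &dyn Fn(Range<usize>, Range<usize>) -> bool =
    ///     &|open, close| open.end < close.start;
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(selection, Some(filter))
    /// {
    ///     println!("enclosed by {:?} and {:?}", open, close);
    /// }
    /// ```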
4313 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4314 &self,
4315 range: Range<T>,
4316 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4317 ) -> Option<(Range<usize>, Range<usize>)> {
4318 let range = range.start.to_offset(self)..range.end.to_offset(self);
4319
4320 // Get the ranges of the innermost pair of brackets.
4321 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4322
4323 for pair in self.enclosing_bracket_ranges(range) {
4324 if let Some(range_filter) = range_filter
4325 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4326 {
4327 continue;
4328 }
4329
4330 let len = pair.close_range.end - pair.open_range.start;
4331
4332 if let Some((existing_open, existing_close)) = &result {
4333 let existing_len = existing_close.end - existing_open.start;
4334 if len > existing_len {
4335 continue;
4336 }
4337 }
4338
4339 result = Some((pair.open_range, pair.close_range));
4340 }
4341
4342 result
4343 }
4344
4345    /// Returns offset ranges for any matches of the redaction query.
4346 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4347 /// will be run on the relevant section of the buffer.
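    ///
    /// A hedged sketch (not a doctest): masking redacted spans for display,
    /// assuming `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// for range in snapshot.redacted_ranges(0..snapshot.len()) {
    ///     let mask = "•".repeat(range.len());
    ///     // Render `mask` in place of the text at `range`.
    /// }
    /// ```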
4348 pub fn redacted_ranges<T: ToOffset>(
4349 &self,
4350 range: Range<T>,
4351 ) -> impl Iterator<Item = Range<usize>> + '_ {
4352 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4353 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4354 grammar
4355 .redactions_config
4356 .as_ref()
4357 .map(|config| &config.query)
4358 });
4359
4360 let configs = syntax_matches
4361 .grammars()
4362 .iter()
4363 .map(|grammar| grammar.redactions_config.as_ref())
4364 .collect::<Vec<_>>();
4365
4366 iter::from_fn(move || {
4367 let redacted_range = syntax_matches
4368 .peek()
4369 .and_then(|mat| {
4370 configs[mat.grammar_index].and_then(|config| {
4371 mat.captures
4372 .iter()
4373 .find(|capture| capture.index == config.redaction_capture_ix)
4374 })
4375 })
4376 .map(|mat| mat.node.byte_range());
4377 syntax_matches.advance();
4378 redacted_range
4379 })
4380 }
4381
4382 pub fn injections_intersecting_range<T: ToOffset>(
4383 &self,
4384 range: Range<T>,
4385 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4386 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4387
4388 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4389 grammar
4390 .injection_config
4391 .as_ref()
4392 .map(|config| &config.query)
4393 });
4394
4395 let configs = syntax_matches
4396 .grammars()
4397 .iter()
4398 .map(|grammar| grammar.injection_config.as_ref())
4399 .collect::<Vec<_>>();
4400
4401 iter::from_fn(move || {
4402 let ranges = syntax_matches.peek().and_then(|mat| {
4403 let config = &configs[mat.grammar_index]?;
4404 let content_capture_range = mat.captures.iter().find_map(|capture| {
4405 if capture.index == config.content_capture_ix {
4406 Some(capture.node.byte_range())
4407 } else {
4408 None
4409 }
4410 })?;
4411 let language = self.language_at(content_capture_range.start)?;
4412 Some((content_capture_range, language))
4413 });
4414 syntax_matches.advance();
4415 ranges
4416 })
4417 }
4418
4419 pub fn runnable_ranges(
4420 &self,
4421 offset_range: Range<usize>,
4422 ) -> impl Iterator<Item = RunnableRange> + '_ {
4423 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4424 grammar.runnable_config.as_ref().map(|config| &config.query)
4425 });
4426
4427 let test_configs = syntax_matches
4428 .grammars()
4429 .iter()
4430 .map(|grammar| grammar.runnable_config.as_ref())
4431 .collect::<Vec<_>>();
4432
4433 iter::from_fn(move || {
4434 loop {
4435 let mat = syntax_matches.peek()?;
4436
4437 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4438 let mut run_range = None;
4439 let full_range = mat.captures.iter().fold(
4440 Range {
4441 start: usize::MAX,
4442 end: 0,
4443 },
4444 |mut acc, next| {
4445 let byte_range = next.node.byte_range();
4446 if acc.start > byte_range.start {
4447 acc.start = byte_range.start;
4448 }
4449 if acc.end < byte_range.end {
4450 acc.end = byte_range.end;
4451 }
4452 acc
4453 },
4454 );
4455 if full_range.start > full_range.end {
4456 // We did not find a full spanning range of this match.
4457 return None;
4458 }
4459 let extra_captures: SmallVec<[_; 1]> =
4460 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4461 test_configs
4462 .extra_captures
4463 .get(capture.index as usize)
4464 .cloned()
4465 .and_then(|tag_name| match tag_name {
4466 RunnableCapture::Named(name) => {
4467 Some((capture.node.byte_range(), name))
4468 }
4469 RunnableCapture::Run => {
4470 let _ = run_range.insert(capture.node.byte_range());
4471 None
4472 }
4473 })
4474 }));
4475 let run_range = run_range?;
4476 let tags = test_configs
4477 .query
4478 .property_settings(mat.pattern_index)
4479 .iter()
4480 .filter_map(|property| {
4481 if *property.key == *"tag" {
4482 property
4483 .value
4484 .as_ref()
4485 .map(|value| RunnableTag(value.to_string().into()))
4486 } else {
4487 None
4488 }
4489 })
4490 .collect();
4491 let extra_captures = extra_captures
4492 .into_iter()
4493 .map(|(range, name)| {
4494 (
4495 name.to_string(),
4496 self.text_for_range(range).collect::<String>(),
4497 )
4498 })
4499 .collect();
4500 // All tags should have the same range.
4501 Some(RunnableRange {
4502 run_range,
4503 full_range,
4504 runnable: Runnable {
4505 tags,
4506 language: mat.language,
4507 buffer: self.remote_id(),
4508 },
4509 extra_captures,
4510 buffer_id: self.remote_id(),
4511 })
4512 });
4513
4514 syntax_matches.advance();
4515 if test_range.is_some() {
4516                    // It's fine to short-circuit when .peek()? returns None. But when a match did not contain
4517                    // a run marker, we don't return None from this iterator; we just loop around to the next match.
4518 return test_range;
4519 }
4520 }
4521 })
4522 }
4523
4524    /// Returns selections for peers intersecting the given range, optionally including the local replica's.
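    ///
    /// A hedged sketch (not a doctest), assuming `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// for (replica_id, _line_mode, _cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     for selection in selections {
    ///         // Each `selection` is a `&Selection<Anchor>` owned by `replica_id`.
    ///     }
    /// }
    /// ```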
4525 #[allow(clippy::type_complexity)]
4526 pub fn selections_in_range(
4527 &self,
4528 range: Range<Anchor>,
4529 include_local: bool,
4530 ) -> impl Iterator<
4531 Item = (
4532 ReplicaId,
4533 bool,
4534 CursorShape,
4535 impl Iterator<Item = &Selection<Anchor>> + '_,
4536 ),
4537 > + '_ {
4538 self.remote_selections
4539 .iter()
4540 .filter(move |(replica_id, set)| {
4541 (include_local || **replica_id != self.text.replica_id())
4542 && !set.selections.is_empty()
4543 })
4544 .map(move |(replica_id, set)| {
4545 let start_ix = match set.selections.binary_search_by(|probe| {
4546 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4547 }) {
4548 Ok(ix) | Err(ix) => ix,
4549 };
4550 let end_ix = match set.selections.binary_search_by(|probe| {
4551 probe.start.cmp(&range.end, self).then(Ordering::Less)
4552 }) {
4553 Ok(ix) | Err(ix) => ix,
4554 };
4555
4556 (
4557 *replica_id,
4558 set.line_mode,
4559 set.cursor_shape,
4560 set.selections[start_ix..end_ix].iter(),
4561 )
4562 })
4563 }
4564
4565    /// Returns whether the buffer contains any diagnostics.
4566 pub fn has_diagnostics(&self) -> bool {
4567 !self.diagnostics.is_empty()
4568 }
4569
4570 /// Returns all the diagnostics intersecting the given range.
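    ///
    /// A hedged sketch (not a doctest): collecting error entries with offset
    /// ranges, assuming `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// let errors: Vec<_> = snapshot
    ///     .diagnostics_in_range::<_, usize>(0..snapshot.len(), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```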
4571 pub fn diagnostics_in_range<'a, T, O>(
4572 &'a self,
4573 search_range: Range<T>,
4574 reversed: bool,
4575 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4576 where
4577 T: 'a + Clone + ToOffset,
4578 O: 'a + FromAnchor,
4579 {
4580 let mut iterators: Vec<_> = self
4581 .diagnostics
4582 .iter()
4583 .map(|(_, collection)| {
4584 collection
4585 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4586 .peekable()
4587 })
4588 .collect();
4589
4590 std::iter::from_fn(move || {
4591 let (next_ix, _) = iterators
4592 .iter_mut()
4593 .enumerate()
4594 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4595 .min_by(|(_, a), (_, b)| {
4596 let cmp = a
4597 .range
4598 .start
4599 .cmp(&b.range.start, self)
4600 // when range is equal, sort by diagnostic severity
4601 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4602 // and stabilize order with group_id
4603 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4604 if reversed { cmp.reverse() } else { cmp }
4605 })?;
4606 iterators[next_ix]
4607 .next()
4608 .map(
4609 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4610 diagnostic,
4611 range: FromAnchor::from_anchor(&range.start, self)
4612 ..FromAnchor::from_anchor(&range.end, self),
4613 },
4614 )
4615 })
4616 }
4617
4618 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4619 /// should be used instead.
4620 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4621 &self.diagnostics
4622 }
4623
4624 /// Returns all the diagnostic groups associated with the given
4625 /// language server ID. If no language server ID is provided,
4626    /// all diagnostic groups are returned.
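    ///
    /// A hedged sketch (not a doctest): printing each group's primary entry,
    /// assuming `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{:?}: {}", server_id, primary.diagnostic.message);
    /// }
    /// ```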
4627 pub fn diagnostic_groups(
4628 &self,
4629 language_server_id: Option<LanguageServerId>,
4630 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4631 let mut groups = Vec::new();
4632
4633 if let Some(language_server_id) = language_server_id {
4634 if let Ok(ix) = self
4635 .diagnostics
4636 .binary_search_by_key(&language_server_id, |e| e.0)
4637 {
4638 self.diagnostics[ix]
4639 .1
4640 .groups(language_server_id, &mut groups, self);
4641 }
4642 } else {
4643 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4644 diagnostics.groups(*language_server_id, &mut groups, self);
4645 }
4646 }
4647
4648 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4649 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4650 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4651 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4652 });
4653
4654 groups
4655 }
4656
4657 /// Returns an iterator over the diagnostics for the given group.
4658 pub fn diagnostic_group<O>(
4659 &self,
4660 group_id: usize,
4661 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4662 where
4663 O: FromAnchor + 'static,
4664 {
4665 self.diagnostics
4666 .iter()
4667 .flat_map(move |(_, set)| set.group(group_id, self))
4668 }
4669
4670 /// An integer version number that accounts for all updates besides
4671 /// the buffer's text itself (which is versioned via a version vector).
4672 pub fn non_text_state_update_count(&self) -> usize {
4673 self.non_text_state_update_count
4674 }
4675
4676 /// An integer version that changes when the buffer's syntax changes.
4677 pub fn syntax_update_count(&self) -> usize {
4678 self.syntax.update_count()
4679 }
4680
4681    /// Returns a snapshot of the underlying file.
4682 pub fn file(&self) -> Option<&Arc<dyn File>> {
4683 self.file.as_ref()
4684 }
4685
4686 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4687 if let Some(file) = self.file() {
4688 if file.path().file_name().is_none() || include_root {
4689 Some(file.full_path(cx).to_string_lossy().into_owned())
4690 } else {
4691 Some(file.path().display(file.path_style(cx)).to_string())
4692 }
4693 } else {
4694 None
4695 }
4696 }
4697
4698 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4699 let query_str = query.fuzzy_contents;
4700 if query_str.is_some_and(|query| query.is_empty()) {
4701 return BTreeMap::default();
4702 }
4703
4704 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4705 language,
4706 override_id: None,
4707 }));
4708
4709 let mut query_ix = 0;
4710 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4711 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4712
4713 let mut words = BTreeMap::default();
4714 let mut current_word_start_ix = None;
4715 let mut chunk_ix = query.range.start;
4716 for chunk in self.chunks(query.range, false) {
4717 for (i, c) in chunk.text.char_indices() {
4718 let ix = chunk_ix + i;
4719 if classifier.is_word(c) {
4720 if current_word_start_ix.is_none() {
4721 current_word_start_ix = Some(ix);
4722 }
4723
4724 if let Some(query_chars) = &query_chars
4725 && query_ix < query_len
4726 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4727 {
4728 query_ix += 1;
4729 }
4730 continue;
4731 } else if let Some(word_start) = current_word_start_ix.take()
4732 && query_ix == query_len
4733 {
4734 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4735 let mut word_text = self.text_for_range(word_start..ix).peekable();
4736 let first_char = word_text
4737 .peek()
4738 .and_then(|first_chunk| first_chunk.chars().next());
4739                        // As a heuristic to reduce useless completions, skip empty words and "words" that start with a digit.
4740 if !query.skip_digits
4741 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4742 {
4743 words.insert(word_text.collect(), word_range);
4744 }
4745 }
4746 query_ix = 0;
4747 }
4748 chunk_ix += chunk.text.len();
4749 }
4750
4751 words
4752 }
4753}
4754
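/// Parameters for `words_in_range`.
///
/// A hedged sketch (not a doctest): collecting words from the whole buffer that
/// fuzzily match "con", skipping digit-initial words, assuming `snapshot` is a
/// `BufferSnapshot`:
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("con"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// for (word, _anchor_range) in &words {
///     println!("{word}");
/// }
/// ```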
4755pub struct WordsQuery<'a> {
4756    /// When set, only return words that contain every character of this fuzzy string.
4757 pub fuzzy_contents: Option<&'a str>,
4758 /// Skips words that start with a digit.
4759 pub skip_digits: bool,
4760    /// The buffer offset range in which to look for words.
4761 pub range: Range<usize>,
4762}
4763
4764fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4765 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4766}
4767
4768fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4769 let mut result = IndentSize::spaces(0);
4770 for c in text {
4771 let kind = match c {
4772 ' ' => IndentKind::Space,
4773 '\t' => IndentKind::Tab,
4774 _ => break,
4775 };
4776 if result.len == 0 {
4777 result.kind = kind;
4778 }
4779 result.len += 1;
4780 }
4781 result
4782}
4783
4784impl Clone for BufferSnapshot {
4785 fn clone(&self) -> Self {
4786 Self {
4787 text: self.text.clone(),
4788 syntax: self.syntax.clone(),
4789 file: self.file.clone(),
4790 remote_selections: self.remote_selections.clone(),
4791 diagnostics: self.diagnostics.clone(),
4792 language: self.language.clone(),
4793 non_text_state_update_count: self.non_text_state_update_count,
4794 }
4795 }
4796}
4797
4798impl Deref for BufferSnapshot {
4799 type Target = text::BufferSnapshot;
4800
4801 fn deref(&self) -> &Self::Target {
4802 &self.text
4803 }
4804}
4805
4806unsafe impl Send for BufferChunks<'_> {}
4807
4808impl<'a> BufferChunks<'a> {
4809 pub(crate) fn new(
4810 text: &'a Rope,
4811 range: Range<usize>,
4812 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4813 diagnostics: bool,
4814 buffer_snapshot: Option<&'a BufferSnapshot>,
4815 ) -> Self {
4816 let mut highlights = None;
4817 if let Some((captures, highlight_maps)) = syntax {
4818 highlights = Some(BufferChunkHighlights {
4819 captures,
4820 next_capture: None,
4821 stack: Default::default(),
4822 highlight_maps,
4823 })
4824 }
4825
4826 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4827 let chunks = text.chunks_in_range(range.clone());
4828
4829 let mut this = BufferChunks {
4830 range,
4831 buffer_snapshot,
4832 chunks,
4833 diagnostic_endpoints,
4834 error_depth: 0,
4835 warning_depth: 0,
4836 information_depth: 0,
4837 hint_depth: 0,
4838 unnecessary_depth: 0,
4839 underline: true,
4840 highlights,
4841 };
4842 this.initialize_diagnostic_endpoints();
4843 this
4844 }
4845
4846    /// Seeks to the given byte range in the buffer.
4847 pub fn seek(&mut self, range: Range<usize>) {
4848 let old_range = std::mem::replace(&mut self.range, range.clone());
4849 self.chunks.set_range(self.range.clone());
4850 if let Some(highlights) = self.highlights.as_mut() {
4851 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4852 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4853 highlights
4854 .stack
4855 .retain(|(end_offset, _)| *end_offset > range.start);
4856 if let Some(capture) = &highlights.next_capture
4857 && range.start >= capture.node.start_byte()
4858 {
4859 let next_capture_end = capture.node.end_byte();
4860 if range.start < next_capture_end {
4861 highlights.stack.push((
4862 next_capture_end,
4863 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4864 ));
4865 }
4866 highlights.next_capture.take();
4867 }
4868 } else if let Some(snapshot) = self.buffer_snapshot {
4869 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4870 *highlights = BufferChunkHighlights {
4871 captures,
4872 next_capture: None,
4873 stack: Default::default(),
4874 highlight_maps,
4875 };
4876 } else {
4877 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4878 // Seeking such BufferChunks is not supported.
4879 debug_assert!(
4880 false,
4881 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4882 );
4883 }
4884
4885 highlights.captures.set_byte_range(self.range.clone());
4886 self.initialize_diagnostic_endpoints();
4887 }
4888 }
4889
4890 fn initialize_diagnostic_endpoints(&mut self) {
4891 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4892 && let Some(buffer) = self.buffer_snapshot
4893 {
4894 let mut diagnostic_endpoints = Vec::new();
4895 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4896 diagnostic_endpoints.push(DiagnosticEndpoint {
4897 offset: entry.range.start,
4898 is_start: true,
4899 severity: entry.diagnostic.severity,
4900 is_unnecessary: entry.diagnostic.is_unnecessary,
4901 underline: entry.diagnostic.underline,
4902 });
4903 diagnostic_endpoints.push(DiagnosticEndpoint {
4904 offset: entry.range.end,
4905 is_start: false,
4906 severity: entry.diagnostic.severity,
4907 is_unnecessary: entry.diagnostic.is_unnecessary,
4908 underline: entry.diagnostic.underline,
4909 });
4910 }
4911 diagnostic_endpoints
4912 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4913 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4914 self.hint_depth = 0;
4915 self.error_depth = 0;
4916 self.warning_depth = 0;
4917 self.information_depth = 0;
4918 }
4919 }
4920
4921 /// The current byte offset in the buffer.
4922 pub fn offset(&self) -> usize {
4923 self.range.start
4924 }
4925
4926 pub fn range(&self) -> Range<usize> {
4927 self.range.clone()
4928 }
4929
4930 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4931 let depth = match endpoint.severity {
4932 DiagnosticSeverity::ERROR => &mut self.error_depth,
4933 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4934 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4935 DiagnosticSeverity::HINT => &mut self.hint_depth,
4936 _ => return,
4937 };
4938 if endpoint.is_start {
4939 *depth += 1;
4940 } else {
4941 *depth -= 1;
4942 }
4943
4944 if endpoint.is_unnecessary {
4945 if endpoint.is_start {
4946 self.unnecessary_depth += 1;
4947 } else {
4948 self.unnecessary_depth -= 1;
4949 }
4950 }
4951 }
4952
4953 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4954 if self.error_depth > 0 {
4955 Some(DiagnosticSeverity::ERROR)
4956 } else if self.warning_depth > 0 {
4957 Some(DiagnosticSeverity::WARNING)
4958 } else if self.information_depth > 0 {
4959 Some(DiagnosticSeverity::INFORMATION)
4960 } else if self.hint_depth > 0 {
4961 Some(DiagnosticSeverity::HINT)
4962 } else {
4963 None
4964 }
4965 }
4966
4967 fn current_code_is_unnecessary(&self) -> bool {
4968 self.unnecessary_depth > 0
4969 }
4970}
4971
4972impl<'a> Iterator for BufferChunks<'a> {
4973 type Item = Chunk<'a>;
4974
4975 fn next(&mut self) -> Option<Self::Item> {
4976 let mut next_capture_start = usize::MAX;
4977 let mut next_diagnostic_endpoint = usize::MAX;
4978
4979 if let Some(highlights) = self.highlights.as_mut() {
4980 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4981 if *parent_capture_end <= self.range.start {
4982 highlights.stack.pop();
4983 } else {
4984 break;
4985 }
4986 }
4987
4988 if highlights.next_capture.is_none() {
4989 highlights.next_capture = highlights.captures.next();
4990 }
4991
4992 while let Some(capture) = highlights.next_capture.as_ref() {
4993 if self.range.start < capture.node.start_byte() {
4994 next_capture_start = capture.node.start_byte();
4995 break;
4996 } else {
4997 let highlight_id =
4998 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4999 highlights
5000 .stack
5001 .push((capture.node.end_byte(), highlight_id));
5002 highlights.next_capture = highlights.captures.next();
5003 }
5004 }
5005 }
5006
5007 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5008 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5009 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5010 if endpoint.offset <= self.range.start {
5011 self.update_diagnostic_depths(endpoint);
5012 diagnostic_endpoints.next();
5013 self.underline = endpoint.underline;
5014 } else {
5015 next_diagnostic_endpoint = endpoint.offset;
5016 break;
5017 }
5018 }
5019 }
5020 self.diagnostic_endpoints = diagnostic_endpoints;
5021
5022 if let Some(ChunkBitmaps {
5023 text: chunk,
5024 chars: chars_map,
5025 tabs,
5026 }) = self.chunks.peek_with_bitmaps()
5027 {
5028 let chunk_start = self.range.start;
5029 let mut chunk_end = (self.chunks.offset() + chunk.len())
5030 .min(next_capture_start)
5031 .min(next_diagnostic_endpoint);
5032 let mut highlight_id = None;
5033 if let Some(highlights) = self.highlights.as_ref()
5034 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5035 {
5036 chunk_end = chunk_end.min(*parent_capture_end);
5037 highlight_id = Some(*parent_highlight_id);
5038 }
5039 let bit_start = chunk_start - self.chunks.offset();
5040 let bit_end = chunk_end - self.chunks.offset();
5041
5042 let slice = &chunk[bit_start..bit_end];
5043
5044 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5045 let tabs = (tabs >> bit_start) & mask;
5046 let chars = (chars_map >> bit_start) & mask;
5047
5048 self.range.start = chunk_end;
5049 if self.range.start == self.chunks.offset() + chunk.len() {
5050 self.chunks.next().unwrap();
5051 }
5052
5053 Some(Chunk {
5054 text: slice,
5055 syntax_highlight_id: highlight_id,
5056 underline: self.underline,
5057 diagnostic_severity: self.current_diagnostic_severity(),
5058 is_unnecessary: self.current_code_is_unnecessary(),
5059 tabs,
5060 chars,
5061 ..Chunk::default()
5062 })
5063 } else {
5064 None
5065 }
5066 }
5067}
5068
5069impl operation_queue::Operation for Operation {
5070 fn lamport_timestamp(&self) -> clock::Lamport {
5071 match self {
5072 Operation::Buffer(_) => {
5073 unreachable!("buffer operations should never be deferred at this layer")
5074 }
5075 Operation::UpdateDiagnostics {
5076 lamport_timestamp, ..
5077 }
5078 | Operation::UpdateSelections {
5079 lamport_timestamp, ..
5080 }
5081 | Operation::UpdateCompletionTriggers {
5082 lamport_timestamp, ..
5083 }
5084 | Operation::UpdateLineEnding {
5085 lamport_timestamp, ..
5086 } => *lamport_timestamp,
5087 }
5088 }
5089}
5090
5091impl Default for Diagnostic {
5092 fn default() -> Self {
5093 Self {
5094 source: Default::default(),
5095 source_kind: DiagnosticSourceKind::Other,
5096 code: None,
5097 code_description: None,
5098 severity: DiagnosticSeverity::ERROR,
5099 message: Default::default(),
5100 markdown: None,
5101 group_id: 0,
5102 is_primary: false,
5103 is_disk_based: false,
5104 is_unnecessary: false,
5105 underline: true,
5106 data: None,
5107 }
5108 }
5109}
5110
5111impl IndentSize {
5112    /// Returns an [`IndentSize`] representing the given number of spaces.
5113 pub fn spaces(len: u32) -> Self {
5114 Self {
5115 len,
5116 kind: IndentKind::Space,
5117 }
5118 }
5119
5120 /// Returns an [`IndentSize`] representing a tab.
5121 pub fn tab() -> Self {
5122 Self {
5123 len: 1,
5124 kind: IndentKind::Tab,
5125 }
5126 }
5127
5128 /// An iterator over the characters represented by this [`IndentSize`].
5129 pub fn chars(&self) -> impl Iterator<Item = char> {
5130 iter::repeat(self.char()).take(self.len as usize)
5131 }
5132
5133 /// The character representation of this [`IndentSize`].
5134 pub fn char(&self) -> char {
5135 match self.kind {
5136 IndentKind::Space => ' ',
5137 IndentKind::Tab => '\t',
5138 }
5139 }
5140
5141 /// Consumes the current [`IndentSize`] and returns a new one that has
5142 /// been shrunk or enlarged by the given size along the given direction.
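    ///
    /// A hedged sketch (not a doctest):
    ///
    /// ```ignore
    /// use std::{cmp::Ordering, num::NonZeroU32};
    ///
    /// let tab_size = NonZeroU32::new(4).unwrap();
    /// let indent = IndentSize::spaces(4);
    /// // Growing a space indent by two spaces yields six columns.
    /// let grown = indent.with_delta(Ordering::Greater, IndentSize::spaces(2));
    /// assert_eq!(grown.len_with_expanded_tabs(tab_size), 6);
    /// // Shrinking only applies when the kinds match and there is enough to remove.
    /// let shrunk = grown.with_delta(Ordering::Less, IndentSize::spaces(4));
    /// assert_eq!(shrunk.len_with_expanded_tabs(tab_size), 2);
    /// ```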
5143 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5144 match direction {
5145 Ordering::Less => {
5146 if self.kind == size.kind && self.len >= size.len {
5147 self.len -= size.len;
5148 }
5149 }
5150 Ordering::Equal => {}
5151 Ordering::Greater => {
5152 if self.len == 0 {
5153 self = size;
5154 } else if self.kind == size.kind {
5155 self.len += size.len;
5156 }
5157 }
5158 }
5159 self
5160 }
5161
5162 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5163 match self.kind {
5164 IndentKind::Space => self.len as usize,
5165 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5166 }
5167 }
5168}
5169
5170#[cfg(any(test, feature = "test-support"))]
5171pub struct TestFile {
5172 pub path: Arc<RelPath>,
5173 pub root_name: String,
5174 pub local_root: Option<PathBuf>,
5175}
5176
5177#[cfg(any(test, feature = "test-support"))]
5178impl File for TestFile {
5179 fn path(&self) -> &Arc<RelPath> {
5180 &self.path
5181 }
5182
5183 fn full_path(&self, _: &gpui::App) -> PathBuf {
5184 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5185 }
5186
5187 fn as_local(&self) -> Option<&dyn LocalFile> {
5188 if self.local_root.is_some() {
5189 Some(self)
5190 } else {
5191 None
5192 }
5193 }
5194
5195 fn disk_state(&self) -> DiskState {
5196 unimplemented!()
5197 }
5198
5199 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5200 self.path().file_name().unwrap_or(self.root_name.as_ref())
5201 }
5202
5203 fn worktree_id(&self, _: &App) -> WorktreeId {
5204 WorktreeId::from_usize(0)
5205 }
5206
5207 fn to_proto(&self, _: &App) -> rpc::proto::File {
5208 unimplemented!()
5209 }
5210
5211 fn is_private(&self) -> bool {
5212 false
5213 }
5214
5215 fn path_style(&self, _cx: &App) -> PathStyle {
5216 PathStyle::local()
5217 }
5218}
5219
5220#[cfg(any(test, feature = "test-support"))]
5221impl LocalFile for TestFile {
5222 fn abs_path(&self, _cx: &App) -> PathBuf {
5223 PathBuf::from(self.local_root.as_ref().unwrap())
5224 .join(&self.root_name)
5225 .join(self.path.as_std_path())
5226 }
5227
5228 fn load(&self, _cx: &App) -> Task<Result<String>> {
5229 unimplemented!()
5230 }
5231
5232 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5233 unimplemented!()
5234 }
5235}
5236
5237pub(crate) fn contiguous_ranges(
5238 values: impl Iterator<Item = u32>,
5239 max_len: usize,
5240) -> impl Iterator<Item = Range<u32>> {
5241 let mut values = values;
5242 let mut current_range: Option<Range<u32>> = None;
5243 std::iter::from_fn(move || {
5244 loop {
5245 if let Some(value) = values.next() {
5246 if let Some(range) = &mut current_range
5247 && value == range.end
5248 && range.len() < max_len
5249 {
5250 range.end += 1;
5251 continue;
5252 }
5253
5254 let prev_range = current_range.clone();
5255 current_range = Some(value..(value + 1));
5256 if prev_range.is_some() {
5257 return prev_range;
5258 }
5259 } else {
5260 return current_range.take();
5261 }
5262 }
5263 })
5264}
5265
5266#[derive(Default, Debug)]
5267pub struct CharClassifier {
5268 scope: Option<LanguageScope>,
5269 scope_context: Option<CharScopeContext>,
5270 ignore_punctuation: bool,
5271}
5272
5273impl CharClassifier {
5274 pub fn new(scope: Option<LanguageScope>) -> Self {
5275 Self {
5276 scope,
5277 scope_context: None,
5278 ignore_punctuation: false,
5279 }
5280 }
5281
5282 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5283 Self {
5284 scope_context,
5285 ..self
5286 }
5287 }
5288
5289 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5290 Self {
5291 ignore_punctuation,
5292 ..self
5293 }
5294 }
5295
5296 pub fn is_whitespace(&self, c: char) -> bool {
5297 self.kind(c) == CharKind::Whitespace
5298 }
5299
5300 pub fn is_word(&self, c: char) -> bool {
5301 self.kind(c) == CharKind::Word
5302 }
5303
5304 pub fn is_punctuation(&self, c: char) -> bool {
5305 self.kind(c) == CharKind::Punctuation
5306 }
5307
5308 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5309 if c.is_alphanumeric() || c == '_' {
5310 return CharKind::Word;
5311 }
5312
5313 if let Some(scope) = &self.scope {
5314 let characters = match self.scope_context {
5315 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5316 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5317 None => scope.word_characters(),
5318 };
5319 if let Some(characters) = characters
5320 && characters.contains(&c)
5321 {
5322 return CharKind::Word;
5323 }
5324 }
5325
5326 if c.is_whitespace() {
5327 return CharKind::Whitespace;
5328 }
5329
5330 if ignore_punctuation {
5331 CharKind::Word
5332 } else {
5333 CharKind::Punctuation
5334 }
5335 }
5336
5337 pub fn kind(&self, c: char) -> CharKind {
5338 self.kind_with(c, self.ignore_punctuation)
5339 }
5340}
5341
5342/// Find all of the ranges of whitespace that occur at the ends of lines
5343/// in the given rope.
5344///
5345/// This could also be done with a regex search, but this implementation
5346/// avoids copying text.
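///
/// A hedged sketch (not a doctest), assuming [`Rope`] can be built from a `&str`:
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n    let x = 1;\t\t\n}\n");
/// let ranges = trailing_whitespace_ranges(&rope);
/// // One range per line that ends in spaces or tabs.
/// assert_eq!(ranges.len(), 2);
/// ```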
5347pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5348 let mut ranges = Vec::new();
5349
5350 let mut offset = 0;
5351 let mut prev_chunk_trailing_whitespace_range = 0..0;
5352 for chunk in rope.chunks() {
5353 let mut prev_line_trailing_whitespace_range = 0..0;
5354 for (i, line) in chunk.split('\n').enumerate() {
5355 let line_end_offset = offset + line.len();
5356 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5357 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5358
5359 if i == 0 && trimmed_line_len == 0 {
5360 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5361 }
5362 if !prev_line_trailing_whitespace_range.is_empty() {
5363 ranges.push(prev_line_trailing_whitespace_range);
5364 }
5365
5366 offset = line_end_offset + 1;
5367 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5368 }
5369
5370 offset -= 1;
5371 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5372 }
5373
5374 if !prev_chunk_trailing_whitespace_range.is_empty() {
5375 ranges.push(prev_chunk_trailing_whitespace_range);
5376 }
5377
5378 ranges
5379}