1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 future::Future,
45 iter::{self, Iterator, Peekable},
46 mem,
47 num::NonZeroU32,
48 ops::{Deref, Range},
49 path::PathBuf,
50 rc,
51 sync::{Arc, LazyLock},
52 time::{Duration, Instant},
53 vec,
54};
55use sum_tree::TreeMap;
56use text::operation_queue::OperationQueue;
57use text::*;
58pub use text::{
59 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
60 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
61 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
62 ToPointUtf16, Transaction, TransactionId, Unclipped,
63};
64use theme::{ActiveTheme as _, SyntaxTheme};
65#[cfg(any(test, feature = "test-support"))]
66use util::RandomCharIter;
67use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
68
69#[cfg(any(test, feature = "test-support"))]
70pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
71
72pub use lsp::DiagnosticSeverity;
73
74/// A label for the background task spawned by the buffer to compute
75/// a diff against the contents of its file.
76pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
77
/// Indicates whether a [`Buffer`] has permission to edit.
79#[derive(PartialEq, Clone, Copy, Debug)]
80pub enum Capability {
81 /// The buffer is a mutable replica.
82 ReadWrite,
83 /// The buffer is a read-only replica.
84 ReadOnly,
85}
86
87pub type BufferRow = u32;
88
89/// An in-memory representation of a source code file, including its text,
90/// syntax trees, git status, and diagnostics.
91pub struct Buffer {
92 text: TextBuffer,
93 branch_state: Option<BufferBranchState>,
94 /// Filesystem state, `None` when there is no path.
95 file: Option<Arc<dyn File>>,
96 /// The mtime of the file when this buffer was last loaded from
97 /// or saved to disk.
98 saved_mtime: Option<MTime>,
99 /// The version vector when this buffer was last loaded from
100 /// or saved to disk.
101 saved_version: clock::Global,
102 preview_version: clock::Global,
103 transaction_depth: usize,
104 was_dirty_before_starting_transaction: Option<bool>,
105 reload_task: Option<Task<Result<()>>>,
106 language: Option<Arc<Language>>,
107 autoindent_requests: Vec<Arc<AutoindentRequest>>,
108 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
109 pending_autoindent: Option<Task<()>>,
110 sync_parse_timeout: Duration,
111 syntax_map: Mutex<SyntaxMap>,
112 reparse: Option<Task<()>>,
113 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
114 non_text_state_update_count: usize,
115 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
116 remote_selections: TreeMap<ReplicaId, SelectionSet>,
117 diagnostics_timestamp: clock::Lamport,
118 completion_triggers: BTreeSet<String>,
119 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
120 completion_triggers_timestamp: clock::Lamport,
121 deferred_ops: OperationQueue<Operation>,
122 capability: Capability,
123 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` as of the last call.
126 has_unsaved_edits: Cell<(clock::Global, bool)>,
127 change_bits: Vec<rc::Weak<Cell<bool>>>,
128 _subscriptions: Vec<gpui::Subscription>,
129}
130
131#[derive(Copy, Clone, Debug, PartialEq, Eq)]
132pub enum ParseStatus {
133 Idle,
134 Parsing,
135}
136
137struct BufferBranchState {
138 base_buffer: Entity<Buffer>,
139 merged_operations: Vec<Lamport>,
140}
141
142/// An immutable, cheaply cloneable representation of a fixed
143/// state of a buffer.
144pub struct BufferSnapshot {
145 pub text: text::BufferSnapshot,
146 pub syntax: SyntaxSnapshot,
147 file: Option<Arc<dyn File>>,
148 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
149 remote_selections: TreeMap<ReplicaId, SelectionSet>,
150 language: Option<Arc<Language>>,
151 non_text_state_update_count: usize,
152}
153
154/// The kind and amount of indentation in a particular line. For now,
155/// assumes that indentation is all the same character.
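///
/// For example, `IndentSize { len: 4, kind: IndentKind::Space }` describes a line
/// indented with four spaces.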
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
157pub struct IndentSize {
158 /// The number of bytes that comprise the indentation.
159 pub len: u32,
160 /// The kind of whitespace used for indentation.
161 pub kind: IndentKind,
162}
163
164/// A whitespace character that's used for indentation.
165#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
166pub enum IndentKind {
167 /// An ASCII space character.
168 #[default]
169 Space,
170 /// An ASCII tab character.
171 Tab,
172}
173
174/// The shape of a selection cursor.
175#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
176pub enum CursorShape {
177 /// A vertical bar
178 #[default]
179 Bar,
180 /// A block that surrounds the following character
181 Block,
182 /// An underline that runs along the following character
183 Underline,
184 /// A box drawn around the following character
185 Hollow,
186}
187
188impl From<settings::CursorShape> for CursorShape {
189 fn from(shape: settings::CursorShape) -> Self {
190 match shape {
191 settings::CursorShape::Bar => CursorShape::Bar,
192 settings::CursorShape::Block => CursorShape::Block,
193 settings::CursorShape::Underline => CursorShape::Underline,
194 settings::CursorShape::Hollow => CursorShape::Hollow,
195 }
196 }
197}
198
199#[derive(Clone, Debug)]
200struct SelectionSet {
201 line_mode: bool,
202 cursor_shape: CursorShape,
203 selections: Arc<[Selection<Anchor>]>,
204 lamport_timestamp: clock::Lamport,
205}
206
207/// A diagnostic associated with a certain range of a buffer.
208#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
209pub struct Diagnostic {
210 /// The name of the service that produced this diagnostic.
211 pub source: Option<String>,
212 /// A machine-readable code that identifies this diagnostic.
213 pub code: Option<NumberOrString>,
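    /// A URI that links to more detailed documentation about this diagnostic.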
214 pub code_description: Option<lsp::Uri>,
215 /// Whether this diagnostic is a hint, warning, or error.
216 pub severity: DiagnosticSeverity,
217 /// The human-readable message associated with this diagnostic.
218 pub message: String,
    /// The human-readable message, in Markdown format, if available.
220 pub markdown: Option<String>,
221 /// An id that identifies the group to which this diagnostic belongs.
222 ///
223 /// When a language server produces a diagnostic with
224 /// one or more associated diagnostics, those diagnostics are all
225 /// assigned a single group ID.
226 pub group_id: usize,
227 /// Whether this diagnostic is the primary diagnostic for its group.
228 ///
229 /// In a given group, the primary diagnostic is the top-level diagnostic
230 /// returned by the language server. The non-primary diagnostics are the
231 /// associated diagnostics.
232 pub is_primary: bool,
233 /// Whether this diagnostic is considered to originate from an analysis of
234 /// files on disk, as opposed to any unsaved buffer contents. This is a
235 /// property of a given diagnostic source, and is configured for a given
236 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
237 /// for the language server.
238 pub is_disk_based: bool,
239 /// Whether this diagnostic marks unnecessary code.
240 pub is_unnecessary: bool,
    /// A quick way to distinguish groups of diagnostics by their source.
242 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. This is passed
    /// back to the language server when code actions are requested for this diagnostic.
244 pub data: Option<Value>,
245 /// Whether to underline the corresponding text range in the editor.
246 pub underline: bool,
247}
248
249#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
250pub enum DiagnosticSourceKind {
251 Pulled,
252 Pushed,
253 Other,
254}
255
256/// An operation used to synchronize this buffer with its other replicas.
257#[derive(Clone, Debug, PartialEq)]
258pub enum Operation {
259 /// A text operation.
260 Buffer(text::Operation),
261
262 /// An update to the buffer's diagnostics.
263 UpdateDiagnostics {
264 /// The id of the language server that produced the new diagnostics.
265 server_id: LanguageServerId,
266 /// The diagnostics.
267 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
268 /// The buffer's lamport timestamp.
269 lamport_timestamp: clock::Lamport,
270 },
271
272 /// An update to the most recent selections in this buffer.
273 UpdateSelections {
274 /// The selections.
275 selections: Arc<[Selection<Anchor>]>,
276 /// The buffer's lamport timestamp.
277 lamport_timestamp: clock::Lamport,
278 /// Whether the selections are in 'line mode'.
279 line_mode: bool,
280 /// The [`CursorShape`] associated with these selections.
281 cursor_shape: CursorShape,
282 },
283
284 /// An update to the characters that should trigger autocompletion
285 /// for this buffer.
286 UpdateCompletionTriggers {
287 /// The characters that trigger autocompletion.
288 triggers: Vec<String>,
289 /// The buffer's lamport timestamp.
290 lamport_timestamp: clock::Lamport,
291 /// The language server ID.
292 server_id: LanguageServerId,
293 },
294
295 /// An update to the line ending type of this buffer.
296 UpdateLineEnding {
297 /// The line ending type.
298 line_ending: LineEnding,
299 /// The buffer's lamport timestamp.
300 lamport_timestamp: clock::Lamport,
301 },
302}
303
304/// An event that occurs in a buffer.
305#[derive(Clone, Debug, PartialEq)]
306pub enum BufferEvent {
307 /// The buffer was changed in a way that must be
308 /// propagated to its other replicas.
309 Operation {
310 operation: Operation,
311 is_local: bool,
312 },
313 /// The buffer was edited.
314 Edited,
315 /// The buffer's `dirty` bit changed.
316 DirtyChanged,
317 /// The buffer was saved.
318 Saved,
319 /// The buffer's file was changed on disk.
320 FileHandleChanged,
321 /// The buffer was reloaded.
322 Reloaded,
    /// The buffer needs to be reloaded.
324 ReloadNeeded,
325 /// The buffer's language was changed.
326 LanguageChanged,
327 /// The buffer's syntax trees were updated.
328 Reparsed,
329 /// The buffer's diagnostics were updated.
330 DiagnosticsUpdated,
331 /// The buffer gained or lost editing capabilities.
332 CapabilityChanged,
333}
334
335/// The file associated with a buffer.
336pub trait File: Send + Sync + Any {
337 /// Returns the [`LocalFile`] associated with this file, if the
338 /// file is local.
339 fn as_local(&self) -> Option<&dyn LocalFile>;
340
341 /// Returns whether this file is local.
342 fn is_local(&self) -> bool {
343 self.as_local().is_some()
344 }
345
346 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
347 /// only available in some states, such as modification time.
348 fn disk_state(&self) -> DiskState;
349
350 /// Returns the path of this file relative to the worktree's root directory.
351 fn path(&self) -> &Arc<RelPath>;
352
353 /// Returns the path of this file relative to the worktree's parent directory (this means it
354 /// includes the name of the worktree's root folder).
355 fn full_path(&self, cx: &App) -> PathBuf;
356
357 /// Returns the path style of this file.
358 fn path_style(&self, cx: &App) -> PathStyle;
359
360 /// Returns the last component of this handle's absolute path. If this handle refers to the root
361 /// of its worktree, then this method will return the name of the worktree itself.
362 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
363
364 /// Returns the id of the worktree to which this file belongs.
365 ///
366 /// This is needed for looking up project-specific settings.
367 fn worktree_id(&self, cx: &App) -> WorktreeId;
368
369 /// Converts this file into a protobuf message.
370 fn to_proto(&self, cx: &App) -> rpc::proto::File;
371
    /// Returns whether Zed considers this to be a private file.
373 fn is_private(&self) -> bool;
374}
375
376/// The file's storage status - whether it's stored (`Present`), and if so when it was last
377/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
378/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
379/// indicator for new files.
380#[derive(Copy, Clone, Debug, PartialEq)]
381pub enum DiskState {
382 /// File created in Zed that has not been saved.
383 New,
384 /// File present on the filesystem.
385 Present { mtime: MTime },
386 /// Deleted file that was previously present.
387 Deleted,
388}
389
390impl DiskState {
391 /// Returns the file's last known modification time on disk.
392 pub fn mtime(self) -> Option<MTime> {
393 match self {
394 DiskState::New => None,
395 DiskState::Present { mtime } => Some(mtime),
396 DiskState::Deleted => None,
397 }
398 }
399
400 pub fn exists(&self) -> bool {
401 match self {
402 DiskState::New => false,
403 DiskState::Present { .. } => true,
404 DiskState::Deleted => false,
405 }
406 }
407}
408
409/// The file associated with a buffer, in the case where the file is on the local disk.
410pub trait LocalFile: File {
411 /// Returns the absolute path of this file
412 fn abs_path(&self, cx: &App) -> PathBuf;
413
414 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
415 fn load(&self, cx: &App) -> Task<Result<String>>;
416
417 /// Loads the file's contents from disk.
418 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
419}
420
421/// The auto-indent behavior associated with an editing operation.
422/// For some editing operations, each affected line of text has its
423/// indentation recomputed. For other operations, the entire block
424/// of edited text is adjusted uniformly.
425#[derive(Clone, Debug)]
426pub enum AutoindentMode {
427 /// Indent each line of inserted text.
428 EachLine,
429 /// Apply the same indentation adjustment to all of the lines
430 /// in a given insertion.
431 Block {
432 /// The original indentation column of the first line of each
433 /// insertion, if it has been copied.
434 ///
435 /// Knowing this makes it possible to preserve the relative indentation
436 /// of every line in the insertion from when it was copied.
437 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is then auto-indented to column `b`, then every other
        /// line of the insertion has its indentation adjusted by `b - a`.
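        ///
        /// For example, if a block was copied whose first line was indented to
        /// column 4 (`a = 4`), and that line is auto-indented to column 8
        /// (`b = 8`) on insertion, then every other line of the insertion is
        /// shifted right by 4 columns, preserving its internal structure.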
441 original_indent_columns: Vec<Option<u32>>,
442 },
443}
444
445#[derive(Clone)]
446struct AutoindentRequest {
447 before_edit: BufferSnapshot,
448 entries: Vec<AutoindentRequestEntry>,
449 is_block_mode: bool,
450 ignore_empty_lines: bool,
451}
452
453#[derive(Debug, Clone)]
454struct AutoindentRequestEntry {
455 /// A range of the buffer whose indentation should be adjusted.
456 range: Range<Anchor>,
457 /// Whether or not these lines should be considered brand new, for the
458 /// purpose of auto-indent. When text is not new, its indentation will
459 /// only be adjusted if the suggested indentation level has *changed*
460 /// since the edit was made.
461 first_line_is_new: bool,
462 indent_size: IndentSize,
463 original_indent_column: Option<u32>,
464}
465
466#[derive(Debug)]
467struct IndentSuggestion {
468 basis_row: u32,
469 delta: Ordering,
470 within_error: bool,
471}
472
473struct BufferChunkHighlights<'a> {
474 captures: SyntaxMapCaptures<'a>,
475 next_capture: Option<SyntaxMapCapture<'a>>,
476 stack: Vec<(usize, HighlightId)>,
477 highlight_maps: Vec<HighlightMap>,
478}
479
480/// An iterator that yields chunks of a buffer's text, along with their
481/// syntax highlights and diagnostic status.
482pub struct BufferChunks<'a> {
483 buffer_snapshot: Option<&'a BufferSnapshot>,
484 range: Range<usize>,
485 chunks: text::Chunks<'a>,
486 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
487 error_depth: usize,
488 warning_depth: usize,
489 information_depth: usize,
490 hint_depth: usize,
491 unnecessary_depth: usize,
492 underline: bool,
493 highlights: Option<BufferChunkHighlights<'a>>,
494}
495
496/// A chunk of a buffer's text, along with its syntax highlight and
497/// diagnostic status.
498#[derive(Clone, Debug, Default)]
499pub struct Chunk<'a> {
500 /// The text of the chunk.
501 pub text: &'a str,
502 /// The syntax highlighting style of the chunk.
503 pub syntax_highlight_id: Option<HighlightId>,
504 /// The highlight style that has been applied to this chunk in
505 /// the editor.
506 pub highlight_style: Option<HighlightStyle>,
507 /// The severity of diagnostic associated with this chunk, if any.
508 pub diagnostic_severity: Option<DiagnosticSeverity>,
509 /// A bitset of which characters are tabs in this string.
510 pub tabs: u128,
    /// A bitmap of character indices in this chunk.
512 pub chars: u128,
513 /// Whether this chunk of text is marked as unnecessary.
514 pub is_unnecessary: bool,
515 /// Whether this chunk of text was originally a tab character.
516 pub is_tab: bool,
517 /// Whether this chunk of text was originally an inlay.
518 pub is_inlay: bool,
519 /// Whether to underline the corresponding text range in the editor.
520 pub underline: bool,
521}
522
523/// A set of edits to a given version of a buffer, computed asynchronously.
524#[derive(Debug)]
525pub struct Diff {
526 pub base_version: clock::Global,
527 pub line_ending: LineEnding,
528 pub edits: Vec<(Range<usize>, Arc<str>)>,
529}
530
531#[derive(Debug, Clone, Copy)]
532pub(crate) struct DiagnosticEndpoint {
533 offset: usize,
534 is_start: bool,
535 underline: bool,
536 severity: DiagnosticSeverity,
537 is_unnecessary: bool,
538}
539
540/// A class of characters, used for characterizing a run of text.
541#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
542pub enum CharKind {
543 /// Whitespace.
544 Whitespace,
545 /// Punctuation.
546 Punctuation,
547 /// Word.
548 Word,
549}
550
551/// Context for character classification within a specific scope.
552#[derive(Copy, Clone, Eq, PartialEq, Debug)]
553pub enum CharScopeContext {
554 /// Character classification for completion queries.
555 ///
556 /// This context treats certain characters as word constituents that would
557 /// normally be considered punctuation, such as '-' in Tailwind classes
558 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
559 Completion,
560 /// Character classification for linked edits.
561 ///
562 /// This context handles characters that should be treated as part of
563 /// identifiers during linked editing operations, such as '.' in JSX
564 /// component names like `<Animated.View>`.
565 LinkedEdit,
566}
567
/// A runnable is a set of data about a region that can be resolved into a task.
569pub struct Runnable {
570 pub tags: SmallVec<[RunnableTag; 1]>,
571 pub language: Arc<Language>,
572 pub buffer: BufferId,
573}
574
575#[derive(Default, Clone, Debug)]
576pub struct HighlightedText {
577 pub text: SharedString,
578 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
579}
580
581#[derive(Default, Debug)]
582struct HighlightedTextBuilder {
583 pub text: String,
584 highlights: Vec<(Range<usize>, HighlightStyle)>,
585}
586
587impl HighlightedText {
588 pub fn from_buffer_range<T: ToOffset>(
589 range: Range<T>,
590 snapshot: &text::BufferSnapshot,
591 syntax_snapshot: &SyntaxSnapshot,
592 override_style: Option<HighlightStyle>,
593 syntax_theme: &SyntaxTheme,
594 ) -> Self {
595 let mut highlighted_text = HighlightedTextBuilder::default();
596 highlighted_text.add_text_from_buffer_range(
597 range,
598 snapshot,
599 syntax_snapshot,
600 override_style,
601 syntax_theme,
602 );
603 highlighted_text.build()
604 }
605
606 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
607 gpui::StyledText::new(self.text.clone())
608 .with_default_highlights(default_style, self.highlights.iter().cloned())
609 }
610
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// begins within it, along with a boolean indicating whether more lines follow.
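    ///
    /// For example, given the text "    let x = 1;\nlet y = 2;" with no highlights,
    /// this returns a preview containing "let x = 1;" and `true`, since a second
    /// line follows.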
613 pub fn first_line_preview(self) -> (Self, bool) {
614 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
615 let first_line = &self.text[..newline_ix];
616
617 // Trim leading whitespace, unless an edit starts prior to it.
618 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
619 if let Some((first_highlight_range, _)) = self.highlights.first() {
620 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
621 }
622
623 let preview_text = &first_line[preview_start_ix..];
624 let preview_highlights = self
625 .highlights
626 .into_iter()
627 .skip_while(|(range, _)| range.end <= preview_start_ix)
628 .take_while(|(range, _)| range.start < newline_ix)
629 .filter_map(|(mut range, highlight)| {
630 range.start = range.start.saturating_sub(preview_start_ix);
631 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
632 if range.is_empty() {
633 None
634 } else {
635 Some((range, highlight))
636 }
637 });
638
639 let preview = Self {
640 text: SharedString::new(preview_text),
641 highlights: preview_highlights.collect(),
642 };
643
644 (preview, self.text.len() > newline_ix)
645 }
646}
647
648impl HighlightedTextBuilder {
649 pub fn build(self) -> HighlightedText {
650 HighlightedText {
651 text: self.text.into(),
652 highlights: self.highlights,
653 }
654 }
655
656 pub fn add_text_from_buffer_range<T: ToOffset>(
657 &mut self,
658 range: Range<T>,
659 snapshot: &text::BufferSnapshot,
660 syntax_snapshot: &SyntaxSnapshot,
661 override_style: Option<HighlightStyle>,
662 syntax_theme: &SyntaxTheme,
663 ) {
664 let range = range.to_offset(snapshot);
665 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
666 let start = self.text.len();
667 self.text.push_str(chunk.text);
668 let end = self.text.len();
669
670 if let Some(highlight_style) = chunk
671 .syntax_highlight_id
672 .and_then(|id| id.style(syntax_theme))
673 {
674 let highlight_style = override_style.map_or(highlight_style, |override_style| {
675 highlight_style.highlight(override_style)
676 });
677 self.highlights.push((start..end, highlight_style));
678 } else if let Some(override_style) = override_style {
679 self.highlights.push((start..end, override_style));
680 }
681 }
682 }
683
684 fn highlighted_chunks<'a>(
685 range: Range<usize>,
686 snapshot: &'a text::BufferSnapshot,
687 syntax_snapshot: &'a SyntaxSnapshot,
688 ) -> BufferChunks<'a> {
689 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
690 grammar
691 .highlights_config
692 .as_ref()
693 .map(|config| &config.query)
694 });
695
696 let highlight_maps = captures
697 .grammars()
698 .iter()
699 .map(|grammar| grammar.highlight_map())
700 .collect();
701
702 BufferChunks::new(
703 snapshot.as_rope(),
704 range,
705 Some((captures, highlight_maps)),
706 false,
707 None,
708 )
709 }
710}
711
712#[derive(Clone)]
713pub struct EditPreview {
714 old_snapshot: text::BufferSnapshot,
715 applied_edits_snapshot: text::BufferSnapshot,
716 syntax_snapshot: SyntaxSnapshot,
717}
718
719impl EditPreview {
720 pub fn highlight_edits(
721 &self,
722 current_snapshot: &BufferSnapshot,
723 edits: &[(Range<Anchor>, String)],
724 include_deletions: bool,
725 cx: &App,
726 ) -> HighlightedText {
727 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
728 return HighlightedText::default();
729 };
730
731 let mut highlighted_text = HighlightedTextBuilder::default();
732
733 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
734
735 let insertion_highlight_style = HighlightStyle {
736 background_color: Some(cx.theme().status().created_background),
737 ..Default::default()
738 };
739 let deletion_highlight_style = HighlightStyle {
740 background_color: Some(cx.theme().status().deleted_background),
741 ..Default::default()
742 };
743 let syntax_theme = cx.theme().syntax();
744
745 for (range, edit_text) in edits {
746 let edit_new_end_in_preview_snapshot = range
747 .end
748 .bias_right(&self.old_snapshot)
749 .to_offset(&self.applied_edits_snapshot);
750 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
751
752 let unchanged_range_in_preview_snapshot =
753 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
754 if !unchanged_range_in_preview_snapshot.is_empty() {
755 highlighted_text.add_text_from_buffer_range(
756 unchanged_range_in_preview_snapshot,
757 &self.applied_edits_snapshot,
758 &self.syntax_snapshot,
759 None,
760 syntax_theme,
761 );
762 }
763
764 let range_in_current_snapshot = range.to_offset(current_snapshot);
765 if include_deletions && !range_in_current_snapshot.is_empty() {
766 highlighted_text.add_text_from_buffer_range(
767 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
770 Some(deletion_highlight_style),
771 syntax_theme,
772 );
773 }
774
775 if !edit_text.is_empty() {
776 highlighted_text.add_text_from_buffer_range(
777 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
778 &self.applied_edits_snapshot,
779 &self.syntax_snapshot,
780 Some(insertion_highlight_style),
781 syntax_theme,
782 );
783 }
784
785 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
786 }
787
788 highlighted_text.add_text_from_buffer_range(
789 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
790 &self.applied_edits_snapshot,
791 &self.syntax_snapshot,
792 None,
793 syntax_theme,
794 );
795
796 highlighted_text.build()
797 }
798
799 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
800 let (first, _) = edits.first()?;
801 let (last, _) = edits.last()?;
802
803 let start = first
804 .start
805 .bias_left(&self.old_snapshot)
806 .to_point(&self.applied_edits_snapshot);
807 let end = last
808 .end
809 .bias_right(&self.old_snapshot)
810 .to_point(&self.applied_edits_snapshot);
811
812 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
813 let range = Point::new(start.row, 0)
814 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
815
816 Some(range.to_offset(&self.applied_edits_snapshot))
817 }
818}
819
820#[derive(Clone, Debug, PartialEq, Eq)]
821pub struct BracketMatch {
822 pub open_range: Range<usize>,
823 pub close_range: Range<usize>,
824 pub newline_only: bool,
825}
826
827impl Buffer {
828 /// Create a new buffer with the given base text.
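    ///
    /// A minimal usage sketch (the surrounding gpui entity setup is assumed):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```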
829 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
830 Self::build(
831 TextBuffer::new(
832 ReplicaId::LOCAL,
833 cx.entity_id().as_non_zero_u64().into(),
834 base_text.into(),
835 ),
836 None,
837 Capability::ReadWrite,
838 )
839 }
840
841 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
842 pub fn local_normalized(
843 base_text_normalized: Rope,
844 line_ending: LineEnding,
845 cx: &Context<Self>,
846 ) -> Self {
847 Self::build(
848 TextBuffer::new_normalized(
849 ReplicaId::LOCAL,
850 cx.entity_id().as_non_zero_u64().into(),
851 line_ending,
852 base_text_normalized,
853 ),
854 None,
855 Capability::ReadWrite,
856 )
857 }
858
859 /// Create a new buffer that is a replica of a remote buffer.
860 pub fn remote(
861 remote_id: BufferId,
862 replica_id: ReplicaId,
863 capability: Capability,
864 base_text: impl Into<String>,
865 ) -> Self {
866 Self::build(
867 TextBuffer::new(replica_id, remote_id, base_text.into()),
868 None,
869 capability,
870 )
871 }
872
873 /// Create a new buffer that is a replica of a remote buffer, populating its
874 /// state from the given protobuf message.
875 pub fn from_proto(
876 replica_id: ReplicaId,
877 capability: Capability,
878 message: proto::BufferState,
879 file: Option<Arc<dyn File>>,
880 ) -> Result<Self> {
881 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
882 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
883 let mut this = Self::build(buffer, file, capability);
884 this.text.set_line_ending(proto::deserialize_line_ending(
885 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
886 ));
887 this.saved_version = proto::deserialize_version(&message.saved_version);
888 this.saved_mtime = message.saved_mtime.map(|time| time.into());
889 Ok(this)
890 }
891
892 /// Serialize the buffer's state to a protobuf message.
893 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
894 proto::BufferState {
895 id: self.remote_id().into(),
896 file: self.file.as_ref().map(|f| f.to_proto(cx)),
897 base_text: self.base_text().to_string(),
898 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
899 saved_version: proto::serialize_version(&self.saved_version),
900 saved_mtime: self.saved_mtime.map(|time| time.into()),
901 }
902 }
903
904 /// Serialize as protobufs all of the changes to the buffer since the given version.
905 pub fn serialize_ops(
906 &self,
907 since: Option<clock::Global>,
908 cx: &App,
909 ) -> Task<Vec<proto::Operation>> {
910 let mut operations = Vec::new();
911 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
912
913 operations.extend(self.remote_selections.iter().map(|(_, set)| {
914 proto::serialize_operation(&Operation::UpdateSelections {
915 selections: set.selections.clone(),
916 lamport_timestamp: set.lamport_timestamp,
917 line_mode: set.line_mode,
918 cursor_shape: set.cursor_shape,
919 })
920 }));
921
922 for (server_id, diagnostics) in &self.diagnostics {
923 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
924 lamport_timestamp: self.diagnostics_timestamp,
925 server_id: *server_id,
926 diagnostics: diagnostics.iter().cloned().collect(),
927 }));
928 }
929
930 for (server_id, completions) in &self.completion_triggers_per_language_server {
931 operations.push(proto::serialize_operation(
932 &Operation::UpdateCompletionTriggers {
933 triggers: completions.iter().cloned().collect(),
934 lamport_timestamp: self.completion_triggers_timestamp,
935 server_id: *server_id,
936 },
937 ));
938 }
939
940 let text_operations = self.text.operations().clone();
941 cx.background_spawn(async move {
942 let since = since.unwrap_or_default();
943 operations.extend(
944 text_operations
945 .iter()
946 .filter(|(_, op)| !since.observed(op.timestamp()))
947 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
948 );
949 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
950 operations
951 })
952 }
953
954 /// Assign a language to the buffer, returning the buffer.
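    ///
    /// A minimal usage sketch (assuming a `rust_language: Arc<Language>` is in scope):
    ///
    /// ```ignore
    /// let buffer =
    ///     cx.new(|cx| Buffer::local("fn main() {}", cx).with_language(rust_language, cx));
    /// ```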
955 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
956 self.set_language(Some(language), cx);
957 self
958 }
959
960 /// Returns the [`Capability`] of this buffer.
961 pub fn capability(&self) -> Capability {
962 self.capability
963 }
964
965 /// Whether this buffer can only be read.
966 pub fn read_only(&self) -> bool {
967 self.capability == Capability::ReadOnly
968 }
969
970 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
971 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
972 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
973 let snapshot = buffer.snapshot();
974 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
975 Self {
976 saved_mtime,
977 saved_version: buffer.version(),
978 preview_version: buffer.version(),
979 reload_task: None,
980 transaction_depth: 0,
981 was_dirty_before_starting_transaction: None,
982 has_unsaved_edits: Cell::new((buffer.version(), false)),
983 text: buffer,
984 branch_state: None,
985 file,
986 capability,
987 syntax_map,
988 reparse: None,
989 non_text_state_update_count: 0,
990 sync_parse_timeout: Duration::from_millis(1),
991 parse_status: watch::channel(ParseStatus::Idle),
992 autoindent_requests: Default::default(),
993 wait_for_autoindent_txs: Default::default(),
994 pending_autoindent: Default::default(),
995 language: None,
996 remote_selections: Default::default(),
997 diagnostics: Default::default(),
998 diagnostics_timestamp: Lamport::MIN,
999 completion_triggers: Default::default(),
1000 completion_triggers_per_language_server: Default::default(),
1001 completion_triggers_timestamp: Lamport::MIN,
1002 deferred_ops: OperationQueue::new(),
1003 has_conflict: false,
1004 change_bits: Default::default(),
1005 _subscriptions: Vec::new(),
1006 }
1007 }
1008
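    /// Builds a [`BufferSnapshot`] for the given text without creating a buffer entity.
    /// The returned future reparses the text with the given language, if one is provided.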
1009 pub fn build_snapshot(
1010 text: Rope,
1011 language: Option<Arc<Language>>,
1012 language_registry: Option<Arc<LanguageRegistry>>,
1013 cx: &mut App,
1014 ) -> impl Future<Output = BufferSnapshot> + use<> {
1015 let entity_id = cx.reserve_entity::<Self>().entity_id();
1016 let buffer_id = entity_id.as_non_zero_u64().into();
1017 async move {
1018 let text =
1019 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1020 .snapshot();
1021 let mut syntax = SyntaxMap::new(&text).snapshot();
1022 if let Some(language) = language.clone() {
1023 let language_registry = language_registry.clone();
1024 syntax.reparse(&text, language_registry, language);
1025 }
1026 BufferSnapshot {
1027 text,
1028 syntax,
1029 file: None,
1030 diagnostics: Default::default(),
1031 remote_selections: Default::default(),
1032 language,
1033 non_text_state_update_count: 0,
1034 }
1035 }
1036 }
1037
1038 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1039 let entity_id = cx.reserve_entity::<Self>().entity_id();
1040 let buffer_id = entity_id.as_non_zero_u64().into();
1041 let text = TextBuffer::new_normalized(
1042 ReplicaId::LOCAL,
1043 buffer_id,
1044 Default::default(),
1045 Rope::new(),
1046 )
1047 .snapshot();
1048 let syntax = SyntaxMap::new(&text).snapshot();
1049 BufferSnapshot {
1050 text,
1051 syntax,
1052 file: None,
1053 diagnostics: Default::default(),
1054 remote_selections: Default::default(),
1055 language: None,
1056 non_text_state_update_count: 0,
1057 }
1058 }
1059
1060 #[cfg(any(test, feature = "test-support"))]
1061 pub fn build_snapshot_sync(
1062 text: Rope,
1063 language: Option<Arc<Language>>,
1064 language_registry: Option<Arc<LanguageRegistry>>,
1065 cx: &mut App,
1066 ) -> BufferSnapshot {
1067 let entity_id = cx.reserve_entity::<Self>().entity_id();
1068 let buffer_id = entity_id.as_non_zero_u64().into();
1069 let text =
1070 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1071 .snapshot();
1072 let mut syntax = SyntaxMap::new(&text).snapshot();
1073 if let Some(language) = language.clone() {
1074 syntax.reparse(&text, language_registry, language);
1075 }
1076 BufferSnapshot {
1077 text,
1078 syntax,
1079 file: None,
1080 diagnostics: Default::default(),
1081 remote_selections: Default::default(),
1082 language,
1083 non_text_state_update_count: 0,
1084 }
1085 }
1086
1087 /// Retrieve a snapshot of the buffer's current state. This is computationally
1088 /// cheap, and allows reading from the buffer on a background thread.
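    ///
    /// A minimal usage sketch (assuming `buffer: Entity<Buffer>` and a gpui `App` context):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot can be read freely off the main thread.
    ///     let _text = snapshot.text();
    /// })
    /// .detach();
    /// ```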
1089 pub fn snapshot(&self) -> BufferSnapshot {
1090 let text = self.text.snapshot();
1091 let mut syntax_map = self.syntax_map.lock();
1092 syntax_map.interpolate(&text);
1093 let syntax = syntax_map.snapshot();
1094
1095 BufferSnapshot {
1096 text,
1097 syntax,
1098 file: self.file.clone(),
1099 remote_selections: self.remote_selections.clone(),
1100 diagnostics: self.diagnostics.clone(),
1101 language: self.language.clone(),
1102 non_text_state_update_count: self.non_text_state_update_count,
1103 }
1104 }
1105
1106 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1107 let this = cx.entity();
1108 cx.new(|cx| {
1109 let mut branch = Self {
1110 branch_state: Some(BufferBranchState {
1111 base_buffer: this.clone(),
1112 merged_operations: Default::default(),
1113 }),
1114 language: self.language.clone(),
1115 has_conflict: self.has_conflict,
1116 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1117 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1118 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1119 };
1120 if let Some(language_registry) = self.language_registry() {
1121 branch.set_language_registry(language_registry);
1122 }
1123
1124 // Reparse the branch buffer so that we get syntax highlighting immediately.
1125 branch.reparse(cx);
1126
1127 branch
1128 })
1129 }
1130
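    /// Spawns a background task that computes an [`EditPreview`] for the given edits,
    /// which can then be used to highlight how the buffer would look with them applied.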
1131 pub fn preview_edits(
1132 &self,
1133 edits: Arc<[(Range<Anchor>, String)]>,
1134 cx: &App,
1135 ) -> Task<EditPreview> {
1136 let registry = self.language_registry();
1137 let language = self.language().cloned();
1138 let old_snapshot = self.text.snapshot();
1139 let mut branch_buffer = self.text.branch();
1140 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1141 cx.background_spawn(async move {
1142 if !edits.is_empty() {
1143 if let Some(language) = language.clone() {
1144 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1145 }
1146
1147 branch_buffer.edit(edits.iter().cloned());
1148 let snapshot = branch_buffer.snapshot();
1149 syntax_snapshot.interpolate(&snapshot);
1150
1151 if let Some(language) = language {
1152 syntax_snapshot.reparse(&snapshot, registry, language);
1153 }
1154 }
1155 EditPreview {
1156 old_snapshot,
1157 applied_edits_snapshot: branch_buffer.snapshot(),
1158 syntax_snapshot,
1159 }
1160 })
1161 }
1162
1163 /// Applies all of the changes in this buffer that intersect any of the
1164 /// given `ranges` to its base buffer.
1165 ///
1166 /// If `ranges` is empty, then all changes will be applied. This buffer must
1167 /// be a branch buffer to call this method.
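    ///
    /// A minimal usage sketch of the branch-and-merge workflow (entity setup assumed):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// prepended\n")], None, cx);
    ///     // An empty range list merges all of the branch's changes into the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```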
1168 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1169 let Some(base_buffer) = self.base_buffer() else {
1170 debug_panic!("not a branch buffer");
1171 return;
1172 };
1173
1174 let mut ranges = if ranges.is_empty() {
1175 &[0..usize::MAX]
1176 } else {
1177 ranges.as_slice()
1178 }
1179 .iter()
1180 .peekable();
1181
1182 let mut edits = Vec::new();
1183 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1184 let mut is_included = false;
1185 while let Some(range) = ranges.peek() {
1186 if range.end < edit.new.start {
1187 ranges.next().unwrap();
1188 } else {
1189 if range.start <= edit.new.end {
1190 is_included = true;
1191 }
1192 break;
1193 }
1194 }
1195
1196 if is_included {
1197 edits.push((
1198 edit.old.clone(),
1199 self.text_for_range(edit.new.clone()).collect::<String>(),
1200 ));
1201 }
1202 }
1203
1204 let operation = base_buffer.update(cx, |base_buffer, cx| {
1205 // cx.emit(BufferEvent::DiffBaseChanged);
1206 base_buffer.edit(edits, None, cx)
1207 });
1208
1209 if let Some(operation) = operation
1210 && let Some(BufferBranchState {
1211 merged_operations, ..
1212 }) = &mut self.branch_state
1213 {
1214 merged_operations.push(operation);
1215 }
1216 }
1217
1218 fn on_base_buffer_event(
1219 &mut self,
1220 _: Entity<Buffer>,
1221 event: &BufferEvent,
1222 cx: &mut Context<Self>,
1223 ) {
1224 let BufferEvent::Operation { operation, .. } = event else {
1225 return;
1226 };
1227 let Some(BufferBranchState {
1228 merged_operations, ..
1229 }) = &mut self.branch_state
1230 else {
1231 return;
1232 };
1233
1234 let mut operation_to_undo = None;
1235 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1236 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1237 {
1238 merged_operations.remove(ix);
1239 operation_to_undo = Some(operation.timestamp);
1240 }
1241
1242 self.apply_ops([operation.clone()], cx);
1243
1244 if let Some(timestamp) = operation_to_undo {
1245 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1246 self.undo_operations(counts, cx);
1247 }
1248 }
1249
1250 #[cfg(test)]
1251 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1252 &self.text
1253 }
1254
1255 /// Retrieve a snapshot of the buffer's raw text, without any
1256 /// language-related state like the syntax tree or diagnostics.
1257 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1258 self.text.snapshot()
1259 }
1260
1261 /// The file associated with the buffer, if any.
1262 pub fn file(&self) -> Option<&Arc<dyn File>> {
1263 self.file.as_ref()
1264 }
1265
1266 /// The version of the buffer that was last saved or reloaded from disk.
1267 pub fn saved_version(&self) -> &clock::Global {
1268 &self.saved_version
1269 }
1270
1271 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1272 pub fn saved_mtime(&self) -> Option<MTime> {
1273 self.saved_mtime
1274 }
1275
1276 /// Assign a language to the buffer.
1277 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1278 self.non_text_state_update_count += 1;
1279 self.syntax_map.lock().clear(&self.text);
1280 self.language = language;
1281 self.was_changed();
1282 self.reparse(cx);
1283 cx.emit(BufferEvent::LanguageChanged);
1284 }
1285
1286 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1287 /// other languages if parts of the buffer are written in different languages.
1288 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1289 self.syntax_map
1290 .lock()
1291 .set_language_registry(language_registry);
1292 }
1293
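    /// Returns the language registry assigned to the buffer, if any.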
1294 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1295 self.syntax_map.lock().language_registry()
1296 }
1297
1298 /// Assign the line ending type to the buffer.
1299 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1300 self.text.set_line_ending(line_ending);
1301
1302 let lamport_timestamp = self.text.lamport_clock.tick();
1303 self.send_operation(
1304 Operation::UpdateLineEnding {
1305 line_ending,
1306 lamport_timestamp,
1307 },
1308 true,
1309 cx,
1310 );
1311 }
1312
1313 /// Assign the buffer a new [`Capability`].
1314 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1315 if self.capability != capability {
1316 self.capability = capability;
1317 cx.emit(BufferEvent::CapabilityChanged)
1318 }
1319 }
1320
1321 /// This method is called to signal that the buffer has been saved.
1322 pub fn did_save(
1323 &mut self,
1324 version: clock::Global,
1325 mtime: Option<MTime>,
1326 cx: &mut Context<Self>,
1327 ) {
1328 self.saved_version = version.clone();
1329 self.has_unsaved_edits.set((version, false));
1330 self.has_conflict = false;
1331 self.saved_mtime = mtime;
1332 self.was_changed();
1333 cx.emit(BufferEvent::Saved);
1334 cx.notify();
1335 }
1336
1337 /// Reloads the contents of the buffer from disk.
1338 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1339 let (tx, rx) = futures::channel::oneshot::channel();
1340 let prev_version = self.text.version();
1341 self.reload_task = Some(cx.spawn(async move |this, cx| {
1342 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1343 let file = this.file.as_ref()?.as_local()?;
1344
1345 Some((file.disk_state().mtime(), file.load(cx)))
1346 })?
1347 else {
1348 return Ok(());
1349 };
1350
1351 let new_text = new_text.await?;
1352 let diff = this
1353 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1354 .await;
1355 this.update(cx, |this, cx| {
1356 if this.version() == diff.base_version {
1357 this.finalize_last_transaction();
1358 this.apply_diff(diff, cx);
1359 tx.send(this.finalize_last_transaction().cloned()).ok();
1360 this.has_conflict = false;
1361 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1362 } else {
1363 if !diff.edits.is_empty()
1364 || this
1365 .edits_since::<usize>(&diff.base_version)
1366 .next()
1367 .is_some()
1368 {
1369 this.has_conflict = true;
1370 }
1371
1372 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1373 }
1374
1375 this.reload_task.take();
1376 })
1377 }));
1378 rx
1379 }
1380
1381 /// This method is called to signal that the buffer has been reloaded.
1382 pub fn did_reload(
1383 &mut self,
1384 version: clock::Global,
1385 line_ending: LineEnding,
1386 mtime: Option<MTime>,
1387 cx: &mut Context<Self>,
1388 ) {
1389 self.saved_version = version;
1390 self.has_unsaved_edits
1391 .set((self.saved_version.clone(), false));
1392 self.text.set_line_ending(line_ending);
1393 self.saved_mtime = mtime;
1394 cx.emit(BufferEvent::Reloaded);
1395 cx.notify();
1396 }
1397
1398 /// Updates the [`File`] backing this buffer. This should be called when
1399 /// the file has changed or has been deleted.
1400 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1401 let was_dirty = self.is_dirty();
1402 let mut file_changed = false;
1403
1404 if let Some(old_file) = self.file.as_ref() {
1405 if new_file.path() != old_file.path() {
1406 file_changed = true;
1407 }
1408
1409 let old_state = old_file.disk_state();
1410 let new_state = new_file.disk_state();
1411 if old_state != new_state {
1412 file_changed = true;
1413 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1414 cx.emit(BufferEvent::ReloadNeeded)
1415 }
1416 }
1417 } else {
1418 file_changed = true;
1419 };
1420
1421 self.file = Some(new_file);
1422 if file_changed {
1423 self.was_changed();
1424 self.non_text_state_update_count += 1;
1425 if was_dirty != self.is_dirty() {
1426 cx.emit(BufferEvent::DirtyChanged);
1427 }
1428 cx.emit(BufferEvent::FileHandleChanged);
1429 cx.notify();
1430 }
1431 }
1432
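    /// Returns the base buffer of this branch, or `None` if this buffer is not a branch.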
1433 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1434 Some(self.branch_state.as_ref()?.base_buffer.clone())
1435 }
1436
1437 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1438 pub fn language(&self) -> Option<&Arc<Language>> {
1439 self.language.as_ref()
1440 }
1441
1442 /// Returns the [`Language`] at the given location.
1443 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1444 let offset = position.to_offset(self);
1445 let mut is_first = true;
1446 let start_anchor = self.anchor_before(offset);
1447 let end_anchor = self.anchor_after(offset);
1448 self.syntax_map
1449 .lock()
1450 .layers_for_range(offset..offset, &self.text, false)
1451 .filter(|layer| {
1452 if is_first {
1453 is_first = false;
1454 return true;
1455 }
1456
1457 layer
1458 .included_sub_ranges
1459 .map(|sub_ranges| {
1460 sub_ranges.iter().any(|sub_range| {
1461 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1462 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1463 !is_before_start && !is_after_end
1464 })
1465 })
1466 .unwrap_or(true)
1467 })
1468 .last()
1469 .map(|info| info.language.clone())
1470 .or_else(|| self.language.clone())
1471 }
1472
1473 /// Returns each [`Language`] for the active syntax layers at the given location.
1474 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1475 let offset = position.to_offset(self);
1476 let mut languages: Vec<Arc<Language>> = self
1477 .syntax_map
1478 .lock()
1479 .layers_for_range(offset..offset, &self.text, false)
1480 .map(|info| info.language.clone())
1481 .collect();
1482
1483 if languages.is_empty()
1484 && let Some(buffer_language) = self.language()
1485 {
1486 languages.push(buffer_language.clone());
1487 }
1488
1489 languages
1490 }
1491
1492 /// An integer version number that accounts for all updates besides
1493 /// the buffer's text itself (which is versioned via a version vector).
1494 pub fn non_text_state_update_count(&self) -> usize {
1495 self.non_text_state_update_count
1496 }
1497
1498 /// Whether the buffer is being parsed in the background.
1499 #[cfg(any(test, feature = "test-support"))]
1500 pub fn is_parsing(&self) -> bool {
1501 self.reparse.is_some()
1502 }
1503
1504 /// Indicates whether the buffer contains any regions that may be
1505 /// written in a language that hasn't been loaded yet.
1506 pub fn contains_unknown_injections(&self) -> bool {
1507 self.syntax_map.lock().contains_unknown_injections()
1508 }
1509
1510 #[cfg(any(test, feature = "test-support"))]
1511 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1512 self.sync_parse_timeout = timeout;
1513 }
1514
1515 /// Called after an edit to synchronize the buffer's main parse tree with
1516 /// the buffer's new underlying state.
1517 ///
1518 /// Locks the syntax map and interpolates the edits since the last reparse
1519 /// into the foreground syntax tree.
1520 ///
1521 /// Then takes a stable snapshot of the syntax map before unlocking it.
1522 /// The snapshot with the interpolated edits is sent to a background thread,
1523 /// where we ask Tree-sitter to perform an incremental parse.
1524 ///
1525 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1526 /// waiting on the parse to complete. As soon as it completes, we proceed
1527 /// synchronously, unless a 1ms timeout elapses.
1528 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and return with the interpolated tree still
    /// in the foreground. When the background parse completes, we call back into
    /// the main thread and assign its results to the foreground parse state.
1533 ///
1534 /// If the buffer or grammar changed since the start of the background parse,
1535 /// initiate an additional reparse recursively. To avoid concurrent parses
1536 /// for the same buffer, we only initiate a new parse if we are not already
1537 /// parsing in the background.
1538 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1539 if self.reparse.is_some() {
1540 return;
1541 }
1542 let language = if let Some(language) = self.language.clone() {
1543 language
1544 } else {
1545 return;
1546 };
1547
1548 let text = self.text_snapshot();
1549 let parsed_version = self.version();
1550
1551 let mut syntax_map = self.syntax_map.lock();
1552 syntax_map.interpolate(&text);
1553 let language_registry = syntax_map.language_registry();
1554 let mut syntax_snapshot = syntax_map.snapshot();
1555 drop(syntax_map);
1556
1557 let parse_task = cx.background_spawn({
1558 let language = language.clone();
1559 let language_registry = language_registry.clone();
1560 async move {
1561 syntax_snapshot.reparse(&text, language_registry, language);
1562 syntax_snapshot
1563 }
1564 });
1565
1566 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1567 match cx
1568 .background_executor()
1569 .block_with_timeout(self.sync_parse_timeout, parse_task)
1570 {
1571 Ok(new_syntax_snapshot) => {
1572 self.did_finish_parsing(new_syntax_snapshot, cx);
1573 self.reparse = None;
1574 }
1575 Err(parse_task) => {
1576 self.reparse = Some(cx.spawn(async move |this, cx| {
1577 let new_syntax_map = parse_task.await;
1578 this.update(cx, move |this, cx| {
1579 let grammar_changed =
1580 this.language.as_ref().is_none_or(|current_language| {
1581 !Arc::ptr_eq(&language, current_language)
1582 });
1583 let language_registry_changed = new_syntax_map
1584 .contains_unknown_injections()
1585 && language_registry.is_some_and(|registry| {
1586 registry.version() != new_syntax_map.language_registry_version()
1587 });
1588 let parse_again = language_registry_changed
1589 || grammar_changed
1590 || this.version.changed_since(&parsed_version);
1591 this.did_finish_parsing(new_syntax_map, cx);
1592 this.reparse = None;
1593 if parse_again {
1594 this.reparse(cx);
1595 }
1596 })
1597 .ok();
1598 }));
1599 }
1600 }
1601 }
1602
1603 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1604 self.was_changed();
1605 self.non_text_state_update_count += 1;
1606 self.syntax_map.lock().did_parse(syntax_snapshot);
1607 self.request_autoindent(cx);
1608 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1609 cx.emit(BufferEvent::Reparsed);
1610 cx.notify();
1611 }
1612
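    /// Returns a channel that can be watched for changes to the buffer's parse status.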
1613 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1614 self.parse_status.1.clone()
1615 }
1616
1617 /// Assign to the buffer a set of diagnostics created by a given language server.
1618 pub fn update_diagnostics(
1619 &mut self,
1620 server_id: LanguageServerId,
1621 diagnostics: DiagnosticSet,
1622 cx: &mut Context<Self>,
1623 ) {
1624 let lamport_timestamp = self.text.lamport_clock.tick();
1625 let op = Operation::UpdateDiagnostics {
1626 server_id,
1627 diagnostics: diagnostics.iter().cloned().collect(),
1628 lamport_timestamp,
1629 };
1630
1631 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1632 self.send_operation(op, true, cx);
1633 }
1634
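    /// Returns the diagnostics for the given language server, or for all language
    /// servers when `for_server` is `None`.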
1635 pub fn buffer_diagnostics(
1636 &self,
1637 for_server: Option<LanguageServerId>,
1638 ) -> Vec<&DiagnosticEntry<Anchor>> {
1639 match for_server {
1640 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1641 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1642 Err(_) => Vec::new(),
1643 },
1644 None => self
1645 .diagnostics
1646 .iter()
1647 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1648 .collect(),
1649 }
1650 }
1651
1652 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1653 if let Some(indent_sizes) = self.compute_autoindents() {
1654 let indent_sizes = cx.background_spawn(indent_sizes);
1655 match cx
1656 .background_executor()
1657 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1658 {
1659 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1660 Err(indent_sizes) => {
1661 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1662 let indent_sizes = indent_sizes.await;
1663 this.update(cx, |this, cx| {
1664 this.apply_autoindents(indent_sizes, cx);
1665 })
1666 .ok();
1667 }));
1668 }
1669 }
1670 } else {
1671 self.autoindent_requests.clear();
1672 for tx in self.wait_for_autoindent_txs.drain(..) {
1673 tx.send(()).ok();
1674 }
1675 }
1676 }
1677
1678 fn compute_autoindents(
1679 &self,
1680 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1681 let max_rows_between_yields = 100;
1682 let snapshot = self.snapshot();
1683 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1684 return None;
1685 }
1686
1687 let autoindent_requests = self.autoindent_requests.clone();
1688 Some(async move {
1689 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1690 for request in autoindent_requests {
1691 // Resolve each edited range to its row in the current buffer and in the
1692 // buffer before this batch of edits.
1693 let mut row_ranges = Vec::new();
1694 let mut old_to_new_rows = BTreeMap::new();
1695 let mut language_indent_sizes_by_new_row = Vec::new();
1696 for entry in &request.entries {
1697 let position = entry.range.start;
1698 let new_row = position.to_point(&snapshot).row;
1699 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1700 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1701
1702 if !entry.first_line_is_new {
1703 let old_row = position.to_point(&request.before_edit).row;
1704 old_to_new_rows.insert(old_row, new_row);
1705 }
1706 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1707 }
1708
1709 // Build a map containing the suggested indentation for each of the edited lines
1710 // with respect to the state of the buffer before these edits. This map is keyed
1711 // by the rows for these lines in the current state of the buffer.
1712 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1713 let old_edited_ranges =
1714 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1715 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1716 let mut language_indent_size = IndentSize::default();
1717 for old_edited_range in old_edited_ranges {
1718 let suggestions = request
1719 .before_edit
1720 .suggest_autoindents(old_edited_range.clone())
1721 .into_iter()
1722 .flatten();
1723 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1724 if let Some(suggestion) = suggestion {
1725 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1726
1727 // Find the indent size based on the language for this row.
1728 while let Some((row, size)) = language_indent_sizes.peek() {
1729 if *row > new_row {
1730 break;
1731 }
1732 language_indent_size = *size;
1733 language_indent_sizes.next();
1734 }
1735
1736 let suggested_indent = old_to_new_rows
1737 .get(&suggestion.basis_row)
1738 .and_then(|from_row| {
1739 Some(old_suggestions.get(from_row).copied()?.0)
1740 })
1741 .unwrap_or_else(|| {
1742 request
1743 .before_edit
1744 .indent_size_for_line(suggestion.basis_row)
1745 })
1746 .with_delta(suggestion.delta, language_indent_size);
1747 old_suggestions
1748 .insert(new_row, (suggested_indent, suggestion.within_error));
1749 }
1750 }
1751 yield_now().await;
1752 }
1753
1754 // Compute new suggestions for each line, but only include them in the result
1755 // if they differ from the old suggestion for that line.
1756 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1757 let mut language_indent_size = IndentSize::default();
1758 for (row_range, original_indent_column) in row_ranges {
1759 let new_edited_row_range = if request.is_block_mode {
1760 row_range.start..row_range.start + 1
1761 } else {
1762 row_range.clone()
1763 };
1764
1765 let suggestions = snapshot
1766 .suggest_autoindents(new_edited_row_range.clone())
1767 .into_iter()
1768 .flatten();
1769 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1770 if let Some(suggestion) = suggestion {
1771 // Find the indent size based on the language for this row.
1772 while let Some((row, size)) = language_indent_sizes.peek() {
1773 if *row > new_row {
1774 break;
1775 }
1776 language_indent_size = *size;
1777 language_indent_sizes.next();
1778 }
1779
1780 let suggested_indent = indent_sizes
1781 .get(&suggestion.basis_row)
1782 .copied()
1783 .map(|e| e.0)
1784 .unwrap_or_else(|| {
1785 snapshot.indent_size_for_line(suggestion.basis_row)
1786 })
1787 .with_delta(suggestion.delta, language_indent_size);
1788
1789 if old_suggestions.get(&new_row).is_none_or(
1790 |(old_indentation, was_within_error)| {
1791 suggested_indent != *old_indentation
1792 && (!suggestion.within_error || *was_within_error)
1793 },
1794 ) {
1795 indent_sizes.insert(
1796 new_row,
1797 (suggested_indent, request.ignore_empty_lines),
1798 );
1799 }
1800 }
1801 }
1802
1803 if let (true, Some(original_indent_column)) =
1804 (request.is_block_mode, original_indent_column)
1805 {
1806 let new_indent =
1807 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1808 *indent
1809 } else {
1810 snapshot.indent_size_for_line(row_range.start)
1811 };
1812 let delta = new_indent.len as i64 - original_indent_column as i64;
1813 if delta != 0 {
1814 for row in row_range.skip(1) {
1815 indent_sizes.entry(row).or_insert_with(|| {
1816 let mut size = snapshot.indent_size_for_line(row);
1817 if size.kind == new_indent.kind {
1818 match delta.cmp(&0) {
1819 Ordering::Greater => size.len += delta as u32,
1820 Ordering::Less => {
1821 size.len = size.len.saturating_sub(-delta as u32)
1822 }
1823 Ordering::Equal => {}
1824 }
1825 }
1826 (size, request.ignore_empty_lines)
1827 });
1828 }
1829 }
1830 }
1831
1832 yield_now().await;
1833 }
1834 }
1835
1836 indent_sizes
1837 .into_iter()
1838 .filter_map(|(row, (indent, ignore_empty_lines))| {
1839 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1840 None
1841 } else {
1842 Some((row, indent))
1843 }
1844 })
1845 .collect()
1846 })
1847 }
1848
1849 fn apply_autoindents(
1850 &mut self,
1851 indent_sizes: BTreeMap<u32, IndentSize>,
1852 cx: &mut Context<Self>,
1853 ) {
1854 self.autoindent_requests.clear();
1855 for tx in self.wait_for_autoindent_txs.drain(..) {
1856 tx.send(()).ok();
1857 }
1858
1859 let edits: Vec<_> = indent_sizes
1860 .into_iter()
1861 .filter_map(|(row, indent_size)| {
1862 let current_size = indent_size_for_line(self, row);
1863 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1864 })
1865 .collect();
1866
1867 let preserve_preview = self.preserve_preview();
1868 self.edit(edits, None, cx);
1869 if preserve_preview {
1870 self.refresh_preview();
1871 }
1872 }
1873
1874 /// Create a minimal edit that will cause the given row to be indented
1875 /// with the given size. After applying this edit, the length of the line
1876 /// will always be at least `new_size.len`.
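///
/// A minimal sketch of the expected output (not compiled as a doctest;
/// assumes `IndentSize` and `Point` are in scope):
///
/// ```ignore
/// // Growing a two-space indent to four spaces yields a pure insertion
/// // at column 0 of the target row.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(
///     edit,
///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
/// );
/// ```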
1877 pub fn edit_for_indent_size_adjustment(
1878 row: u32,
1879 current_size: IndentSize,
1880 new_size: IndentSize,
1881 ) -> Option<(Range<Point>, String)> {
1882 if new_size.kind == current_size.kind {
1883 match new_size.len.cmp(&current_size.len) {
1884 Ordering::Greater => {
1885 let point = Point::new(row, 0);
1886 Some((
1887 point..point,
1888 iter::repeat(new_size.char())
1889 .take((new_size.len - current_size.len) as usize)
1890 .collect::<String>(),
1891 ))
1892 }
1893
1894 Ordering::Less => Some((
1895 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1896 String::new(),
1897 )),
1898
1899 Ordering::Equal => None,
1900 }
1901 } else {
1902 Some((
1903 Point::new(row, 0)..Point::new(row, current_size.len),
1904 iter::repeat(new_size.char())
1905 .take(new_size.len as usize)
1906 .collect::<String>(),
1907 ))
1908 }
1909 }
1910
1911 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1912 /// and the given new text.
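///
/// A sketch of the intended round-trip, with illustrative identifiers
/// (`buffer` is an `Entity<Buffer>` and the diff is awaited in an async
/// context):
///
/// ```ignore
/// let task = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx));
/// let diff = task.await;
/// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
/// ```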
1913 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1914 let old_text = self.as_rope().clone();
1915 let base_version = self.version();
1916 cx.background_executor()
1917 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1918 let old_text = old_text.to_string();
1919 let line_ending = LineEnding::detect(&new_text);
1920 LineEnding::normalize(&mut new_text);
1921 let edits = text_diff(&old_text, &new_text);
1922 Diff {
1923 base_version,
1924 line_ending,
1925 edits,
1926 }
1927 })
1928 }
1929
1930 /// Spawns a background task that searches the buffer for any whitespace
1931 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1932 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1933 let old_text = self.as_rope().clone();
1934 let line_ending = self.line_ending();
1935 let base_version = self.version();
1936 cx.background_spawn(async move {
1937 let ranges = trailing_whitespace_ranges(&old_text);
1938 let empty = Arc::<str>::from("");
1939 Diff {
1940 base_version,
1941 line_ending,
1942 edits: ranges
1943 .into_iter()
1944 .map(|range| (range, empty.clone()))
1945 .collect(),
1946 }
1947 })
1948 }
1949
1950 /// Ensures that the buffer ends with a single newline character, and
1951 /// no other whitespace. Skips if the buffer is empty.
1952 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1953 let len = self.len();
1954 if len == 0 {
1955 return;
1956 }
1957 let mut offset = len;
1958 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1959 let non_whitespace_len = chunk
1960 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1961 .len();
1962 offset -= chunk.len();
1963 offset += non_whitespace_len;
1964 if non_whitespace_len != 0 {
1965 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1966 return;
1967 }
1968 break;
1969 }
1970 }
1971 self.edit([(offset..len, "\n")], None, cx);
1972 }
1973
1974 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1975 /// calculated, then adjust the diff to account for those changes, and discard any
1976 /// parts of the diff that conflict with those changes.
1977 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1978 let snapshot = self.snapshot();
1979 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1980 let mut delta = 0;
1981 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1982 while let Some(edit_since) = edits_since.peek() {
1983 // If the edit occurs after a diff hunk, then it does not
1984 // affect that hunk.
1985 if edit_since.old.start > range.end {
1986 break;
1987 }
1988 // If the edit precedes the diff hunk, then adjust the hunk
1989 // to reflect the edit.
1990 else if edit_since.old.end < range.start {
1991 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1992 edits_since.next();
1993 }
1994 // If the edit intersects a diff hunk, then discard that hunk.
1995 else {
1996 return None;
1997 }
1998 }
1999
2000 let start = (range.start as i64 + delta) as usize;
2001 let end = (range.end as i64 + delta) as usize;
2002 Some((start..end, new_text))
2003 });
2004
2005 self.start_transaction();
2006 self.text.set_line_ending(diff.line_ending);
2007 self.edit(adjusted_edits, None, cx);
2008 self.end_transaction(cx)
2009 }
2010
2011 fn has_unsaved_edits(&self) -> bool {
2012 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2013
2014 if last_version == self.version {
2015 self.has_unsaved_edits
2016 .set((last_version, has_unsaved_edits));
2017 return has_unsaved_edits;
2018 }
2019
2020 let has_edits = self.has_edits_since(&self.saved_version);
2021 self.has_unsaved_edits
2022 .set((self.version.clone(), has_edits));
2023 has_edits
2024 }
2025
2026 /// Checks if the buffer has unsaved changes.
2027 pub fn is_dirty(&self) -> bool {
2028 if self.capability == Capability::ReadOnly {
2029 return false;
2030 }
2031 if self.has_conflict {
2032 return true;
2033 }
2034 match self.file.as_ref().map(|f| f.disk_state()) {
2035 Some(DiskState::New) | Some(DiskState::Deleted) => {
2036 !self.is_empty() && self.has_unsaved_edits()
2037 }
2038 _ => self.has_unsaved_edits(),
2039 }
2040 }
2041
2042 /// Checks if the buffer and its file have both changed since the buffer
2043 /// was last saved or reloaded.
2044 pub fn has_conflict(&self) -> bool {
2045 if self.has_conflict {
2046 return true;
2047 }
2048 let Some(file) = self.file.as_ref() else {
2049 return false;
2050 };
2051 match file.disk_state() {
2052 DiskState::New => false,
2053 DiskState::Present { mtime } => match self.saved_mtime {
2054 Some(saved_mtime) => {
2055 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2056 }
2057 None => true,
2058 },
2059 DiskState::Deleted => false,
2060 }
2061 }
2062
2063 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2064 pub fn subscribe(&mut self) -> Subscription {
2065 self.text.subscribe()
2066 }
2067
2068 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2069 ///
2070 /// This allows downstream code to check if the buffer's text has changed without
2071 /// waiting for an effect cycle, which would be required if using events.
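///
/// A minimal usage sketch (illustrative only):
///
/// ```ignore
/// use std::{cell::Cell, rc::Rc};
///
/// // Hold a strong flag, register its weak handle, and poll it later.
/// let changed = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&changed));
/// // ...after some edits...
/// if changed.take() {
///     // React to the change without waiting for an event.
/// }
/// ```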
2072 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2073 if let Err(ix) = self
2074 .change_bits
2075 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2076 {
2077 self.change_bits.insert(ix, bit);
2078 }
2079 }
2080
2081 fn was_changed(&mut self) {
2082 self.change_bits.retain(|change_bit| {
2083 change_bit.upgrade().is_some_and(|bit| {
2084 bit.replace(true);
2085 true
2086 })
2087 });
2088 }
2089
2090 /// Starts a transaction, if one is not already in progress. When undoing or
2091 /// redoing edits, all of the edits performed within a transaction are undone
2092 /// or redone together.
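///
/// A sketch of grouping several edits into a single undo step (`cx` is a
/// `Context<Buffer>`; identifiers are illustrative):
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "fn main() {\n")], None, cx);
/// buffer.edit([(12..12, "}\n")], None, cx);
/// // Both edits are undone and redone together.
/// buffer.end_transaction(cx);
/// ```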
2093 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2094 self.start_transaction_at(Instant::now())
2095 }
2096
2097 /// Starts a transaction, providing the current time. Subsequent transactions
2098 /// that occur within a short period of time will be grouped together. This
2099 /// is controlled by the buffer's undo grouping duration.
2100 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2101 self.transaction_depth += 1;
2102 if self.was_dirty_before_starting_transaction.is_none() {
2103 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2104 }
2105 self.text.start_transaction_at(now)
2106 }
2107
2108 /// Terminates the current transaction, if this is the outermost transaction.
2109 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2110 self.end_transaction_at(Instant::now(), cx)
2111 }
2112
2113 /// Terminates the current transaction, providing the current time. Subsequent transactions
2114 /// that occur within a short period of time will be grouped together. This
2115 /// is controlled by the buffer's undo grouping duration.
2116 pub fn end_transaction_at(
2117 &mut self,
2118 now: Instant,
2119 cx: &mut Context<Self>,
2120 ) -> Option<TransactionId> {
2121 assert!(self.transaction_depth > 0);
2122 self.transaction_depth -= 1;
2123 let was_dirty = if self.transaction_depth == 0 {
2124 self.was_dirty_before_starting_transaction.take().unwrap()
2125 } else {
2126 false
2127 };
2128 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2129 self.did_edit(&start_version, was_dirty, cx);
2130 Some(transaction_id)
2131 } else {
2132 None
2133 }
2134 }
2135
2136 /// Manually add a transaction to the buffer's undo history.
2137 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2138 self.text.push_transaction(transaction, now);
2139 }
2140
2141 /// Differs from `push_transaction` in that it does not clear the redo
2142 /// stack. Intended to be used to create a parent transaction to merge
2143 /// potential child transactions into.
2144 ///
2145 /// The caller is responsible for removing it from the undo history using
2146 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2147 /// are merged into this transaction, the caller is responsible for ensuring
2148 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2149 /// cleared is to create transactions with the usual `start_transaction` and
2150 /// `end_transaction` methods and merge the resulting transactions into
2151 /// the transaction created by this method.
2152 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2153 self.text.push_empty_transaction(now)
2154 }
2155
2156 /// Prevent the last transaction from being grouped with any subsequent transactions,
2157 /// even if they occur within the buffer's undo grouping duration.
2158 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2159 self.text.finalize_last_transaction()
2160 }
2161
2162 /// Manually group all changes since a given transaction.
2163 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2164 self.text.group_until_transaction(transaction_id);
2165 }
2166
2167 /// Manually remove a transaction from the buffer's undo history
2168 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2169 self.text.forget_transaction(transaction_id)
2170 }
2171
2172 /// Retrieve a transaction from the buffer's undo history
2173 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2174 self.text.get_transaction(transaction_id)
2175 }
2176
2177 /// Manually merge two transactions in the buffer's undo history.
2178 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2179 self.text.merge_transactions(transaction, destination);
2180 }
2181
2182 /// Waits for the buffer to receive operations with the given timestamps.
2183 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2184 &mut self,
2185 edit_ids: It,
2186 ) -> impl Future<Output = Result<()>> + use<It> {
2187 self.text.wait_for_edits(edit_ids)
2188 }
2189
2190 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2191 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2192 &mut self,
2193 anchors: It,
2194 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2195 self.text.wait_for_anchors(anchors)
2196 }
2197
2198 /// Waits for the buffer to receive operations up to the given version.
2199 pub fn wait_for_version(
2200 &mut self,
2201 version: clock::Global,
2202 ) -> impl Future<Output = Result<()>> + use<> {
2203 self.text.wait_for_version(version)
2204 }
2205
2206 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2207 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2208 pub fn give_up_waiting(&mut self) {
2209 self.text.give_up_waiting();
2210 }
2211
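/// Returns a receiver that resolves once all pending autoindent requests
/// have been applied, or `None` if no autoindent work is pending.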
2212 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2213 let mut rx = None;
2214 if !self.autoindent_requests.is_empty() {
2215 let channel = oneshot::channel();
2216 self.wait_for_autoindent_txs.push(channel.0);
2217 rx = Some(channel.1);
2218 }
2219 rx
2220 }
2221
2222 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2223 pub fn set_active_selections(
2224 &mut self,
2225 selections: Arc<[Selection<Anchor>]>,
2226 line_mode: bool,
2227 cursor_shape: CursorShape,
2228 cx: &mut Context<Self>,
2229 ) {
2230 let lamport_timestamp = self.text.lamport_clock.tick();
2231 self.remote_selections.insert(
2232 self.text.replica_id(),
2233 SelectionSet {
2234 selections: selections.clone(),
2235 lamport_timestamp,
2236 line_mode,
2237 cursor_shape,
2238 },
2239 );
2240 self.send_operation(
2241 Operation::UpdateSelections {
2242 selections,
2243 line_mode,
2244 lamport_timestamp,
2245 cursor_shape,
2246 },
2247 true,
2248 cx,
2249 );
2250 self.non_text_state_update_count += 1;
2251 cx.notify();
2252 }
2253
2254 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2255 /// this replica.
2256 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2257 if self
2258 .remote_selections
2259 .get(&self.text.replica_id())
2260 .is_none_or(|set| !set.selections.is_empty())
2261 {
2262 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2263 }
2264 }
2265
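/// Stores a set of selections for the agent, under the `ReplicaId::AGENT`
/// replica. Unlike [`Buffer::set_active_selections`], this does not
/// broadcast an operation to other replicas.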
2266 pub fn set_agent_selections(
2267 &mut self,
2268 selections: Arc<[Selection<Anchor>]>,
2269 line_mode: bool,
2270 cursor_shape: CursorShape,
2271 cx: &mut Context<Self>,
2272 ) {
2273 let lamport_timestamp = self.text.lamport_clock.tick();
2274 self.remote_selections.insert(
2275 ReplicaId::AGENT,
2276 SelectionSet {
2277 selections,
2278 lamport_timestamp,
2279 line_mode,
2280 cursor_shape,
2281 },
2282 );
2283 self.non_text_state_update_count += 1;
2284 cx.notify();
2285 }
2286
2287 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2288 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2289 }
2290
2291 /// Replaces the buffer's entire text.
2292 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2293 where
2294 T: Into<Arc<str>>,
2295 {
2296 self.autoindent_requests.clear();
2297 self.edit([(0..self.len(), text)], None, cx)
2298 }
2299
2300 /// Appends the given text to the end of the buffer.
2301 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2302 where
2303 T: Into<Arc<str>>,
2304 {
2305 self.edit([(self.len()..self.len(), text)], None, cx)
2306 }
2307
2308 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2309 /// delete, and a string of text to insert at that location.
2310 ///
2311 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2312 /// request for the edited ranges, which will be processed when the buffer finishes
2313 /// parsing.
2314 ///
2315 /// Parsing takes place at the end of a transaction, and may compute synchronously
2316 /// or asynchronously, depending on the changes.
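///
/// A sketch of a multi-range edit (identifiers are illustrative; every range
/// is interpreted against the buffer's state before any of these edits):
///
/// ```ignore
/// let len = buffer.len();
/// buffer.edit(
///     [(0..3, "let"), (len..len, ";\n")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```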
2317 pub fn edit<I, S, T>(
2318 &mut self,
2319 edits_iter: I,
2320 autoindent_mode: Option<AutoindentMode>,
2321 cx: &mut Context<Self>,
2322 ) -> Option<clock::Lamport>
2323 where
2324 I: IntoIterator<Item = (Range<S>, T)>,
2325 S: ToOffset,
2326 T: Into<Arc<str>>,
2327 {
2328 // Skip invalid edits and coalesce contiguous ones.
2329 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2330
2331 for (range, new_text) in edits_iter {
2332 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2333
2334 if range.start > range.end {
2335 mem::swap(&mut range.start, &mut range.end);
2336 }
2337 let new_text = new_text.into();
2338 if !new_text.is_empty() || !range.is_empty() {
2339 if let Some((prev_range, prev_text)) = edits.last_mut()
2340 && prev_range.end >= range.start
2341 {
2342 prev_range.end = cmp::max(prev_range.end, range.end);
2343 *prev_text = format!("{prev_text}{new_text}").into();
2344 } else {
2345 edits.push((range, new_text));
2346 }
2347 }
2348 }
2349 if edits.is_empty() {
2350 return None;
2351 }
2352
2353 self.start_transaction();
2354 self.pending_autoindent.take();
2355 let autoindent_request = autoindent_mode
2356 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2357
2358 let edit_operation = self.text.edit(edits.iter().cloned());
2359 let edit_id = edit_operation.timestamp();
2360
2361 if let Some((before_edit, mode)) = autoindent_request {
2362 let mut delta = 0isize;
2363 let mut previous_setting = None;
2364 let entries: Vec<_> = edits
2365 .into_iter()
2366 .enumerate()
2367 .zip(&edit_operation.as_edit().unwrap().new_text)
2368 .filter(|((_, (range, _)), _)| {
2369 let language = before_edit.language_at(range.start);
2370 let language_id = language.map(|l| l.id());
2371 if let Some((cached_language_id, auto_indent)) = previous_setting
2372 && cached_language_id == language_id
2373 {
2374 auto_indent
2375 } else {
2376 // The auto-indent setting is not present in editorconfigs, hence
2377 // we can avoid passing the file here.
2378 let auto_indent =
2379 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2380 previous_setting = Some((language_id, auto_indent));
2381 auto_indent
2382 }
2383 })
2384 .map(|((ix, (range, _)), new_text)| {
2385 let new_text_length = new_text.len();
2386 let old_start = range.start.to_point(&before_edit);
2387 let new_start = (delta + range.start as isize) as usize;
2388 let range_len = range.end - range.start;
2389 delta += new_text_length as isize - range_len as isize;
2390
2391 // Decide what range of the insertion to auto-indent, and whether
2392 // the first line of the insertion should be considered a newly-inserted line
2393 // or an edit to an existing line.
2394 let mut range_of_insertion_to_indent = 0..new_text_length;
2395 let mut first_line_is_new = true;
2396
2397 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2398 let old_line_end = before_edit.line_len(old_start.row);
2399
2400 if old_start.column > old_line_start {
2401 first_line_is_new = false;
2402 }
2403
2404 if !new_text.contains('\n')
2405 && (old_start.column + (range_len as u32) < old_line_end
2406 || old_line_end == old_line_start)
2407 {
2408 first_line_is_new = false;
2409 }
2410
2411 // When inserting text starting with a newline, avoid auto-indenting the
2412 // previous line.
2413 if new_text.starts_with('\n') {
2414 range_of_insertion_to_indent.start += 1;
2415 first_line_is_new = true;
2416 }
2417
2418 let mut original_indent_column = None;
2419 if let AutoindentMode::Block {
2420 original_indent_columns,
2421 } = &mode
2422 {
2423 original_indent_column = Some(if new_text.starts_with('\n') {
2424 indent_size_for_text(
2425 new_text[range_of_insertion_to_indent.clone()].chars(),
2426 )
2427 .len
2428 } else {
2429 original_indent_columns
2430 .get(ix)
2431 .copied()
2432 .flatten()
2433 .unwrap_or_else(|| {
2434 indent_size_for_text(
2435 new_text[range_of_insertion_to_indent.clone()].chars(),
2436 )
2437 .len
2438 })
2439 });
2440
2441 // Avoid auto-indenting the line after the edit.
2442 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2443 range_of_insertion_to_indent.end -= 1;
2444 }
2445 }
2446
2447 AutoindentRequestEntry {
2448 first_line_is_new,
2449 original_indent_column,
2450 indent_size: before_edit.language_indent_size_at(range.start, cx),
2451 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2452 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2453 }
2454 })
2455 .collect();
2456
2457 if !entries.is_empty() {
2458 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2459 before_edit,
2460 entries,
2461 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2462 ignore_empty_lines: false,
2463 }));
2464 }
2465 }
2466
2467 self.end_transaction(cx);
2468 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2469 Some(edit_id)
2470 }
2471
2472 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2473 self.was_changed();
2474
2475 if self.edits_since::<usize>(old_version).next().is_none() {
2476 return;
2477 }
2478
2479 self.reparse(cx);
2480 cx.emit(BufferEvent::Edited);
2481 if was_dirty != self.is_dirty() {
2482 cx.emit(BufferEvent::DirtyChanged);
2483 }
2484 cx.notify();
2485 }
2486
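/// Enqueues an autoindent request for the given ranges, treating the first
/// line of each range as newly inserted and skipping empty lines when the
/// computed indents are applied.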
2487 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2488 where
2489 I: IntoIterator<Item = Range<T>>,
2490 T: ToOffset + Copy,
2491 {
2492 let before_edit = self.snapshot();
2493 let entries = ranges
2494 .into_iter()
2495 .map(|range| AutoindentRequestEntry {
2496 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2497 first_line_is_new: true,
2498 indent_size: before_edit.language_indent_size_at(range.start, cx),
2499 original_indent_column: None,
2500 })
2501 .collect();
2502 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2503 before_edit,
2504 entries,
2505 is_block_mode: false,
2506 ignore_empty_lines: true,
2507 }));
2508 self.request_autoindent(cx);
2509 }
2510
2511 /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2512 /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2513 pub fn insert_empty_line(
2514 &mut self,
2515 position: impl ToPoint,
2516 space_above: bool,
2517 space_below: bool,
2518 cx: &mut Context<Self>,
2519 ) -> Point {
2520 let mut position = position.to_point(self);
2521
2522 self.start_transaction();
2523
2524 self.edit(
2525 [(position..position, "\n")],
2526 Some(AutoindentMode::EachLine),
2527 cx,
2528 );
2529
2530 if position.column > 0 {
2531 position += Point::new(1, 0);
2532 }
2533
2534 if !self.is_line_blank(position.row) {
2535 self.edit(
2536 [(position..position, "\n")],
2537 Some(AutoindentMode::EachLine),
2538 cx,
2539 );
2540 }
2541
2542 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2543 self.edit(
2544 [(position..position, "\n")],
2545 Some(AutoindentMode::EachLine),
2546 cx,
2547 );
2548 position.row += 1;
2549 }
2550
2551 if space_below
2552 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2553 {
2554 self.edit(
2555 [(position..position, "\n")],
2556 Some(AutoindentMode::EachLine),
2557 cx,
2558 );
2559 }
2560
2561 self.end_transaction(cx);
2562
2563 position
2564 }
2565
2566 /// Applies the given remote operations to the buffer.
2567 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2568 self.pending_autoindent.take();
2569 let was_dirty = self.is_dirty();
2570 let old_version = self.version.clone();
2571 let mut deferred_ops = Vec::new();
2572 let buffer_ops = ops
2573 .into_iter()
2574 .filter_map(|op| match op {
2575 Operation::Buffer(op) => Some(op),
2576 _ => {
2577 if self.can_apply_op(&op) {
2578 self.apply_op(op, cx);
2579 } else {
2580 deferred_ops.push(op);
2581 }
2582 None
2583 }
2584 })
2585 .collect::<Vec<_>>();
2586 for operation in buffer_ops.iter() {
2587 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2588 }
2589 self.text.apply_ops(buffer_ops);
2590 self.deferred_ops.insert(deferred_ops);
2591 self.flush_deferred_ops(cx);
2592 self.did_edit(&old_version, was_dirty, cx);
2593 // Notify independently of whether the buffer was edited as the operations could include a
2594 // selection update.
2595 cx.notify();
2596 }
2597
2598 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2599 let mut deferred_ops = Vec::new();
2600 for op in self.deferred_ops.drain().iter().cloned() {
2601 if self.can_apply_op(&op) {
2602 self.apply_op(op, cx);
2603 } else {
2604 deferred_ops.push(op);
2605 }
2606 }
2607 self.deferred_ops.insert(deferred_ops);
2608 }
2609
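/// Returns whether any remote operations are still deferred because the
/// operations they depend on have not yet been received.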
2610 pub fn has_deferred_ops(&self) -> bool {
2611 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2612 }
2613
2614 fn can_apply_op(&self, operation: &Operation) -> bool {
2615 match operation {
2616 Operation::Buffer(_) => {
2617 unreachable!("buffer operations should never be applied at this layer")
2618 }
2619 Operation::UpdateDiagnostics {
2620 diagnostics: diagnostic_set,
2621 ..
2622 } => diagnostic_set.iter().all(|diagnostic| {
2623 self.text.can_resolve(&diagnostic.range.start)
2624 && self.text.can_resolve(&diagnostic.range.end)
2625 }),
2626 Operation::UpdateSelections { selections, .. } => selections
2627 .iter()
2628 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2629 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2630 }
2631 }
2632
2633 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2634 match operation {
2635 Operation::Buffer(_) => {
2636 unreachable!("buffer operations should never be applied at this layer")
2637 }
2638 Operation::UpdateDiagnostics {
2639 server_id,
2640 diagnostics: diagnostic_set,
2641 lamport_timestamp,
2642 } => {
2643 let snapshot = self.snapshot();
2644 self.apply_diagnostic_update(
2645 server_id,
2646 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2647 lamport_timestamp,
2648 cx,
2649 );
2650 }
2651 Operation::UpdateSelections {
2652 selections,
2653 lamport_timestamp,
2654 line_mode,
2655 cursor_shape,
2656 } => {
2657 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2658 && set.lamport_timestamp > lamport_timestamp
2659 {
2660 return;
2661 }
2662
2663 self.remote_selections.insert(
2664 lamport_timestamp.replica_id,
2665 SelectionSet {
2666 selections,
2667 lamport_timestamp,
2668 line_mode,
2669 cursor_shape,
2670 },
2671 );
2672 self.text.lamport_clock.observe(lamport_timestamp);
2673 self.non_text_state_update_count += 1;
2674 }
2675 Operation::UpdateCompletionTriggers {
2676 triggers,
2677 lamport_timestamp,
2678 server_id,
2679 } => {
2680 if triggers.is_empty() {
2681 self.completion_triggers_per_language_server
2682 .remove(&server_id);
2683 self.completion_triggers = self
2684 .completion_triggers_per_language_server
2685 .values()
2686 .flat_map(|triggers| triggers.iter().cloned())
2687 .collect();
2688 } else {
2689 self.completion_triggers_per_language_server
2690 .insert(server_id, triggers.iter().cloned().collect());
2691 self.completion_triggers.extend(triggers);
2692 }
2693 self.text.lamport_clock.observe(lamport_timestamp);
2694 }
2695 Operation::UpdateLineEnding {
2696 line_ending,
2697 lamport_timestamp,
2698 } => {
2699 self.text.set_line_ending(line_ending);
2700 self.text.lamport_clock.observe(lamport_timestamp);
2701 }
2702 }
2703 }
2704
2705 fn apply_diagnostic_update(
2706 &mut self,
2707 server_id: LanguageServerId,
2708 diagnostics: DiagnosticSet,
2709 lamport_timestamp: clock::Lamport,
2710 cx: &mut Context<Self>,
2711 ) {
2712 if lamport_timestamp > self.diagnostics_timestamp {
2713 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2714 if diagnostics.is_empty() {
2715 if let Ok(ix) = ix {
2716 self.diagnostics.remove(ix);
2717 }
2718 } else {
2719 match ix {
2720 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2721 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2722 };
2723 }
2724 self.diagnostics_timestamp = lamport_timestamp;
2725 self.non_text_state_update_count += 1;
2726 self.text.lamport_clock.observe(lamport_timestamp);
2727 cx.notify();
2728 cx.emit(BufferEvent::DiagnosticsUpdated);
2729 }
2730 }
2731
2732 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2733 self.was_changed();
2734 cx.emit(BufferEvent::Operation {
2735 operation,
2736 is_local,
2737 });
2738 }
2739
2740 /// Removes the selections for a given peer.
2741 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2742 self.remote_selections.remove(&replica_id);
2743 cx.notify();
2744 }
2745
2746 /// Undoes the most recent transaction.
2747 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2748 let was_dirty = self.is_dirty();
2749 let old_version = self.version.clone();
2750
2751 if let Some((transaction_id, operation)) = self.text.undo() {
2752 self.send_operation(Operation::Buffer(operation), true, cx);
2753 self.did_edit(&old_version, was_dirty, cx);
2754 Some(transaction_id)
2755 } else {
2756 None
2757 }
2758 }
2759
2760 /// Manually undoes a specific transaction in the buffer's undo history.
2761 pub fn undo_transaction(
2762 &mut self,
2763 transaction_id: TransactionId,
2764 cx: &mut Context<Self>,
2765 ) -> bool {
2766 let was_dirty = self.is_dirty();
2767 let old_version = self.version.clone();
2768 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2769 self.send_operation(Operation::Buffer(operation), true, cx);
2770 self.did_edit(&old_version, was_dirty, cx);
2771 true
2772 } else {
2773 false
2774 }
2775 }
2776
2777 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2778 pub fn undo_to_transaction(
2779 &mut self,
2780 transaction_id: TransactionId,
2781 cx: &mut Context<Self>,
2782 ) -> bool {
2783 let was_dirty = self.is_dirty();
2784 let old_version = self.version.clone();
2785
2786 let operations = self.text.undo_to_transaction(transaction_id);
2787 let undone = !operations.is_empty();
2788 for operation in operations {
2789 self.send_operation(Operation::Buffer(operation), true, cx);
2790 }
2791 if undone {
2792 self.did_edit(&old_version, was_dirty, cx)
2793 }
2794 undone
2795 }
2796
2797 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2798 let was_dirty = self.is_dirty();
2799 let operation = self.text.undo_operations(counts);
2800 let old_version = self.version.clone();
2801 self.send_operation(Operation::Buffer(operation), true, cx);
2802 self.did_edit(&old_version, was_dirty, cx);
2803 }
2804
2805 /// Redoes the most recently undone transaction.
2806 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2807 let was_dirty = self.is_dirty();
2808 let old_version = self.version.clone();
2809
2810 if let Some((transaction_id, operation)) = self.text.redo() {
2811 self.send_operation(Operation::Buffer(operation), true, cx);
2812 self.did_edit(&old_version, was_dirty, cx);
2813 Some(transaction_id)
2814 } else {
2815 None
2816 }
2817 }
2818
2819 /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2820 pub fn redo_to_transaction(
2821 &mut self,
2822 transaction_id: TransactionId,
2823 cx: &mut Context<Self>,
2824 ) -> bool {
2825 let was_dirty = self.is_dirty();
2826 let old_version = self.version.clone();
2827
2828 let operations = self.text.redo_to_transaction(transaction_id);
2829 let redone = !operations.is_empty();
2830 for operation in operations {
2831 self.send_operation(Operation::Buffer(operation), true, cx);
2832 }
2833 if redone {
2834 self.did_edit(&old_version, was_dirty, cx)
2835 }
2836 redone
2837 }
2838
2839 /// Override current completion triggers with the user-provided completion triggers.
2840 pub fn set_completion_triggers(
2841 &mut self,
2842 server_id: LanguageServerId,
2843 triggers: BTreeSet<String>,
2844 cx: &mut Context<Self>,
2845 ) {
2846 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2847 if triggers.is_empty() {
2848 self.completion_triggers_per_language_server
2849 .remove(&server_id);
2850 self.completion_triggers = self
2851 .completion_triggers_per_language_server
2852 .values()
2853 .flat_map(|triggers| triggers.iter().cloned())
2854 .collect();
2855 } else {
2856 self.completion_triggers_per_language_server
2857 .insert(server_id, triggers.clone());
2858 self.completion_triggers.extend(triggers.iter().cloned());
2859 }
2860 self.send_operation(
2861 Operation::UpdateCompletionTriggers {
2862 triggers: triggers.into_iter().collect(),
2863 lamport_timestamp: self.completion_triggers_timestamp,
2864 server_id,
2865 },
2866 true,
2867 cx,
2868 );
2869 cx.notify();
2870 }
2871
2872 /// Returns a list of strings which trigger a completion menu for this language.
2873 /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2874 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2875 &self.completion_triggers
2876 }
2877
2878 /// Call this directly after performing edits to prevent the preview tab
2879 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2880 /// to return false until there are additional edits.
2881 pub fn refresh_preview(&mut self) {
2882 self.preview_version = self.version.clone();
2883 }
2884
2885 /// Whether we should preserve the preview status of a tab containing this buffer.
2886 pub fn preserve_preview(&self) -> bool {
2887 !self.has_edits_since(&self.preview_version)
2888 }
2889}
2890
2891#[doc(hidden)]
2892#[cfg(any(test, feature = "test-support"))]
2893impl Buffer {
2894 pub fn edit_via_marked_text(
2895 &mut self,
2896 marked_string: &str,
2897 autoindent_mode: Option<AutoindentMode>,
2898 cx: &mut Context<Self>,
2899 ) {
2900 let edits = self.edits_for_marked_text(marked_string);
2901 self.edit(edits, autoindent_mode, cx);
2902 }
2903
2904 pub fn set_group_interval(&mut self, group_interval: Duration) {
2905 self.text.set_group_interval(group_interval);
2906 }
2907
2908 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2909 where
2910 T: rand::Rng,
2911 {
2912 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2913 let mut last_end = None;
2914 for _ in 0..old_range_count {
2915 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2916 break;
2917 }
2918
2919 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2920 let mut range = self.random_byte_range(new_start, rng);
2921 if rng.random_bool(0.2) {
2922 mem::swap(&mut range.start, &mut range.end);
2923 }
2924 last_end = Some(range.end);
2925
2926 let new_text_len = rng.random_range(0..10);
2927 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2928 new_text = new_text.to_uppercase();
2929
2930 edits.push((range, new_text));
2931 }
2932 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2933 self.edit(edits, None, cx);
2934 }
2935
2936 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2937 let was_dirty = self.is_dirty();
2938 let old_version = self.version.clone();
2939
2940 let ops = self.text.randomly_undo_redo(rng);
2941 if !ops.is_empty() {
2942 for op in ops {
2943 self.send_operation(Operation::Buffer(op), true, cx);
2944 self.did_edit(&old_version, was_dirty, cx);
2945 }
2946 }
2947 }
2948}
2949
2950impl EventEmitter<BufferEvent> for Buffer {}
2951
2952impl Deref for Buffer {
2953 type Target = TextBuffer;
2954
2955 fn deref(&self) -> &Self::Target {
2956 &self.text
2957 }
2958}
2959
2960impl BufferSnapshot {
2961 /// Returns [`IndentSize`] for a given line that respects user settings and
2962 /// language preferences.
2963 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2964 indent_size_for_line(self, row)
2965 }
2966
2967 /// Returns [`IndentSize`] for a given position that respects user settings
2968 /// and language preferences.
2969 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2970 let settings = language_settings(
2971 self.language_at(position).map(|l| l.name()),
2972 self.file(),
2973 cx,
2974 );
2975 if settings.hard_tabs {
2976 IndentSize::tab()
2977 } else {
2978 IndentSize::spaces(settings.tab_size.get())
2979 }
2980 }
2981
2982 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2983 /// is passed in as `single_indent_size`.
2984 pub fn suggested_indents(
2985 &self,
2986 rows: impl Iterator<Item = u32>,
2987 single_indent_size: IndentSize,
2988 ) -> BTreeMap<u32, IndentSize> {
2989 let mut result = BTreeMap::new();
2990
2991 for row_range in contiguous_ranges(rows, 10) {
2992 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2993 Some(suggestions) => suggestions,
2994 _ => break,
2995 };
2996
2997 for (row, suggestion) in row_range.zip(suggestions) {
2998 let indent_size = if let Some(suggestion) = suggestion {
2999 result
3000 .get(&suggestion.basis_row)
3001 .copied()
3002 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3003 .with_delta(suggestion.delta, single_indent_size)
3004 } else {
3005 self.indent_size_for_line(row)
3006 };
3007
3008 result.insert(row, indent_size);
3009 }
3010 }
3011
3012 result
3013 }
3014
3015 fn suggest_autoindents(
3016 &self,
3017 row_range: Range<u32>,
3018 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3019 let config = &self.language.as_ref()?.config;
3020 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3021
3022 #[derive(Debug, Clone)]
3023 struct StartPosition {
3024 start: Point,
3025 suffix: SharedString,
3026 }
3027
3028 // Find the suggested indentation ranges based on the syntax tree.
3029 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3030 let end = Point::new(row_range.end, 0);
3031 let range = (start..end).to_offset(&self.text);
3032 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3033 Some(&grammar.indents_config.as_ref()?.query)
3034 });
3035 let indent_configs = matches
3036 .grammars()
3037 .iter()
3038 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3039 .collect::<Vec<_>>();
3040
3041 let mut indent_ranges = Vec::<Range<Point>>::new();
3042 let mut start_positions = Vec::<StartPosition>::new();
3043 let mut outdent_positions = Vec::<Point>::new();
3044 while let Some(mat) = matches.peek() {
3045 let mut start: Option<Point> = None;
3046 let mut end: Option<Point> = None;
3047
3048 let config = indent_configs[mat.grammar_index];
3049 for capture in mat.captures {
3050 if capture.index == config.indent_capture_ix {
3051 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3052 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3053 } else if Some(capture.index) == config.start_capture_ix {
3054 start = Some(Point::from_ts_point(capture.node.end_position()));
3055 } else if Some(capture.index) == config.end_capture_ix {
3056 end = Some(Point::from_ts_point(capture.node.start_position()));
3057 } else if Some(capture.index) == config.outdent_capture_ix {
3058 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3059 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3060 start_positions.push(StartPosition {
3061 start: Point::from_ts_point(capture.node.start_position()),
3062 suffix: suffix.clone(),
3063 });
3064 }
3065 }
3066
3067 matches.advance();
3068 if let Some((start, end)) = start.zip(end) {
3069 if start.row == end.row {
3070 continue;
3071 }
3072 let range = start..end;
3073 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3074 Err(ix) => indent_ranges.insert(ix, range),
3075 Ok(ix) => {
3076 let prev_range = &mut indent_ranges[ix];
3077 prev_range.end = prev_range.end.max(range.end);
3078 }
3079 }
3080 }
3081 }
3082
3083 let mut error_ranges = Vec::<Range<Point>>::new();
3084 let mut matches = self
3085 .syntax
3086 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3087 while let Some(mat) = matches.peek() {
3088 let node = mat.captures[0].node;
3089 let start = Point::from_ts_point(node.start_position());
3090 let end = Point::from_ts_point(node.end_position());
3091 let range = start..end;
3092 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3093 Ok(ix) | Err(ix) => ix,
3094 };
3095 let mut end_ix = ix;
3096 while let Some(existing_range) = error_ranges.get(end_ix) {
3097 if existing_range.end < end {
3098 end_ix += 1;
3099 } else {
3100 break;
3101 }
3102 }
3103 error_ranges.splice(ix..end_ix, [range]);
3104 matches.advance();
3105 }
3106
3107 outdent_positions.sort();
3108 for outdent_position in outdent_positions {
3109 // find the innermost indent range containing this outdent_position
3110 // set its end to the outdent position
3111 if let Some(range_to_truncate) = indent_ranges
3112 .iter_mut()
3113 .filter(|indent_range| indent_range.contains(&outdent_position))
3114 .next_back()
3115 {
3116 range_to_truncate.end = outdent_position;
3117 }
3118 }
3119
3120 start_positions.sort_by_key(|b| b.start);
3121
3122 // Find the suggested indentation increases and decreases based on regexes.
3123 let mut regex_outdent_map = HashMap::default();
3124 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3125 let mut start_positions_iter = start_positions.iter().peekable();
3126
3127 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3128 self.for_each_line(
3129 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3130 ..Point::new(row_range.end, 0),
3131 |row, line| {
3132 if config
3133 .decrease_indent_pattern
3134 .as_ref()
3135 .is_some_and(|regex| regex.is_match(line))
3136 {
3137 indent_change_rows.push((row, Ordering::Less));
3138 }
3139 if config
3140 .increase_indent_pattern
3141 .as_ref()
3142 .is_some_and(|regex| regex.is_match(line))
3143 {
3144 indent_change_rows.push((row + 1, Ordering::Greater));
3145 }
3146 while let Some(pos) = start_positions_iter.peek() {
3147 if pos.start.row < row {
3148 let pos = start_positions_iter.next().unwrap();
3149 last_seen_suffix
3150 .entry(pos.suffix.to_string())
3151 .or_default()
3152 .push(pos.start);
3153 } else {
3154 break;
3155 }
3156 }
3157 for rule in &config.decrease_indent_patterns {
3158 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3159 let row_start_column = self.indent_size_for_line(row).len;
3160 let basis_row = rule
3161 .valid_after
3162 .iter()
3163 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3164 .flatten()
3165 .filter(|start_point| start_point.column <= row_start_column)
3166 .max_by_key(|start_point| start_point.row);
3167 if let Some(outdent_to_row) = basis_row {
3168 regex_outdent_map.insert(row, outdent_to_row.row);
3169 }
3170 break;
3171 }
3172 }
3173 },
3174 );
3175
3176 let mut indent_changes = indent_change_rows.into_iter().peekable();
3177 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3178 prev_non_blank_row.unwrap_or(0)
3179 } else {
3180 row_range.start.saturating_sub(1)
3181 };
3182
3183 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3184 Some(row_range.map(move |row| {
3185 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3186
3187 let mut indent_from_prev_row = false;
3188 let mut outdent_from_prev_row = false;
3189 let mut outdent_to_row = u32::MAX;
3190 let mut from_regex = false;
3191
3192 while let Some((indent_row, delta)) = indent_changes.peek() {
3193 match indent_row.cmp(&row) {
3194 Ordering::Equal => match delta {
3195 Ordering::Less => {
3196 from_regex = true;
3197 outdent_from_prev_row = true
3198 }
3199 Ordering::Greater => {
3200 indent_from_prev_row = true;
3201 from_regex = true
3202 }
3203 _ => {}
3204 },
3205
3206 Ordering::Greater => break,
3207 Ordering::Less => {}
3208 }
3209
3210 indent_changes.next();
3211 }
3212
3213 for range in &indent_ranges {
3214 if range.start.row >= row {
3215 break;
3216 }
3217 if range.start.row == prev_row && range.end > row_start {
3218 indent_from_prev_row = true;
3219 }
3220 if range.end > prev_row_start && range.end <= row_start {
3221 outdent_to_row = outdent_to_row.min(range.start.row);
3222 }
3223 }
3224
3225 if let Some(basis_row) = regex_outdent_map.get(&row) {
3226 indent_from_prev_row = false;
3227 outdent_to_row = *basis_row;
3228 from_regex = true;
3229 }
3230
3231 let within_error = error_ranges
3232 .iter()
3233 .any(|e| e.start.row < row && e.end > row_start);
3234
3235 let suggestion = if outdent_to_row == prev_row
3236 || (outdent_from_prev_row && indent_from_prev_row)
3237 {
3238 Some(IndentSuggestion {
3239 basis_row: prev_row,
3240 delta: Ordering::Equal,
3241 within_error: within_error && !from_regex,
3242 })
3243 } else if indent_from_prev_row {
3244 Some(IndentSuggestion {
3245 basis_row: prev_row,
3246 delta: Ordering::Greater,
3247 within_error: within_error && !from_regex,
3248 })
3249 } else if outdent_to_row < prev_row {
3250 Some(IndentSuggestion {
3251 basis_row: outdent_to_row,
3252 delta: Ordering::Equal,
3253 within_error: within_error && !from_regex,
3254 })
3255 } else if outdent_from_prev_row {
3256 Some(IndentSuggestion {
3257 basis_row: prev_row,
3258 delta: Ordering::Less,
3259 within_error: within_error && !from_regex,
3260 })
3261 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3262 {
3263 Some(IndentSuggestion {
3264 basis_row: prev_row,
3265 delta: Ordering::Equal,
3266 within_error: within_error && !from_regex,
3267 })
3268 } else {
3269 None
3270 };
3271
3272 prev_row = row;
3273 prev_row_start = row_start;
3274 suggestion
3275 }))
3276 }
3277
3278 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3279 while row > 0 {
3280 row -= 1;
3281 if !self.is_line_blank(row) {
3282 return Some(row);
3283 }
3284 }
3285 None
3286 }
3287
3288 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3289 let captures = self.syntax.captures(range, &self.text, |grammar| {
3290 grammar
3291 .highlights_config
3292 .as_ref()
3293 .map(|config| &config.query)
3294 });
3295 let highlight_maps = captures
3296 .grammars()
3297 .iter()
3298 .map(|grammar| grammar.highlight_map())
3299 .collect();
3300 (captures, highlight_maps)
3301 }
3302
3303 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3304 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3305 /// returned in chunks where each chunk has a single syntax highlighting style and
3306 /// diagnostic status.
3307 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3308 let range = range.start.to_offset(self)..range.end.to_offset(self);
3309
3310 let mut syntax = None;
3311 if language_aware {
3312 syntax = Some(self.get_highlights(range.clone()));
3313 }
3314 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3315 let diagnostics = language_aware;
3316 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3317 }
3318
3319 pub fn highlighted_text_for_range<T: ToOffset>(
3320 &self,
3321 range: Range<T>,
3322 override_style: Option<HighlightStyle>,
3323 syntax_theme: &SyntaxTheme,
3324 ) -> HighlightedText {
3325 HighlightedText::from_buffer_range(
3326 range,
3327 &self.text,
3328 &self.syntax,
3329 override_style,
3330 syntax_theme,
3331 )
3332 }
3333
3334 /// Invokes the given callback for each line of text in the given range of the buffer.
3335 /// Uses a callback to avoid allocating a string for each line.
3336 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3337 let mut line = String::new();
3338 let mut row = range.start.row;
3339 for chunk in self
3340 .as_rope()
3341 .chunks_in_range(range.to_offset(self))
3342 .chain(["\n"])
3343 {
3344 for (newline_ix, text) in chunk.split('\n').enumerate() {
3345 if newline_ix > 0 {
3346 callback(row, &line);
3347 row += 1;
3348 line.clear();
3349 }
3350 line.push_str(text);
3351 }
3352 }
3353 }
3354
3355 /// Iterates over every [`SyntaxLayer`] in the buffer.
3356 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3357 self.syntax_layers_for_range(0..self.len(), true)
3358 }
3359
3360 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3361 let offset = position.to_offset(self);
3362 self.syntax_layers_for_range(offset..offset, false)
3363 .filter(|l| l.node().end_byte() > offset)
3364 .last()
3365 }
3366
3367 pub fn syntax_layers_for_range<D: ToOffset>(
3368 &self,
3369 range: Range<D>,
3370 include_hidden: bool,
3371 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3372 self.syntax
3373 .layers_for_range(range, &self.text, include_hidden)
3374 }
3375
3376 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3377 &self,
3378 range: Range<D>,
3379 ) -> Option<SyntaxLayer<'_>> {
3380 let range = range.to_offset(self);
3381 self.syntax
3382 .layers_for_range(range, &self.text, false)
3383 .max_by(|a, b| {
3384 if a.depth != b.depth {
3385 a.depth.cmp(&b.depth)
3386 } else if a.offset.0 != b.offset.0 {
3387 a.offset.0.cmp(&b.offset.0)
3388 } else {
3389 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3390 }
3391 })
3392 }
3393
3394 /// Returns the main [`Language`].
3395 pub fn language(&self) -> Option<&Arc<Language>> {
3396 self.language.as_ref()
3397 }
3398
3399 /// Returns the [`Language`] at the given location.
3400 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3401 self.syntax_layer_at(position)
3402 .map(|info| info.language)
3403 .or(self.language.as_ref())
3404 }
3405
3406 /// Returns the settings for the language at the given location.
3407 pub fn settings_at<'a, D: ToOffset>(
3408 &'a self,
3409 position: D,
3410 cx: &'a App,
3411 ) -> Cow<'a, LanguageSettings> {
3412 language_settings(
3413 self.language_at(position).map(|l| l.name()),
3414 self.file.as_ref(),
3415 cx,
3416 )
3417 }
3418
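/// Returns a [`CharClassifier`] configured for the language scope at the
/// given position.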
3419 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3420 CharClassifier::new(self.language_scope_at(point))
3421 }
3422
3423 /// Returns the [`LanguageScope`] at the given location.
3424 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3425 let offset = position.to_offset(self);
3426 let mut scope = None;
3427 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3428
3429 // Use the layer that has the smallest node intersecting the given point.
3430 for layer in self
3431 .syntax
3432 .layers_for_range(offset..offset, &self.text, false)
3433 {
3434 let mut cursor = layer.node().walk();
3435
3436 let mut range = None;
3437 loop {
3438 let child_range = cursor.node().byte_range();
3439 if !child_range.contains(&offset) {
3440 break;
3441 }
3442
3443 range = Some(child_range);
3444 if cursor.goto_first_child_for_byte(offset).is_none() {
3445 break;
3446 }
3447 }
3448
3449 if let Some(range) = range
3450 && smallest_range_and_depth.as_ref().is_none_or(
3451 |(smallest_range, smallest_range_depth)| {
3452 if layer.depth > *smallest_range_depth {
3453 true
3454 } else if layer.depth == *smallest_range_depth {
3455 range.len() < smallest_range.len()
3456 } else {
3457 false
3458 }
3459 },
3460 )
3461 {
3462 smallest_range_and_depth = Some((range, layer.depth));
3463 scope = Some(LanguageScope {
3464 language: layer.language.clone(),
3465 override_id: layer.override_id(offset, &self.text),
3466 });
3467 }
3468 }
3469
3470 scope.or_else(|| {
3471 self.language.clone().map(|language| LanguageScope {
3472 language,
3473 override_id: None,
3474 })
3475 })
3476 }
3477
3478 /// Returns a tuple of the range and character kind of the word
3479 /// surrounding the given position.
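///
/// A sketch (not compiled as a doctest; `snapshot` is a snapshot whose text
/// is `let foo_bar = 1;`):
///
/// ```ignore
/// // With the position inside `foo_bar`, the whole identifier is returned.
/// let (range, _kind) = snapshot.surrounding_word(8, None);
/// assert_eq!(range, 4..11);
/// ```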
3480 pub fn surrounding_word<T: ToOffset>(
3481 &self,
3482 start: T,
3483 scope_context: Option<CharScopeContext>,
3484 ) -> (Range<usize>, Option<CharKind>) {
3485 let mut start = start.to_offset(self);
3486 let mut end = start;
3487 let mut next_chars = self.chars_at(start).take(128).peekable();
3488 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3489
3490 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3491 let word_kind = cmp::max(
3492 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3493 next_chars.peek().copied().map(|c| classifier.kind(c)),
3494 );
3495
3496 for ch in prev_chars {
3497 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3498 start -= ch.len_utf8();
3499 } else {
3500 break;
3501 }
3502 }
3503
3504 for ch in next_chars {
3505 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3506 end += ch.len_utf8();
3507 } else {
3508 break;
3509 }
3510 }
3511
3512 (start..end, word_kind)
3513 }
3514
    /// Moves `cursor` to the smallest descendant or ancestor syntax node enclosing the given
    /// range. When `require_larger` is true, the node found must be strictly larger than the
    /// query range.
3517 ///
3518 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3519 /// be moved to the root of the tree.
3520 fn goto_node_enclosing_range(
3521 cursor: &mut tree_sitter::TreeCursor,
3522 query_range: &Range<usize>,
3523 require_larger: bool,
3524 ) -> bool {
3525 let mut ascending = false;
3526 loop {
3527 let mut range = cursor.node().byte_range();
3528 if query_range.is_empty() {
3529 // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3531 if range.start > query_range.start {
3532 cursor.goto_previous_sibling();
3533 range = cursor.node().byte_range();
3534 }
3535 } else {
3536 // When the query range is non-empty and the current node ends exactly at the start,
3537 // move to the next sibling to find a node that extends beyond the start.
3538 if range.end == query_range.start {
3539 cursor.goto_next_sibling();
3540 range = cursor.node().byte_range();
3541 }
3542 }
3543
3544 let encloses = range.contains_inclusive(query_range)
3545 && (!require_larger || range.len() > query_range.len());
3546 if !encloses {
3547 ascending = true;
3548 if !cursor.goto_parent() {
3549 return false;
3550 }
3551 continue;
3552 } else if ascending {
3553 return true;
3554 }
3555
3556 // Descend into the current node.
3557 if cursor
3558 .goto_first_child_for_byte(query_range.start)
3559 .is_none()
3560 {
3561 return true;
3562 }
3563 }
3564 }
3565
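    /// Returns the smallest syntax node that encloses the given range and is
    /// strictly larger than it, considering all syntax layers and preferring
    /// the smallest such node across layers.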
3566 pub fn syntax_ancestor<'a, T: ToOffset>(
3567 &'a self,
3568 range: Range<T>,
3569 ) -> Option<tree_sitter::Node<'a>> {
3570 let range = range.start.to_offset(self)..range.end.to_offset(self);
3571 let mut result: Option<tree_sitter::Node<'a>> = None;
3572 for layer in self
3573 .syntax
3574 .layers_for_range(range.clone(), &self.text, true)
3575 {
3576 let mut cursor = layer.node().walk();
3577
3578 // Find the node that both contains the range and is larger than it.
3579 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3580 continue;
3581 }
3582
3583 let left_node = cursor.node();
3584 let mut layer_result = left_node;
3585
3586 // For an empty range, try to find another node immediately to the right of the range.
3587 if left_node.end_byte() == range.start {
3588 let mut right_node = None;
3589 while !cursor.goto_next_sibling() {
3590 if !cursor.goto_parent() {
3591 break;
3592 }
3593 }
3594
3595 while cursor.node().start_byte() == range.start {
3596 right_node = Some(cursor.node());
3597 if !cursor.goto_first_child() {
3598 break;
3599 }
3600 }
3601
3602 // If there is a candidate node on both sides of the (empty) range, then
3603 // decide between the two by favoring a named node over an anonymous token.
3604 // If both nodes are the same in that regard, favor the right one.
3605 if let Some(right_node) = right_node
3606 && (right_node.is_named() || !left_node.is_named())
3607 {
3608 layer_result = right_node;
3609 }
3610 }
3611
3612 if let Some(previous_result) = &result
3613 && previous_result.byte_range().len() < layer_result.byte_range().len()
3614 {
3615 continue;
3616 }
3617 result = Some(layer_result);
3618 }
3619
3620 result
3621 }
3622
3623 /// Find the previous sibling syntax node at the given range.
3624 ///
3625 /// This function locates the syntax node that precedes the node containing
3626 /// the given range. It searches hierarchically by:
3627 /// 1. Finding the node that contains the given range
3628 /// 2. Looking for the previous sibling at the same tree level
3629 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3630 ///
3631 /// Returns `None` if there is no previous sibling at any ancestor level.
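    ///
    /// Illustrative sketch (not a doctest; `snapshot` and `selection_range` are
    /// assumed to be in scope):
    ///
    /// ```ignore
    /// if let Some(prev) = snapshot.syntax_prev_sibling(selection_range) {
    ///     let target = prev.byte_range();
    ///     // e.g. move the selection to `target`
    /// }
    /// ```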
3632 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3633 &'a self,
3634 range: Range<T>,
3635 ) -> Option<tree_sitter::Node<'a>> {
3636 let range = range.start.to_offset(self)..range.end.to_offset(self);
3637 let mut result: Option<tree_sitter::Node<'a>> = None;
3638
3639 for layer in self
3640 .syntax
3641 .layers_for_range(range.clone(), &self.text, true)
3642 {
3643 let mut cursor = layer.node().walk();
3644
3645 // Find the node that contains the range
3646 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3647 continue;
3648 }
3649
3650 // Look for the previous sibling, moving up ancestor levels if needed
3651 loop {
3652 if cursor.goto_previous_sibling() {
3653 let layer_result = cursor.node();
3654
3655 if let Some(previous_result) = &result {
3656 if previous_result.byte_range().end < layer_result.byte_range().end {
3657 continue;
3658 }
3659 }
3660 result = Some(layer_result);
3661 break;
3662 }
3663
3664 // No sibling found at this level, try moving up to parent
3665 if !cursor.goto_parent() {
3666 break;
3667 }
3668 }
3669 }
3670
3671 result
3672 }
3673
3674 /// Find the next sibling syntax node at the given range.
3675 ///
3676 /// This function locates the syntax node that follows the node containing
3677 /// the given range. It searches hierarchically by:
3678 /// 1. Finding the node that contains the given range
3679 /// 2. Looking for the next sibling at the same tree level
3680 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3681 ///
3682 /// Returns `None` if there is no next sibling at any ancestor level.
3683 pub fn syntax_next_sibling<'a, T: ToOffset>(
3684 &'a self,
3685 range: Range<T>,
3686 ) -> Option<tree_sitter::Node<'a>> {
3687 let range = range.start.to_offset(self)..range.end.to_offset(self);
3688 let mut result: Option<tree_sitter::Node<'a>> = None;
3689
3690 for layer in self
3691 .syntax
3692 .layers_for_range(range.clone(), &self.text, true)
3693 {
3694 let mut cursor = layer.node().walk();
3695
3696 // Find the node that contains the range
3697 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3698 continue;
3699 }
3700
3701 // Look for the next sibling, moving up ancestor levels if needed
3702 loop {
3703 if cursor.goto_next_sibling() {
3704 let layer_result = cursor.node();
3705
3706 if let Some(previous_result) = &result {
3707 if previous_result.byte_range().start > layer_result.byte_range().start {
3708 continue;
3709 }
3710 }
3711 result = Some(layer_result);
3712 break;
3713 }
3714
3715 // No sibling found at this level, try moving up to parent
3716 if !cursor.goto_parent() {
3717 break;
3718 }
3719 }
3720 }
3721
3722 result
3723 }
3724
    /// Returns the root syntax node within the given row.
3726 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3727 let start_offset = position.to_offset(self);
3728
3729 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3730
3731 let layer = self
3732 .syntax
3733 .layers_for_range(start_offset..start_offset, &self.text, true)
3734 .next()?;
3735
3736 let mut cursor = layer.node().walk();
3737
        // Descend to the first leaf that touches the start offset.
3739 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3740 if cursor.node().end_byte() == start_offset {
3741 cursor.goto_next_sibling();
3742 }
3743 }
3744
3745 // Ascend to the root node within the same row.
3746 while cursor.goto_parent() {
3747 if cursor.node().start_position().row != row {
3748 break;
3749 }
3750 }
3751
3752 Some(cursor.node())
3753 }
3754
3755 /// Returns the outline for the buffer.
3756 ///
3757 /// This method allows passing an optional [`SyntaxTheme`] to
3758 /// syntax-highlight the returned symbols.
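    ///
    /// Illustrative sketch (not a doctest; `snapshot` is assumed to be a
    /// `BufferSnapshot` in scope):
    ///
    /// ```ignore
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```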
3759 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3760 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3761 }
3762
3763 /// Returns all the symbols that contain the given position.
3764 ///
3765 /// This method allows passing an optional [`SyntaxTheme`] to
3766 /// syntax-highlight the returned symbols.
3767 pub fn symbols_containing<T: ToOffset>(
3768 &self,
3769 position: T,
3770 theme: Option<&SyntaxTheme>,
3771 ) -> Vec<OutlineItem<Anchor>> {
3772 let position = position.to_offset(self);
3773 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3774 let end = self.clip_offset(position + 1, Bias::Right);
3775 let mut items = self.outline_items_containing(start..end, false, theme);
3776 let mut prev_depth = None;
3777 items.retain(|item| {
3778 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3779 prev_depth = Some(item.depth);
3780 result
3781 });
3782 items
3783 }
3784
3785 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3786 let range = range.to_offset(self);
3787 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3788 grammar.outline_config.as_ref().map(|c| &c.query)
3789 });
3790 let configs = matches
3791 .grammars()
3792 .iter()
3793 .map(|g| g.outline_config.as_ref().unwrap())
3794 .collect::<Vec<_>>();
3795
3796 while let Some(mat) = matches.peek() {
3797 let config = &configs[mat.grammar_index];
3798 let containing_item_node = maybe!({
3799 let item_node = mat.captures.iter().find_map(|cap| {
3800 if cap.index == config.item_capture_ix {
3801 Some(cap.node)
3802 } else {
3803 None
3804 }
3805 })?;
3806
3807 let item_byte_range = item_node.byte_range();
3808 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3809 None
3810 } else {
3811 Some(item_node)
3812 }
3813 });
3814
3815 if let Some(item_node) = containing_item_node {
3816 return Some(
3817 Point::from_ts_point(item_node.start_position())
3818 ..Point::from_ts_point(item_node.end_position()),
3819 );
3820 }
3821
3822 matches.advance();
3823 }
3824 None
3825 }
3826
3827 pub fn outline_items_containing<T: ToOffset>(
3828 &self,
3829 range: Range<T>,
3830 include_extra_context: bool,
3831 theme: Option<&SyntaxTheme>,
3832 ) -> Vec<OutlineItem<Anchor>> {
3833 let range = range.to_offset(self);
3834 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3835 grammar.outline_config.as_ref().map(|c| &c.query)
3836 });
3837
3838 let mut items = Vec::new();
3839 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3840 while let Some(mat) = matches.peek() {
3841 let config = matches.grammars()[mat.grammar_index]
3842 .outline_config
3843 .as_ref()
3844 .unwrap();
3845 if let Some(item) =
3846 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3847 {
3848 items.push(item);
3849 } else if let Some(capture) = mat
3850 .captures
3851 .iter()
3852 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3853 {
3854 let capture_range = capture.node.start_position()..capture.node.end_position();
3855 let mut capture_row_range =
3856 capture_range.start.row as u32..capture_range.end.row as u32;
3857 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3858 {
3859 capture_row_range.end -= 1;
3860 }
3861 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3862 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3863 last_row_range.end = capture_row_range.end;
3864 } else {
3865 annotation_row_ranges.push(capture_row_range);
3866 }
3867 } else {
3868 annotation_row_ranges.push(capture_row_range);
3869 }
3870 }
3871 matches.advance();
3872 }
3873
3874 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3875
3876 // Assign depths based on containment relationships and convert to anchors.
3877 let mut item_ends_stack = Vec::<Point>::new();
3878 let mut anchor_items = Vec::new();
3879 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3880 for item in items {
3881 while let Some(last_end) = item_ends_stack.last().copied() {
3882 if last_end < item.range.end {
3883 item_ends_stack.pop();
3884 } else {
3885 break;
3886 }
3887 }
3888
3889 let mut annotation_row_range = None;
3890 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3891 let row_preceding_item = item.range.start.row.saturating_sub(1);
3892 if next_annotation_row_range.end < row_preceding_item {
3893 annotation_row_ranges.next();
3894 } else {
3895 if next_annotation_row_range.end == row_preceding_item {
3896 annotation_row_range = Some(next_annotation_row_range.clone());
3897 annotation_row_ranges.next();
3898 }
3899 break;
3900 }
3901 }
3902
3903 anchor_items.push(OutlineItem {
3904 depth: item_ends_stack.len(),
3905 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3906 text: item.text,
3907 highlight_ranges: item.highlight_ranges,
3908 name_ranges: item.name_ranges,
3909 body_range: item
3910 .body_range
3911 .map(|r| self.anchor_after(r.start)..self.anchor_before(r.end)),
3912 annotation_range: annotation_row_range.map(|annotation_range| {
3913 self.anchor_after(Point::new(annotation_range.start, 0))
3914 ..self.anchor_before(Point::new(
3915 annotation_range.end,
3916 self.line_len(annotation_range.end),
3917 ))
3918 }),
3919 });
3920 item_ends_stack.push(item.range.end);
3921 }
3922
3923 anchor_items
3924 }
3925
3926 fn next_outline_item(
3927 &self,
3928 config: &OutlineConfig,
3929 mat: &SyntaxMapMatch,
3930 range: &Range<usize>,
3931 include_extra_context: bool,
3932 theme: Option<&SyntaxTheme>,
3933 ) -> Option<OutlineItem<Point>> {
3934 let item_node = mat.captures.iter().find_map(|cap| {
3935 if cap.index == config.item_capture_ix {
3936 Some(cap.node)
3937 } else {
3938 None
3939 }
3940 })?;
3941
3942 let item_byte_range = item_node.byte_range();
3943 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3944 return None;
3945 }
3946 let item_point_range = Point::from_ts_point(item_node.start_position())
3947 ..Point::from_ts_point(item_node.end_position());
3948
3949 let mut open_point = None;
3950 let mut close_point = None;
3951
3952 let mut buffer_ranges = Vec::new();
3953 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
3954 let mut range = node.start_byte()..node.end_byte();
3955 let start = node.start_position();
3956 if node.end_position().row > start.row {
3957 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3958 }
3959
3960 if !range.is_empty() {
3961 buffer_ranges.push((range, node_is_name));
3962 }
3963 };
3964
3965 for capture in mat.captures {
3966 if capture.index == config.name_capture_ix {
3967 add_to_buffer_ranges(capture.node, true);
3968 } else if Some(capture.index) == config.context_capture_ix
3969 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3970 {
3971 add_to_buffer_ranges(capture.node, false);
3972 } else {
3973 if Some(capture.index) == config.open_capture_ix {
3974 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3975 } else if Some(capture.index) == config.close_capture_ix {
3976 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3977 }
3978 }
3979 }
3980
3981 if buffer_ranges.is_empty() {
3982 return None;
3983 }
3984
3985 let mut text = String::new();
3986 let mut highlight_ranges = Vec::new();
3987 let mut name_ranges = Vec::new();
3988 let mut chunks = self.chunks(
3989 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3990 true,
3991 );
3992 let mut last_buffer_range_end = 0;
3993 for (buffer_range, is_name) in buffer_ranges {
3994 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3995 if space_added {
3996 text.push(' ');
3997 }
3998 let before_append_len = text.len();
3999 let mut offset = buffer_range.start;
4000 chunks.seek(buffer_range.clone());
4001 for mut chunk in chunks.by_ref() {
4002 if chunk.text.len() > buffer_range.end - offset {
4003 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4004 offset = buffer_range.end;
4005 } else {
4006 offset += chunk.text.len();
4007 }
4008 let style = chunk
4009 .syntax_highlight_id
4010 .zip(theme)
4011 .and_then(|(highlight, theme)| highlight.style(theme));
4012 if let Some(style) = style {
4013 let start = text.len();
4014 let end = start + chunk.text.len();
4015 highlight_ranges.push((start..end, style));
4016 }
4017 text.push_str(chunk.text);
4018 if offset >= buffer_range.end {
4019 break;
4020 }
4021 }
4022 if is_name {
4023 let after_append_len = text.len();
4024 let start = if space_added && !name_ranges.is_empty() {
4025 before_append_len - 1
4026 } else {
4027 before_append_len
4028 };
4029 name_ranges.push(start..after_append_len);
4030 }
4031 last_buffer_range_end = buffer_range.end;
4032 }
4033
4034 Some(OutlineItem {
4035 depth: 0, // We'll calculate the depth later
4036 range: item_point_range,
4037 text,
4038 highlight_ranges,
4039 name_ranges,
4040 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4041 annotation_range: None,
4042 })
4043 }
4044
4045 pub fn function_body_fold_ranges<T: ToOffset>(
4046 &self,
4047 within: Range<T>,
4048 ) -> impl Iterator<Item = Range<usize>> + '_ {
4049 self.text_object_ranges(within, TreeSitterOptions::default())
4050 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4051 }
4052
    /// For each grammar layer intersecting the given range, runs the
    /// [`tree_sitter::Query`] selected by the `query` callback.
4055 pub fn matches(
4056 &self,
4057 range: Range<usize>,
4058 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4059 ) -> SyntaxMapMatches<'_> {
4060 self.syntax.matches(range, self, query)
4061 }
4062
4063 pub fn all_bracket_ranges(
4064 &self,
4065 range: Range<usize>,
4066 ) -> impl Iterator<Item = BracketMatch> + '_ {
4067 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4068 grammar.brackets_config.as_ref().map(|c| &c.query)
4069 });
4070 let configs = matches
4071 .grammars()
4072 .iter()
4073 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4074 .collect::<Vec<_>>();
4075
4076 iter::from_fn(move || {
4077 while let Some(mat) = matches.peek() {
4078 let mut open = None;
4079 let mut close = None;
4080 let config = &configs[mat.grammar_index];
4081 let pattern = &config.patterns[mat.pattern_index];
4082 for capture in mat.captures {
4083 if capture.index == config.open_capture_ix {
4084 open = Some(capture.node.byte_range());
4085 } else if capture.index == config.close_capture_ix {
4086 close = Some(capture.node.byte_range());
4087 }
4088 }
4089
4090 matches.advance();
4091
4092 let Some((open_range, close_range)) = open.zip(close) else {
4093 continue;
4094 };
4095
4096 let bracket_range = open_range.start..=close_range.end;
4097 if !bracket_range.overlaps(&range) {
4098 continue;
4099 }
4100
4101 return Some(BracketMatch {
4102 open_range,
4103 close_range,
4104 newline_only: pattern.newline_only,
4105 });
4106 }
4107 None
4108 })
4109 }
4110
    /// Returns bracket range pairs overlapping or adjacent to `range`.
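    ///
    /// Illustrative sketch (not a doctest; `snapshot` and `offset` are assumed
    /// to be in scope):
    ///
    /// ```ignore
    /// let pairs: Vec<_> = snapshot
    ///     .bracket_ranges(offset..offset)
    ///     .map(|pair| (pair.open_range, pair.close_range))
    ///     .collect();
    /// ```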
4112 pub fn bracket_ranges<T: ToOffset>(
4113 &self,
4114 range: Range<T>,
4115 ) -> impl Iterator<Item = BracketMatch> + '_ {
4116 // Find bracket pairs that *inclusively* contain the given range.
4117 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4118 self.all_bracket_ranges(range)
4119 .filter(|pair| !pair.newline_only)
4120 }
4121
4122 pub fn debug_variables_query<T: ToOffset>(
4123 &self,
4124 range: Range<T>,
4125 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4126 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4127
4128 let mut matches = self.syntax.matches_with_options(
4129 range.clone(),
4130 &self.text,
4131 TreeSitterOptions::default(),
4132 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4133 );
4134
4135 let configs = matches
4136 .grammars()
4137 .iter()
4138 .map(|grammar| grammar.debug_variables_config.as_ref())
4139 .collect::<Vec<_>>();
4140
4141 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4142
4143 iter::from_fn(move || {
4144 loop {
4145 while let Some(capture) = captures.pop() {
4146 if capture.0.overlaps(&range) {
4147 return Some(capture);
4148 }
4149 }
4150
4151 let mat = matches.peek()?;
4152
4153 let Some(config) = configs[mat.grammar_index].as_ref() else {
4154 matches.advance();
4155 continue;
4156 };
4157
4158 for capture in mat.captures {
4159 let Some(ix) = config
4160 .objects_by_capture_ix
4161 .binary_search_by_key(&capture.index, |e| e.0)
4162 .ok()
4163 else {
4164 continue;
4165 };
4166 let text_object = config.objects_by_capture_ix[ix].1;
4167 let byte_range = capture.node.byte_range();
4168
4169 let mut found = false;
4170 for (range, existing) in captures.iter_mut() {
4171 if existing == &text_object {
4172 range.start = range.start.min(byte_range.start);
4173 range.end = range.end.max(byte_range.end);
4174 found = true;
4175 break;
4176 }
4177 }
4178
4179 if !found {
4180 captures.push((byte_range, text_object));
4181 }
4182 }
4183
4184 matches.advance();
4185 }
4186 })
4187 }
4188
4189 pub fn text_object_ranges<T: ToOffset>(
4190 &self,
4191 range: Range<T>,
4192 options: TreeSitterOptions,
4193 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4194 let range =
4195 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4196
4197 let mut matches =
4198 self.syntax
4199 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4200 grammar.text_object_config.as_ref().map(|c| &c.query)
4201 });
4202
4203 let configs = matches
4204 .grammars()
4205 .iter()
4206 .map(|grammar| grammar.text_object_config.as_ref())
4207 .collect::<Vec<_>>();
4208
4209 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4210
4211 iter::from_fn(move || {
4212 loop {
4213 while let Some(capture) = captures.pop() {
4214 if capture.0.overlaps(&range) {
4215 return Some(capture);
4216 }
4217 }
4218
4219 let mat = matches.peek()?;
4220
4221 let Some(config) = configs[mat.grammar_index].as_ref() else {
4222 matches.advance();
4223 continue;
4224 };
4225
4226 for capture in mat.captures {
4227 let Some(ix) = config
4228 .text_objects_by_capture_ix
4229 .binary_search_by_key(&capture.index, |e| e.0)
4230 .ok()
4231 else {
4232 continue;
4233 };
4234 let text_object = config.text_objects_by_capture_ix[ix].1;
4235 let byte_range = capture.node.byte_range();
4236
4237 let mut found = false;
4238 for (range, existing) in captures.iter_mut() {
4239 if existing == &text_object {
4240 range.start = range.start.min(byte_range.start);
4241 range.end = range.end.max(byte_range.end);
4242 found = true;
4243 break;
4244 }
4245 }
4246
4247 if !found {
4248 captures.push((byte_range, text_object));
4249 }
4250 }
4251
4252 matches.advance();
4253 }
4254 })
4255 }
4256
    /// Returns all bracket pairs that enclose the given range.
4258 pub fn enclosing_bracket_ranges<T: ToOffset>(
4259 &self,
4260 range: Range<T>,
4261 ) -> impl Iterator<Item = BracketMatch> + '_ {
4262 let range = range.start.to_offset(self)..range.end.to_offset(self);
4263
4264 self.bracket_ranges(range.clone()).filter(move |pair| {
4265 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4266 })
4267 }
4268
    /// Returns the smallest pair of bracket ranges enclosing the given range, or `None` if no
    /// brackets enclose it.
    ///
    /// An optional `range_filter` can be passed to restrict which bracket pairs are considered.
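    ///
    /// Illustrative sketch (not a doctest; `snapshot` and `offset` are assumed to
    /// be in scope, and the filter shown is purely hypothetical):
    ///
    /// ```ignore
    /// let filter = |open: Range<usize>, _close: Range<usize>| open.len() == 1;
    /// let innermost =
    ///     snapshot.innermost_enclosing_bracket_ranges(offset..offset, Some(&filter));
    /// ```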
4272 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4273 &self,
4274 range: Range<T>,
4275 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4276 ) -> Option<(Range<usize>, Range<usize>)> {
4277 let range = range.start.to_offset(self)..range.end.to_offset(self);
4278
4279 // Get the ranges of the innermost pair of brackets.
4280 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4281
4282 for pair in self.enclosing_bracket_ranges(range) {
4283 if let Some(range_filter) = range_filter
4284 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4285 {
4286 continue;
4287 }
4288
4289 let len = pair.close_range.end - pair.open_range.start;
4290
4291 if let Some((existing_open, existing_close)) = &result {
4292 let existing_len = existing_close.end - existing_open.start;
4293 if len > existing_len {
4294 continue;
4295 }
4296 }
4297
4298 result = Some((pair.open_range, pair.close_range));
4299 }
4300
4301 result
4302 }
4303
4304 /// Returns anchor ranges for any matches of the redaction query.
4305 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4306 /// will be run on the relevant section of the buffer.
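    ///
    /// Illustrative sketch (not a doctest; `snapshot` is assumed in scope):
    ///
    /// ```ignore
    /// let redactions: Vec<Range<usize>> =
    ///     snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```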
4307 pub fn redacted_ranges<T: ToOffset>(
4308 &self,
4309 range: Range<T>,
4310 ) -> impl Iterator<Item = Range<usize>> + '_ {
4311 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4312 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4313 grammar
4314 .redactions_config
4315 .as_ref()
4316 .map(|config| &config.query)
4317 });
4318
4319 let configs = syntax_matches
4320 .grammars()
4321 .iter()
4322 .map(|grammar| grammar.redactions_config.as_ref())
4323 .collect::<Vec<_>>();
4324
4325 iter::from_fn(move || {
4326 let redacted_range = syntax_matches
4327 .peek()
4328 .and_then(|mat| {
4329 configs[mat.grammar_index].and_then(|config| {
4330 mat.captures
4331 .iter()
4332 .find(|capture| capture.index == config.redaction_capture_ix)
4333 })
4334 })
4335 .map(|mat| mat.node.byte_range());
4336 syntax_matches.advance();
4337 redacted_range
4338 })
4339 }
4340
4341 pub fn injections_intersecting_range<T: ToOffset>(
4342 &self,
4343 range: Range<T>,
4344 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4345 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4346
4347 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4348 grammar
4349 .injection_config
4350 .as_ref()
4351 .map(|config| &config.query)
4352 });
4353
4354 let configs = syntax_matches
4355 .grammars()
4356 .iter()
4357 .map(|grammar| grammar.injection_config.as_ref())
4358 .collect::<Vec<_>>();
4359
4360 iter::from_fn(move || {
4361 let ranges = syntax_matches.peek().and_then(|mat| {
4362 let config = &configs[mat.grammar_index]?;
4363 let content_capture_range = mat.captures.iter().find_map(|capture| {
4364 if capture.index == config.content_capture_ix {
4365 Some(capture.node.byte_range())
4366 } else {
4367 None
4368 }
4369 })?;
4370 let language = self.language_at(content_capture_range.start)?;
4371 Some((content_capture_range, language))
4372 });
4373 syntax_matches.advance();
4374 ranges
4375 })
4376 }
4377
4378 pub fn runnable_ranges(
4379 &self,
4380 offset_range: Range<usize>,
4381 ) -> impl Iterator<Item = RunnableRange> + '_ {
4382 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4383 grammar.runnable_config.as_ref().map(|config| &config.query)
4384 });
4385
4386 let test_configs = syntax_matches
4387 .grammars()
4388 .iter()
4389 .map(|grammar| grammar.runnable_config.as_ref())
4390 .collect::<Vec<_>>();
4391
4392 iter::from_fn(move || {
4393 loop {
4394 let mat = syntax_matches.peek()?;
4395
4396 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4397 let mut run_range = None;
4398 let full_range = mat.captures.iter().fold(
4399 Range {
4400 start: usize::MAX,
4401 end: 0,
4402 },
4403 |mut acc, next| {
4404 let byte_range = next.node.byte_range();
4405 if acc.start > byte_range.start {
4406 acc.start = byte_range.start;
4407 }
4408 if acc.end < byte_range.end {
4409 acc.end = byte_range.end;
4410 }
4411 acc
4412 },
4413 );
4414 if full_range.start > full_range.end {
4415 // We did not find a full spanning range of this match.
4416 return None;
4417 }
4418 let extra_captures: SmallVec<[_; 1]> =
4419 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4420 test_configs
4421 .extra_captures
4422 .get(capture.index as usize)
4423 .cloned()
4424 .and_then(|tag_name| match tag_name {
4425 RunnableCapture::Named(name) => {
4426 Some((capture.node.byte_range(), name))
4427 }
4428 RunnableCapture::Run => {
4429 let _ = run_range.insert(capture.node.byte_range());
4430 None
4431 }
4432 })
4433 }));
4434 let run_range = run_range?;
4435 let tags = test_configs
4436 .query
4437 .property_settings(mat.pattern_index)
4438 .iter()
4439 .filter_map(|property| {
4440 if *property.key == *"tag" {
4441 property
4442 .value
4443 .as_ref()
4444 .map(|value| RunnableTag(value.to_string().into()))
4445 } else {
4446 None
4447 }
4448 })
4449 .collect();
4450 let extra_captures = extra_captures
4451 .into_iter()
4452 .map(|(range, name)| {
4453 (
4454 name.to_string(),
4455 self.text_for_range(range).collect::<String>(),
4456 )
4457 })
4458 .collect();
4459 // All tags should have the same range.
4460 Some(RunnableRange {
4461 run_range,
4462 full_range,
4463 runnable: Runnable {
4464 tags,
4465 language: mat.language,
4466 buffer: self.remote_id(),
4467 },
4468 extra_captures,
4469 buffer_id: self.remote_id(),
4470 })
4471 });
4472
4473 syntax_matches.advance();
4474 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. However, a match
                    // whose captures don't include a run marker shouldn't end this iterator, so in
                    // that case we loop around and try the next match instead of returning `None`.
4477 return test_range;
4478 }
4479 }
4480 })
4481 }
4482
4483 /// Returns selections for remote peers intersecting the given range.
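    ///
    /// Illustrative sketch (not a doctest; `snapshot` and `anchor_range` are
    /// assumed to be in scope):
    ///
    /// ```ignore
    /// for (replica_id, _line_mode, _cursor_shape, selections) in
    ///     snapshot.selections_in_range(anchor_range, false)
    /// {
    ///     for selection in selections {
    ///         // e.g. render a remote cursor for `replica_id`
    ///     }
    /// }
    /// ```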
4484 #[allow(clippy::type_complexity)]
4485 pub fn selections_in_range(
4486 &self,
4487 range: Range<Anchor>,
4488 include_local: bool,
4489 ) -> impl Iterator<
4490 Item = (
4491 ReplicaId,
4492 bool,
4493 CursorShape,
4494 impl Iterator<Item = &Selection<Anchor>> + '_,
4495 ),
4496 > + '_ {
4497 self.remote_selections
4498 .iter()
4499 .filter(move |(replica_id, set)| {
4500 (include_local || **replica_id != self.text.replica_id())
4501 && !set.selections.is_empty()
4502 })
4503 .map(move |(replica_id, set)| {
4504 let start_ix = match set.selections.binary_search_by(|probe| {
4505 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4506 }) {
4507 Ok(ix) | Err(ix) => ix,
4508 };
4509 let end_ix = match set.selections.binary_search_by(|probe| {
4510 probe.start.cmp(&range.end, self).then(Ordering::Less)
4511 }) {
4512 Ok(ix) | Err(ix) => ix,
4513 };
4514
4515 (
4516 *replica_id,
4517 set.line_mode,
4518 set.cursor_shape,
4519 set.selections[start_ix..end_ix].iter(),
4520 )
4521 })
4522 }
4523
    /// Returns whether the buffer contains any diagnostics.
4525 pub fn has_diagnostics(&self) -> bool {
4526 !self.diagnostics.is_empty()
4527 }
4528
4529 /// Returns all the diagnostics intersecting the given range.
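    ///
    /// Illustrative sketch (not a doctest; `snapshot` is assumed in scope):
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     eprintln!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```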
4530 pub fn diagnostics_in_range<'a, T, O>(
4531 &'a self,
4532 search_range: Range<T>,
4533 reversed: bool,
4534 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4535 where
4536 T: 'a + Clone + ToOffset,
4537 O: 'a + FromAnchor,
4538 {
4539 let mut iterators: Vec<_> = self
4540 .diagnostics
4541 .iter()
4542 .map(|(_, collection)| {
4543 collection
4544 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4545 .peekable()
4546 })
4547 .collect();
4548
4549 std::iter::from_fn(move || {
4550 let (next_ix, _) = iterators
4551 .iter_mut()
4552 .enumerate()
4553 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4554 .min_by(|(_, a), (_, b)| {
4555 let cmp = a
4556 .range
4557 .start
4558 .cmp(&b.range.start, self)
4559 // when range is equal, sort by diagnostic severity
4560 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4561 // and stabilize order with group_id
4562 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4563 if reversed { cmp.reverse() } else { cmp }
4564 })?;
4565 iterators[next_ix]
4566 .next()
4567 .map(
4568 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4569 diagnostic,
4570 range: FromAnchor::from_anchor(&range.start, self)
4571 ..FromAnchor::from_anchor(&range.end, self),
4572 },
4573 )
4574 })
4575 }
4576
4577 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4578 /// should be used instead.
4579 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4580 &self.diagnostics
4581 }
4582
4583 /// Returns all the diagnostic groups associated with the given
4584 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
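    ///
    /// Illustrative sketch (not a doctest; `snapshot` is assumed in scope, and
    /// the field accesses follow this file's usage of [`DiagnosticGroup`]):
    ///
    /// ```ignore
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     eprintln!("{:?}: {}", server_id, primary.diagnostic.message);
    /// }
    /// ```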
4586 pub fn diagnostic_groups(
4587 &self,
4588 language_server_id: Option<LanguageServerId>,
4589 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4590 let mut groups = Vec::new();
4591
4592 if let Some(language_server_id) = language_server_id {
4593 if let Ok(ix) = self
4594 .diagnostics
4595 .binary_search_by_key(&language_server_id, |e| e.0)
4596 {
4597 self.diagnostics[ix]
4598 .1
4599 .groups(language_server_id, &mut groups, self);
4600 }
4601 } else {
4602 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4603 diagnostics.groups(*language_server_id, &mut groups, self);
4604 }
4605 }
4606
4607 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4608 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4609 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4610 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4611 });
4612
4613 groups
4614 }
4615
4616 /// Returns an iterator over the diagnostics for the given group.
4617 pub fn diagnostic_group<O>(
4618 &self,
4619 group_id: usize,
4620 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4621 where
4622 O: FromAnchor + 'static,
4623 {
4624 self.diagnostics
4625 .iter()
4626 .flat_map(move |(_, set)| set.group(group_id, self))
4627 }
4628
4629 /// An integer version number that accounts for all updates besides
4630 /// the buffer's text itself (which is versioned via a version vector).
4631 pub fn non_text_state_update_count(&self) -> usize {
4632 self.non_text_state_update_count
4633 }
4634
4635 /// An integer version that changes when the buffer's syntax changes.
4636 pub fn syntax_update_count(&self) -> usize {
4637 self.syntax.update_count()
4638 }
4639
    /// Returns a snapshot of the underlying file.
4641 pub fn file(&self) -> Option<&Arc<dyn File>> {
4642 self.file.as_ref()
4643 }
4644
4645 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4646 if let Some(file) = self.file() {
4647 if file.path().file_name().is_none() || include_root {
4648 Some(file.full_path(cx).to_string_lossy().into_owned())
4649 } else {
4650 Some(file.path().display(file.path_style(cx)).to_string())
4651 }
4652 } else {
4653 None
4654 }
4655 }
4656
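    /// Returns the words in the given range, keyed by their text and mapped to
    /// their anchor ranges, optionally filtered by a fuzzy query.
    ///
    /// Illustrative sketch (not a doctest; `snapshot` is assumed in scope):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```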
4657 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4658 let query_str = query.fuzzy_contents;
4659 if query_str.is_some_and(|query| query.is_empty()) {
4660 return BTreeMap::default();
4661 }
4662
4663 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4664 language,
4665 override_id: None,
4666 }));
4667
4668 let mut query_ix = 0;
4669 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4670 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4671
4672 let mut words = BTreeMap::default();
4673 let mut current_word_start_ix = None;
4674 let mut chunk_ix = query.range.start;
4675 for chunk in self.chunks(query.range, false) {
4676 for (i, c) in chunk.text.char_indices() {
4677 let ix = chunk_ix + i;
4678 if classifier.is_word(c) {
4679 if current_word_start_ix.is_none() {
4680 current_word_start_ix = Some(ix);
4681 }
4682
4683 if let Some(query_chars) = &query_chars
4684 && query_ix < query_len
4685 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4686 {
4687 query_ix += 1;
4688 }
4689 continue;
4690 } else if let Some(word_start) = current_word_start_ix.take()
4691 && query_ix == query_len
4692 {
4693 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4694 let mut word_text = self.text_for_range(word_start..ix).peekable();
4695 let first_char = word_text
4696 .peek()
4697 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words and
                    // "words" that start with a digit.
4699 if !query.skip_digits
4700 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4701 {
4702 words.insert(word_text.collect(), word_range);
4703 }
4704 }
4705 query_ix = 0;
4706 }
4707 chunk_ix += chunk.text.len();
4708 }
4709
4710 words
4711 }
4712}
4713
4714pub struct WordsQuery<'a> {
    /// Only returns words that contain all of this string's characters, in order
    /// (matched case-insensitively).
4716 pub fuzzy_contents: Option<&'a str>,
4717 /// Skips words that start with a digit.
4718 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4720 pub range: Range<usize>,
4721}
4722
4723fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4724 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4725}
4726
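/// Computes the leading indentation of `text`, counting an initial run of
/// spaces and tabs and taking the indent kind from the first such character.
///
/// Illustrative: `indent_size_for_text("    let x = 1;".chars())` yields an
/// `IndentSize` of four spaces.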
4727fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4728 let mut result = IndentSize::spaces(0);
4729 for c in text {
4730 let kind = match c {
4731 ' ' => IndentKind::Space,
4732 '\t' => IndentKind::Tab,
4733 _ => break,
4734 };
4735 if result.len == 0 {
4736 result.kind = kind;
4737 }
4738 result.len += 1;
4739 }
4740 result
4741}
4742
4743impl Clone for BufferSnapshot {
4744 fn clone(&self) -> Self {
4745 Self {
4746 text: self.text.clone(),
4747 syntax: self.syntax.clone(),
4748 file: self.file.clone(),
4749 remote_selections: self.remote_selections.clone(),
4750 diagnostics: self.diagnostics.clone(),
4751 language: self.language.clone(),
4752 non_text_state_update_count: self.non_text_state_update_count,
4753 }
4754 }
4755}
4756
4757impl Deref for BufferSnapshot {
4758 type Target = text::BufferSnapshot;
4759
4760 fn deref(&self) -> &Self::Target {
4761 &self.text
4762 }
4763}
4764
4765unsafe impl Send for BufferChunks<'_> {}
4766
4767impl<'a> BufferChunks<'a> {
4768 pub(crate) fn new(
4769 text: &'a Rope,
4770 range: Range<usize>,
4771 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4772 diagnostics: bool,
4773 buffer_snapshot: Option<&'a BufferSnapshot>,
4774 ) -> Self {
4775 let mut highlights = None;
4776 if let Some((captures, highlight_maps)) = syntax {
4777 highlights = Some(BufferChunkHighlights {
4778 captures,
4779 next_capture: None,
4780 stack: Default::default(),
4781 highlight_maps,
4782 })
4783 }
4784
4785 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4786 let chunks = text.chunks_in_range(range.clone());
4787
4788 let mut this = BufferChunks {
4789 range,
4790 buffer_snapshot,
4791 chunks,
4792 diagnostic_endpoints,
4793 error_depth: 0,
4794 warning_depth: 0,
4795 information_depth: 0,
4796 hint_depth: 0,
4797 unnecessary_depth: 0,
4798 underline: true,
4799 highlights,
4800 };
4801 this.initialize_diagnostic_endpoints();
4802 this
4803 }
4804
    /// Seeks to the given byte range in the buffer.
4806 pub fn seek(&mut self, range: Range<usize>) {
4807 let old_range = std::mem::replace(&mut self.range, range.clone());
4808 self.chunks.set_range(self.range.clone());
4809 if let Some(highlights) = self.highlights.as_mut() {
4810 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4811 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4812 highlights
4813 .stack
4814 .retain(|(end_offset, _)| *end_offset > range.start);
4815 if let Some(capture) = &highlights.next_capture
4816 && range.start >= capture.node.start_byte()
4817 {
4818 let next_capture_end = capture.node.end_byte();
4819 if range.start < next_capture_end {
4820 highlights.stack.push((
4821 next_capture_end,
4822 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4823 ));
4824 }
4825 highlights.next_capture.take();
4826 }
4827 } else if let Some(snapshot) = self.buffer_snapshot {
4828 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4829 *highlights = BufferChunkHighlights {
4830 captures,
4831 next_capture: None,
4832 stack: Default::default(),
4833 highlight_maps,
4834 };
4835 } else {
4836 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4837 // Seeking such BufferChunks is not supported.
4838 debug_assert!(
4839 false,
4840 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4841 );
4842 }
4843
4844 highlights.captures.set_byte_range(self.range.clone());
4845 self.initialize_diagnostic_endpoints();
4846 }
4847 }
4848
4849 fn initialize_diagnostic_endpoints(&mut self) {
4850 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4851 && let Some(buffer) = self.buffer_snapshot
4852 {
4853 let mut diagnostic_endpoints = Vec::new();
4854 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4855 diagnostic_endpoints.push(DiagnosticEndpoint {
4856 offset: entry.range.start,
4857 is_start: true,
4858 severity: entry.diagnostic.severity,
4859 is_unnecessary: entry.diagnostic.is_unnecessary,
4860 underline: entry.diagnostic.underline,
4861 });
4862 diagnostic_endpoints.push(DiagnosticEndpoint {
4863 offset: entry.range.end,
4864 is_start: false,
4865 severity: entry.diagnostic.severity,
4866 is_unnecessary: entry.diagnostic.is_unnecessary,
4867 underline: entry.diagnostic.underline,
4868 });
4869 }
4870 diagnostic_endpoints
4871 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4872 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4873 self.hint_depth = 0;
4874 self.error_depth = 0;
4875 self.warning_depth = 0;
4876 self.information_depth = 0;
4877 }
4878 }
4879
4880 /// The current byte offset in the buffer.
4881 pub fn offset(&self) -> usize {
4882 self.range.start
4883 }
4884
4885 pub fn range(&self) -> Range<usize> {
4886 self.range.clone()
4887 }
4888
4889 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4890 let depth = match endpoint.severity {
4891 DiagnosticSeverity::ERROR => &mut self.error_depth,
4892 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4893 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4894 DiagnosticSeverity::HINT => &mut self.hint_depth,
4895 _ => return,
4896 };
4897 if endpoint.is_start {
4898 *depth += 1;
4899 } else {
4900 *depth -= 1;
4901 }
4902
4903 if endpoint.is_unnecessary {
4904 if endpoint.is_start {
4905 self.unnecessary_depth += 1;
4906 } else {
4907 self.unnecessary_depth -= 1;
4908 }
4909 }
4910 }
4911
4912 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4913 if self.error_depth > 0 {
4914 Some(DiagnosticSeverity::ERROR)
4915 } else if self.warning_depth > 0 {
4916 Some(DiagnosticSeverity::WARNING)
4917 } else if self.information_depth > 0 {
4918 Some(DiagnosticSeverity::INFORMATION)
4919 } else if self.hint_depth > 0 {
4920 Some(DiagnosticSeverity::HINT)
4921 } else {
4922 None
4923 }
4924 }
4925
4926 fn current_code_is_unnecessary(&self) -> bool {
4927 self.unnecessary_depth > 0
4928 }
4929}
4930
4931impl<'a> Iterator for BufferChunks<'a> {
4932 type Item = Chunk<'a>;
4933
4934 fn next(&mut self) -> Option<Self::Item> {
4935 let mut next_capture_start = usize::MAX;
4936 let mut next_diagnostic_endpoint = usize::MAX;
4937
4938 if let Some(highlights) = self.highlights.as_mut() {
4939 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4940 if *parent_capture_end <= self.range.start {
4941 highlights.stack.pop();
4942 } else {
4943 break;
4944 }
4945 }
4946
4947 if highlights.next_capture.is_none() {
4948 highlights.next_capture = highlights.captures.next();
4949 }
4950
4951 while let Some(capture) = highlights.next_capture.as_ref() {
4952 if self.range.start < capture.node.start_byte() {
4953 next_capture_start = capture.node.start_byte();
4954 break;
4955 } else {
4956 let highlight_id =
4957 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4958 highlights
4959 .stack
4960 .push((capture.node.end_byte(), highlight_id));
4961 highlights.next_capture = highlights.captures.next();
4962 }
4963 }
4964 }
4965
4966 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4967 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4968 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4969 if endpoint.offset <= self.range.start {
4970 self.update_diagnostic_depths(endpoint);
4971 diagnostic_endpoints.next();
4972 self.underline = endpoint.underline;
4973 } else {
4974 next_diagnostic_endpoint = endpoint.offset;
4975 break;
4976 }
4977 }
4978 }
4979 self.diagnostic_endpoints = diagnostic_endpoints;
4980
4981 if let Some(ChunkBitmaps {
4982 text: chunk,
4983 chars: chars_map,
4984 tabs,
4985 }) = self.chunks.peek_with_bitmaps()
4986 {
4987 let chunk_start = self.range.start;
4988 let mut chunk_end = (self.chunks.offset() + chunk.len())
4989 .min(next_capture_start)
4990 .min(next_diagnostic_endpoint);
4991 let mut highlight_id = None;
4992 if let Some(highlights) = self.highlights.as_ref()
4993 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
4994 {
4995 chunk_end = chunk_end.min(*parent_capture_end);
4996 highlight_id = Some(*parent_highlight_id);
4997 }
4998 let bit_start = chunk_start - self.chunks.offset();
4999 let bit_end = chunk_end - self.chunks.offset();
5000
5001 let slice = &chunk[bit_start..bit_end];
5002
5003 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5004 let tabs = (tabs >> bit_start) & mask;
5005 let chars = (chars_map >> bit_start) & mask;
5006
5007 self.range.start = chunk_end;
5008 if self.range.start == self.chunks.offset() + chunk.len() {
5009 self.chunks.next().unwrap();
5010 }
5011
5012 Some(Chunk {
5013 text: slice,
5014 syntax_highlight_id: highlight_id,
5015 underline: self.underline,
5016 diagnostic_severity: self.current_diagnostic_severity(),
5017 is_unnecessary: self.current_code_is_unnecessary(),
5018 tabs,
5019 chars,
5020 ..Chunk::default()
5021 })
5022 } else {
5023 None
5024 }
5025 }
5026}
5027
5028impl operation_queue::Operation for Operation {
5029 fn lamport_timestamp(&self) -> clock::Lamport {
5030 match self {
5031 Operation::Buffer(_) => {
5032 unreachable!("buffer operations should never be deferred at this layer")
5033 }
5034 Operation::UpdateDiagnostics {
5035 lamport_timestamp, ..
5036 }
5037 | Operation::UpdateSelections {
5038 lamport_timestamp, ..
5039 }
5040 | Operation::UpdateCompletionTriggers {
5041 lamport_timestamp, ..
5042 }
5043 | Operation::UpdateLineEnding {
5044 lamport_timestamp, ..
5045 } => *lamport_timestamp,
5046 }
5047 }
5048}
5049
5050impl Default for Diagnostic {
5051 fn default() -> Self {
5052 Self {
5053 source: Default::default(),
5054 source_kind: DiagnosticSourceKind::Other,
5055 code: None,
5056 code_description: None,
5057 severity: DiagnosticSeverity::ERROR,
5058 message: Default::default(),
5059 markdown: None,
5060 group_id: 0,
5061 is_primary: false,
5062 is_disk_based: false,
5063 is_unnecessary: false,
5064 underline: true,
5065 data: None,
5066 }
5067 }
5068}
5069
5070impl IndentSize {
5071 /// Returns an [`IndentSize`] representing the given spaces.
5072 pub fn spaces(len: u32) -> Self {
5073 Self {
5074 len,
5075 kind: IndentKind::Space,
5076 }
5077 }
5078
5079 /// Returns an [`IndentSize`] representing a tab.
5080 pub fn tab() -> Self {
5081 Self {
5082 len: 1,
5083 kind: IndentKind::Tab,
5084 }
5085 }
5086
5087 /// An iterator over the characters represented by this [`IndentSize`].
5088 pub fn chars(&self) -> impl Iterator<Item = char> {
5089 iter::repeat(self.char()).take(self.len as usize)
5090 }
5091
5092 /// The character representation of this [`IndentSize`].
5093 pub fn char(&self) -> char {
5094 match self.kind {
5095 IndentKind::Space => ' ',
5096 IndentKind::Tab => '\t',
5097 }
5098 }
5099
5100 /// Consumes the current [`IndentSize`] and returns a new one that has
5101 /// been shrunk or enlarged by the given size along the given direction.
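    ///
    /// Illustrative sketch (not a doctest):
    ///
    /// ```ignore
    /// // Grow a 4-space indent by another 4 spaces, then shrink it back.
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// let shrunk = grown.with_delta(Ordering::Less, IndentSize::spaces(4));
    /// ```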
5102 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5103 match direction {
5104 Ordering::Less => {
5105 if self.kind == size.kind && self.len >= size.len {
5106 self.len -= size.len;
5107 }
5108 }
5109 Ordering::Equal => {}
5110 Ordering::Greater => {
5111 if self.len == 0 {
5112 self = size;
5113 } else if self.kind == size.kind {
5114 self.len += size.len;
5115 }
5116 }
5117 }
5118 self
5119 }
5120
5121 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5122 match self.kind {
5123 IndentKind::Space => self.len as usize,
5124 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5125 }
5126 }
5127}
5128
5129#[cfg(any(test, feature = "test-support"))]
5130pub struct TestFile {
5131 pub path: Arc<RelPath>,
5132 pub root_name: String,
5133 pub local_root: Option<PathBuf>,
5134}
5135
5136#[cfg(any(test, feature = "test-support"))]
5137impl File for TestFile {
5138 fn path(&self) -> &Arc<RelPath> {
5139 &self.path
5140 }
5141
5142 fn full_path(&self, _: &gpui::App) -> PathBuf {
5143 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5144 }
5145
5146 fn as_local(&self) -> Option<&dyn LocalFile> {
5147 if self.local_root.is_some() {
5148 Some(self)
5149 } else {
5150 None
5151 }
5152 }
5153
5154 fn disk_state(&self) -> DiskState {
5155 unimplemented!()
5156 }
5157
5158 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5159 self.path().file_name().unwrap_or(self.root_name.as_ref())
5160 }
5161
5162 fn worktree_id(&self, _: &App) -> WorktreeId {
5163 WorktreeId::from_usize(0)
5164 }
5165
5166 fn to_proto(&self, _: &App) -> rpc::proto::File {
5167 unimplemented!()
5168 }
5169
5170 fn is_private(&self) -> bool {
5171 false
5172 }
5173
5174 fn path_style(&self, _cx: &App) -> PathStyle {
5175 PathStyle::local()
5176 }
5177}
5178
5179#[cfg(any(test, feature = "test-support"))]
5180impl LocalFile for TestFile {
5181 fn abs_path(&self, _cx: &App) -> PathBuf {
5182 PathBuf::from(self.local_root.as_ref().unwrap())
5183 .join(&self.root_name)
5184 .join(self.path.as_std_path())
5185 }
5186
5187 fn load(&self, _cx: &App) -> Task<Result<String>> {
5188 unimplemented!()
5189 }
5190
5191 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5192 unimplemented!()
5193 }
5194}
5195
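/// Groups an ascending sequence of values into contiguous ranges, starting a
/// new range whenever a value is not adjacent to the previous one or the
/// current range already spans `max_len` values.
///
/// Illustrative: `[1, 2, 3, 5, 6]` with `max_len = 10` yields `1..4` and `5..7`.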
5196pub(crate) fn contiguous_ranges(
5197 values: impl Iterator<Item = u32>,
5198 max_len: usize,
5199) -> impl Iterator<Item = Range<u32>> {
5200 let mut values = values;
5201 let mut current_range: Option<Range<u32>> = None;
5202 std::iter::from_fn(move || {
5203 loop {
5204 if let Some(value) = values.next() {
5205 if let Some(range) = &mut current_range
5206 && value == range.end
5207 && range.len() < max_len
5208 {
5209 range.end += 1;
5210 continue;
5211 }
5212
5213 let prev_range = current_range.clone();
5214 current_range = Some(value..(value + 1));
5215 if prev_range.is_some() {
5216 return prev_range;
5217 }
5218 } else {
5219 return current_range.take();
5220 }
5221 }
5222 })
5223}
5224
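/// Classifies characters as word, whitespace, or punctuation, optionally taking
/// language-specific word characters into account.
///
/// Illustrative sketch (not a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert!(classifier.is_word('x'));
/// assert!(classifier.is_whitespace(' '));
/// assert!(classifier.is_punctuation('.'));
/// ```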
5225#[derive(Default, Debug)]
5226pub struct CharClassifier {
5227 scope: Option<LanguageScope>,
5228 scope_context: Option<CharScopeContext>,
5229 ignore_punctuation: bool,
5230}
5231
5232impl CharClassifier {
5233 pub fn new(scope: Option<LanguageScope>) -> Self {
5234 Self {
5235 scope,
5236 scope_context: None,
5237 ignore_punctuation: false,
5238 }
5239 }
5240
5241 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5242 Self {
5243 scope_context,
5244 ..self
5245 }
5246 }
5247
5248 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5249 Self {
5250 ignore_punctuation,
5251 ..self
5252 }
5253 }
5254
5255 pub fn is_whitespace(&self, c: char) -> bool {
5256 self.kind(c) == CharKind::Whitespace
5257 }
5258
5259 pub fn is_word(&self, c: char) -> bool {
5260 self.kind(c) == CharKind::Word
5261 }
5262
5263 pub fn is_punctuation(&self, c: char) -> bool {
5264 self.kind(c) == CharKind::Punctuation
5265 }
5266
5267 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5268 if c.is_alphanumeric() || c == '_' {
5269 return CharKind::Word;
5270 }
5271
5272 if let Some(scope) = &self.scope {
5273 let characters = match self.scope_context {
5274 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5275 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5276 None => scope.word_characters(),
5277 };
5278 if let Some(characters) = characters
5279 && characters.contains(&c)
5280 {
5281 return CharKind::Word;
5282 }
5283 }
5284
5285 if c.is_whitespace() {
5286 return CharKind::Whitespace;
5287 }
5288
5289 if ignore_punctuation {
5290 CharKind::Word
5291 } else {
5292 CharKind::Punctuation
5293 }
5294 }
5295
5296 pub fn kind(&self, c: char) -> CharKind {
5297 self.kind_with(c, self.ignore_punctuation)
5298 }
5299}
5300
5301/// Find all of the ranges of whitespace that occur at the ends of lines
5302/// in the given rope.
5303///
5304/// This could also be done with a regex search, but this implementation
5305/// avoids copying text.
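///
/// Illustrative sketch (not a doctest; assumes `Rope` can be constructed from a
/// string slice via `Rope::from`):
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n}\t\t\n");
/// // Yields two ranges: the spaces after `{` and the tabs after `}`.
/// let ranges = trailing_whitespace_ranges(&rope);
/// ```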
5306pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5307 let mut ranges = Vec::new();
5308
5309 let mut offset = 0;
5310 let mut prev_chunk_trailing_whitespace_range = 0..0;
5311 for chunk in rope.chunks() {
5312 let mut prev_line_trailing_whitespace_range = 0..0;
5313 for (i, line) in chunk.split('\n').enumerate() {
5314 let line_end_offset = offset + line.len();
5315 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5316 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5317
5318 if i == 0 && trimmed_line_len == 0 {
5319 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5320 }
5321 if !prev_line_trailing_whitespace_range.is_empty() {
5322 ranges.push(prev_line_trailing_whitespace_range);
5323 }
5324
5325 offset = line_end_offset + 1;
5326 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5327 }
5328
5329 offset -= 1;
5330 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5331 }
5332
5333 if !prev_chunk_trailing_whitespace_range.is_empty() {
5334 ranges.push(prev_chunk_trailing_whitespace_range);
5335 }
5336
5337 ranges
5338}