1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 future::Future,
45 iter::{self, Iterator, Peekable},
46 mem,
47 num::NonZeroU32,
48 ops::{Deref, Range},
49 path::PathBuf,
50 rc,
51 sync::{Arc, LazyLock},
52 time::{Duration, Instant},
53 vec,
54};
55use sum_tree::TreeMap;
56use text::operation_queue::OperationQueue;
57use text::*;
58pub use text::{
59 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
60 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
61 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
62 ToPointUtf16, Transaction, TransactionId, Unclipped,
63};
64use theme::{ActiveTheme as _, SyntaxTheme};
65#[cfg(any(test, feature = "test-support"))]
66use util::RandomCharIter;
67use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
68
69#[cfg(any(test, feature = "test-support"))]
70pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
71
72pub use lsp::DiagnosticSeverity;
73
74/// A label for the background task spawned by the buffer to compute
75/// a diff against the contents of its file.
76pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
77
/// Indicates whether a [`Buffer`] can be edited.
79#[derive(PartialEq, Clone, Copy, Debug)]
80pub enum Capability {
81 /// The buffer is a mutable replica.
82 ReadWrite,
83 /// The buffer is a read-only replica.
84 ReadOnly,
85}
86
87pub type BufferRow = u32;
88
89/// An in-memory representation of a source code file, including its text,
90/// syntax trees, git status, and diagnostics.
91pub struct Buffer {
92 text: TextBuffer,
93 branch_state: Option<BufferBranchState>,
94 /// Filesystem state, `None` when there is no path.
95 file: Option<Arc<dyn File>>,
96 /// The mtime of the file when this buffer was last loaded from
97 /// or saved to disk.
98 saved_mtime: Option<MTime>,
99 /// The version vector when this buffer was last loaded from
100 /// or saved to disk.
101 saved_version: clock::Global,
102 preview_version: clock::Global,
103 transaction_depth: usize,
104 was_dirty_before_starting_transaction: Option<bool>,
105 reload_task: Option<Task<Result<()>>>,
106 language: Option<Arc<Language>>,
107 autoindent_requests: Vec<Arc<AutoindentRequest>>,
108 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
109 pending_autoindent: Option<Task<()>>,
110 sync_parse_timeout: Duration,
111 syntax_map: Mutex<SyntaxMap>,
112 reparse: Option<Task<()>>,
113 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
114 non_text_state_update_count: usize,
115 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
116 remote_selections: TreeMap<ReplicaId, SelectionSet>,
117 diagnostics_timestamp: clock::Lamport,
118 completion_triggers: BTreeSet<String>,
119 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
120 completion_triggers_timestamp: clock::Lamport,
121 deferred_ops: OperationQueue<Operation>,
122 capability: Capability,
123 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
126 has_unsaved_edits: Cell<(clock::Global, bool)>,
127 change_bits: Vec<rc::Weak<Cell<bool>>>,
128 _subscriptions: Vec<gpui::Subscription>,
129}
130
131#[derive(Copy, Clone, Debug, PartialEq, Eq)]
132pub enum ParseStatus {
133 Idle,
134 Parsing,
135}
136
137struct BufferBranchState {
138 base_buffer: Entity<Buffer>,
139 merged_operations: Vec<Lamport>,
140}
141
142/// An immutable, cheaply cloneable representation of a fixed
143/// state of a buffer.
144pub struct BufferSnapshot {
145 pub text: text::BufferSnapshot,
146 pub syntax: SyntaxSnapshot,
147 file: Option<Arc<dyn File>>,
148 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
149 remote_selections: TreeMap<ReplicaId, SelectionSet>,
150 language: Option<Arc<Language>>,
151 non_text_state_update_count: usize,
152}
153
154/// The kind and amount of indentation in a particular line. For now,
155/// assumes that indentation is all the same character.
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
157pub struct IndentSize {
158 /// The number of bytes that comprise the indentation.
159 pub len: u32,
160 /// The kind of whitespace used for indentation.
161 pub kind: IndentKind,
162}
163
164/// A whitespace character that's used for indentation.
165#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
166pub enum IndentKind {
167 /// An ASCII space character.
168 #[default]
169 Space,
170 /// An ASCII tab character.
171 Tab,
172}
173
174/// The shape of a selection cursor.
175#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
176pub enum CursorShape {
177 /// A vertical bar
178 #[default]
179 Bar,
180 /// A block that surrounds the following character
181 Block,
182 /// An underline that runs along the following character
183 Underline,
184 /// A box drawn around the following character
185 Hollow,
186}
187
188impl From<settings::CursorShape> for CursorShape {
189 fn from(shape: settings::CursorShape) -> Self {
190 match shape {
191 settings::CursorShape::Bar => CursorShape::Bar,
192 settings::CursorShape::Block => CursorShape::Block,
193 settings::CursorShape::Underline => CursorShape::Underline,
194 settings::CursorShape::Hollow => CursorShape::Hollow,
195 }
196 }
197}
198
199#[derive(Clone, Debug)]
200struct SelectionSet {
201 line_mode: bool,
202 cursor_shape: CursorShape,
203 selections: Arc<[Selection<Anchor>]>,
204 lamport_timestamp: clock::Lamport,
205}
206
207/// A diagnostic associated with a certain range of a buffer.
208#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
209pub struct Diagnostic {
210 /// The name of the service that produced this diagnostic.
211 pub source: Option<String>,
212 /// A machine-readable code that identifies this diagnostic.
213 pub code: Option<NumberOrString>,
214 pub code_description: Option<lsp::Uri>,
215 /// Whether this diagnostic is a hint, warning, or error.
216 pub severity: DiagnosticSeverity,
217 /// The human-readable message associated with this diagnostic.
218 pub message: String,
    /// The human-readable message in Markdown format, if available.
220 pub markdown: Option<String>,
221 /// An id that identifies the group to which this diagnostic belongs.
222 ///
223 /// When a language server produces a diagnostic with
224 /// one or more associated diagnostics, those diagnostics are all
225 /// assigned a single group ID.
226 pub group_id: usize,
227 /// Whether this diagnostic is the primary diagnostic for its group.
228 ///
229 /// In a given group, the primary diagnostic is the top-level diagnostic
230 /// returned by the language server. The non-primary diagnostics are the
231 /// associated diagnostics.
232 pub is_primary: bool,
233 /// Whether this diagnostic is considered to originate from an analysis of
234 /// files on disk, as opposed to any unsaved buffer contents. This is a
235 /// property of a given diagnostic source, and is configured for a given
236 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
237 /// for the language server.
238 pub is_disk_based: bool,
239 /// Whether this diagnostic marks unnecessary code.
240 pub is_unnecessary: bool,
    /// The kind of source this diagnostic came from, for quickly separating diagnostic groups by origin.
242 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to
    /// the language server when code actions are requested for this diagnostic.
244 pub data: Option<Value>,
245 /// Whether to underline the corresponding text range in the editor.
246 pub underline: bool,
247}
248
249#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
250pub enum DiagnosticSourceKind {
251 Pulled,
252 Pushed,
253 Other,
254}
255
256/// An operation used to synchronize this buffer with its other replicas.
257#[derive(Clone, Debug, PartialEq)]
258pub enum Operation {
259 /// A text operation.
260 Buffer(text::Operation),
261
262 /// An update to the buffer's diagnostics.
263 UpdateDiagnostics {
264 /// The id of the language server that produced the new diagnostics.
265 server_id: LanguageServerId,
266 /// The diagnostics.
267 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
268 /// The buffer's lamport timestamp.
269 lamport_timestamp: clock::Lamport,
270 },
271
272 /// An update to the most recent selections in this buffer.
273 UpdateSelections {
274 /// The selections.
275 selections: Arc<[Selection<Anchor>]>,
276 /// The buffer's lamport timestamp.
277 lamport_timestamp: clock::Lamport,
278 /// Whether the selections are in 'line mode'.
279 line_mode: bool,
280 /// The [`CursorShape`] associated with these selections.
281 cursor_shape: CursorShape,
282 },
283
284 /// An update to the characters that should trigger autocompletion
285 /// for this buffer.
286 UpdateCompletionTriggers {
287 /// The characters that trigger autocompletion.
288 triggers: Vec<String>,
289 /// The buffer's lamport timestamp.
290 lamport_timestamp: clock::Lamport,
291 /// The language server ID.
292 server_id: LanguageServerId,
293 },
294
295 /// An update to the line ending type of this buffer.
296 UpdateLineEnding {
297 /// The line ending type.
298 line_ending: LineEnding,
299 /// The buffer's lamport timestamp.
300 lamport_timestamp: clock::Lamport,
301 },
302}
303
304/// An event that occurs in a buffer.
305#[derive(Clone, Debug, PartialEq)]
306pub enum BufferEvent {
307 /// The buffer was changed in a way that must be
308 /// propagated to its other replicas.
309 Operation {
310 operation: Operation,
311 is_local: bool,
312 },
313 /// The buffer was edited.
314 Edited,
315 /// The buffer's `dirty` bit changed.
316 DirtyChanged,
317 /// The buffer was saved.
318 Saved,
319 /// The buffer's file was changed on disk.
320 FileHandleChanged,
321 /// The buffer was reloaded.
322 Reloaded,
    /// The buffer needs to be reloaded.
324 ReloadNeeded,
325 /// The buffer's language was changed.
326 LanguageChanged,
327 /// The buffer's syntax trees were updated.
328 Reparsed,
329 /// The buffer's diagnostics were updated.
330 DiagnosticsUpdated,
331 /// The buffer gained or lost editing capabilities.
332 CapabilityChanged,
333}
334
335/// The file associated with a buffer.
336pub trait File: Send + Sync + Any {
337 /// Returns the [`LocalFile`] associated with this file, if the
338 /// file is local.
339 fn as_local(&self) -> Option<&dyn LocalFile>;
340
341 /// Returns whether this file is local.
342 fn is_local(&self) -> bool {
343 self.as_local().is_some()
344 }
345
346 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
347 /// only available in some states, such as modification time.
348 fn disk_state(&self) -> DiskState;
349
350 /// Returns the path of this file relative to the worktree's root directory.
351 fn path(&self) -> &Arc<RelPath>;
352
353 /// Returns the path of this file relative to the worktree's parent directory (this means it
354 /// includes the name of the worktree's root folder).
355 fn full_path(&self, cx: &App) -> PathBuf;
356
357 /// Returns the path style of this file.
358 fn path_style(&self, cx: &App) -> PathStyle;
359
360 /// Returns the last component of this handle's absolute path. If this handle refers to the root
361 /// of its worktree, then this method will return the name of the worktree itself.
362 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
363
364 /// Returns the id of the worktree to which this file belongs.
365 ///
366 /// This is needed for looking up project-specific settings.
367 fn worktree_id(&self, cx: &App) -> WorktreeId;
368
369 /// Converts this file into a protobuf message.
370 fn to_proto(&self, cx: &App) -> rpc::proto::File;
371
372 /// Return whether Zed considers this to be a private file.
373 fn is_private(&self) -> bool;
374}
375
376/// The file's storage status - whether it's stored (`Present`), and if so when it was last
377/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
378/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
379/// indicator for new files.
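///
/// A minimal sketch of querying this state (assumed `file: &dyn File`; marked
/// `ignore` so it is not run as a doctest):
///
/// ```ignore
/// match file.disk_state() {
///     DiskState::New => println!("never saved"),
///     DiskState::Present { mtime } => println!("on disk, modified at {mtime:?}"),
///     DiskState::Deleted => println!("deleted from disk"),
/// }
/// ```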
380#[derive(Copy, Clone, Debug, PartialEq)]
381pub enum DiskState {
382 /// File created in Zed that has not been saved.
383 New,
384 /// File present on the filesystem.
385 Present { mtime: MTime },
386 /// Deleted file that was previously present.
387 Deleted,
388}
389
390impl DiskState {
391 /// Returns the file's last known modification time on disk.
392 pub fn mtime(self) -> Option<MTime> {
393 match self {
394 DiskState::New => None,
395 DiskState::Present { mtime } => Some(mtime),
396 DiskState::Deleted => None,
397 }
398 }
399
400 pub fn exists(&self) -> bool {
401 match self {
402 DiskState::New => false,
403 DiskState::Present { .. } => true,
404 DiskState::Deleted => false,
405 }
406 }
407}
408
409/// The file associated with a buffer, in the case where the file is on the local disk.
410pub trait LocalFile: File {
411 /// Returns the absolute path of this file
412 fn abs_path(&self, cx: &App) -> PathBuf;
413
414 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
415 fn load(&self, cx: &App) -> Task<Result<String>>;
416
417 /// Loads the file's contents from disk.
418 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
419}
420
421/// The auto-indent behavior associated with an editing operation.
422/// For some editing operations, each affected line of text has its
423/// indentation recomputed. For other operations, the entire block
424/// of edited text is adjusted uniformly.
425#[derive(Clone, Debug)]
426pub enum AutoindentMode {
427 /// Indent each line of inserted text.
428 EachLine,
429 /// Apply the same indentation adjustment to all of the lines
430 /// in a given insertion.
431 Block {
432 /// The original indentation column of the first line of each
433 /// insertion, if it has been copied.
434 ///
435 /// Knowing this makes it possible to preserve the relative indentation
436 /// of every line in the insertion from when it was copied.
437 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is then auto-indented to column `b`, then every other line of the
        /// insertion will have its indentation adjusted by `b - a` columns.
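        ///
        /// For example (a hypothetical illustration): a copied block whose first line
        /// was originally at column 4 (`a = 4`) and is auto-indented to column 8
        /// (`b = 8`) has every other line shifted right by `b - a = 4` columns.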
441 original_indent_columns: Vec<Option<u32>>,
442 },
443}
444
445#[derive(Clone)]
446struct AutoindentRequest {
447 before_edit: BufferSnapshot,
448 entries: Vec<AutoindentRequestEntry>,
449 is_block_mode: bool,
450 ignore_empty_lines: bool,
451}
452
453#[derive(Debug, Clone)]
454struct AutoindentRequestEntry {
455 /// A range of the buffer whose indentation should be adjusted.
456 range: Range<Anchor>,
457 /// Whether or not these lines should be considered brand new, for the
458 /// purpose of auto-indent. When text is not new, its indentation will
459 /// only be adjusted if the suggested indentation level has *changed*
460 /// since the edit was made.
461 first_line_is_new: bool,
462 indent_size: IndentSize,
463 original_indent_column: Option<u32>,
464}
465
466#[derive(Debug)]
467struct IndentSuggestion {
468 basis_row: u32,
469 delta: Ordering,
470 within_error: bool,
471}
472
473struct BufferChunkHighlights<'a> {
474 captures: SyntaxMapCaptures<'a>,
475 next_capture: Option<SyntaxMapCapture<'a>>,
476 stack: Vec<(usize, HighlightId)>,
477 highlight_maps: Vec<HighlightMap>,
478}
479
480/// An iterator that yields chunks of a buffer's text, along with their
481/// syntax highlights and diagnostic status.
482pub struct BufferChunks<'a> {
483 buffer_snapshot: Option<&'a BufferSnapshot>,
484 range: Range<usize>,
485 chunks: text::Chunks<'a>,
486 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
487 error_depth: usize,
488 warning_depth: usize,
489 information_depth: usize,
490 hint_depth: usize,
491 unnecessary_depth: usize,
492 underline: bool,
493 highlights: Option<BufferChunkHighlights<'a>>,
494}
495
496/// A chunk of a buffer's text, along with its syntax highlight and
497/// diagnostic status.
498#[derive(Clone, Debug, Default)]
499pub struct Chunk<'a> {
500 /// The text of the chunk.
501 pub text: &'a str,
502 /// The syntax highlighting style of the chunk.
503 pub syntax_highlight_id: Option<HighlightId>,
504 /// The highlight style that has been applied to this chunk in
505 /// the editor.
506 pub highlight_style: Option<HighlightStyle>,
507 /// The severity of diagnostic associated with this chunk, if any.
508 pub diagnostic_severity: Option<DiagnosticSeverity>,
509 /// A bitset of which characters are tabs in this string.
510 pub tabs: u128,
511 /// Bitmap of character indices in this chunk
512 pub chars: u128,
513 /// Whether this chunk of text is marked as unnecessary.
514 pub is_unnecessary: bool,
515 /// Whether this chunk of text was originally a tab character.
516 pub is_tab: bool,
517 /// Whether this chunk of text was originally an inlay.
518 pub is_inlay: bool,
519 /// Whether to underline the corresponding text range in the editor.
520 pub underline: bool,
521}
522
523/// A set of edits to a given version of a buffer, computed asynchronously.
524#[derive(Debug)]
525pub struct Diff {
526 pub base_version: clock::Global,
527 pub line_ending: LineEnding,
528 pub edits: Vec<(Range<usize>, Arc<str>)>,
529}
530
531#[derive(Debug, Clone, Copy)]
532pub(crate) struct DiagnosticEndpoint {
533 offset: usize,
534 is_start: bool,
535 underline: bool,
536 severity: DiagnosticSeverity,
537 is_unnecessary: bool,
538}
539
540/// A class of characters, used for characterizing a run of text.
541#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
542pub enum CharKind {
543 /// Whitespace.
544 Whitespace,
545 /// Punctuation.
546 Punctuation,
547 /// Word.
548 Word,
549}
550
551/// Context for character classification within a specific scope.
552#[derive(Copy, Clone, Eq, PartialEq, Debug)]
553pub enum CharScopeContext {
554 /// Character classification for completion queries.
555 ///
556 /// This context treats certain characters as word constituents that would
557 /// normally be considered punctuation, such as '-' in Tailwind classes
558 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
559 Completion,
560 /// Character classification for linked edits.
561 ///
562 /// This context handles characters that should be treated as part of
563 /// identifiers during linked editing operations, such as '.' in JSX
564 /// component names like `<Animated.View>`.
565 LinkedEdit,
566}
567
568/// A runnable is a set of data about a region that could be resolved into a task
569pub struct Runnable {
570 pub tags: SmallVec<[RunnableTag; 1]>,
571 pub language: Arc<Language>,
572 pub buffer: BufferId,
573}
574
575#[derive(Default, Clone, Debug)]
576pub struct HighlightedText {
577 pub text: SharedString,
578 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
579}
580
581#[derive(Default, Debug)]
582struct HighlightedTextBuilder {
583 pub text: String,
584 highlights: Vec<(Range<usize>, HighlightStyle)>,
585}
586
587impl HighlightedText {
588 pub fn from_buffer_range<T: ToOffset>(
589 range: Range<T>,
590 snapshot: &text::BufferSnapshot,
591 syntax_snapshot: &SyntaxSnapshot,
592 override_style: Option<HighlightStyle>,
593 syntax_theme: &SyntaxTheme,
594 ) -> Self {
595 let mut highlighted_text = HighlightedTextBuilder::default();
596 highlighted_text.add_text_from_buffer_range(
597 range,
598 snapshot,
599 syntax_snapshot,
600 override_style,
601 syntax_theme,
602 );
603 highlighted_text.build()
604 }
605
606 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
607 gpui::StyledText::new(self.text.clone())
608 .with_default_highlights(default_style, self.highlights.iter().cloned())
609 }
610
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within that whitespace, along with a boolean indicating whether more
    /// lines follow.
613 pub fn first_line_preview(self) -> (Self, bool) {
614 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
615 let first_line = &self.text[..newline_ix];
616
617 // Trim leading whitespace, unless an edit starts prior to it.
618 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
619 if let Some((first_highlight_range, _)) = self.highlights.first() {
620 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
621 }
622
623 let preview_text = &first_line[preview_start_ix..];
624 let preview_highlights = self
625 .highlights
626 .into_iter()
627 .skip_while(|(range, _)| range.end <= preview_start_ix)
628 .take_while(|(range, _)| range.start < newline_ix)
629 .filter_map(|(mut range, highlight)| {
630 range.start = range.start.saturating_sub(preview_start_ix);
631 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
632 if range.is_empty() {
633 None
634 } else {
635 Some((range, highlight))
636 }
637 });
638
639 let preview = Self {
640 text: SharedString::new(preview_text),
641 highlights: preview_highlights.collect(),
642 };
643
644 (preview, self.text.len() > newline_ix)
645 }
646}
647
648impl HighlightedTextBuilder {
649 pub fn build(self) -> HighlightedText {
650 HighlightedText {
651 text: self.text.into(),
652 highlights: self.highlights,
653 }
654 }
655
656 pub fn add_text_from_buffer_range<T: ToOffset>(
657 &mut self,
658 range: Range<T>,
659 snapshot: &text::BufferSnapshot,
660 syntax_snapshot: &SyntaxSnapshot,
661 override_style: Option<HighlightStyle>,
662 syntax_theme: &SyntaxTheme,
663 ) {
664 let range = range.to_offset(snapshot);
665 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
666 let start = self.text.len();
667 self.text.push_str(chunk.text);
668 let end = self.text.len();
669
670 if let Some(highlight_style) = chunk
671 .syntax_highlight_id
672 .and_then(|id| id.style(syntax_theme))
673 {
674 let highlight_style = override_style.map_or(highlight_style, |override_style| {
675 highlight_style.highlight(override_style)
676 });
677 self.highlights.push((start..end, highlight_style));
678 } else if let Some(override_style) = override_style {
679 self.highlights.push((start..end, override_style));
680 }
681 }
682 }
683
684 fn highlighted_chunks<'a>(
685 range: Range<usize>,
686 snapshot: &'a text::BufferSnapshot,
687 syntax_snapshot: &'a SyntaxSnapshot,
688 ) -> BufferChunks<'a> {
689 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
690 grammar
691 .highlights_config
692 .as_ref()
693 .map(|config| &config.query)
694 });
695
696 let highlight_maps = captures
697 .grammars()
698 .iter()
699 .map(|grammar| grammar.highlight_map())
700 .collect();
701
702 BufferChunks::new(
703 snapshot.as_rope(),
704 range,
705 Some((captures, highlight_maps)),
706 false,
707 None,
708 )
709 }
710}
711
712#[derive(Clone)]
713pub struct EditPreview {
714 old_snapshot: text::BufferSnapshot,
715 applied_edits_snapshot: text::BufferSnapshot,
716 syntax_snapshot: SyntaxSnapshot,
717}
718
719impl EditPreview {
720 pub fn highlight_edits(
721 &self,
722 current_snapshot: &BufferSnapshot,
723 edits: &[(Range<Anchor>, impl AsRef<str>)],
724 include_deletions: bool,
725 cx: &App,
726 ) -> HighlightedText {
727 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
728 return HighlightedText::default();
729 };
730
731 let mut highlighted_text = HighlightedTextBuilder::default();
732
733 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
734
735 let insertion_highlight_style = HighlightStyle {
736 background_color: Some(cx.theme().status().created_background),
737 ..Default::default()
738 };
739 let deletion_highlight_style = HighlightStyle {
740 background_color: Some(cx.theme().status().deleted_background),
741 ..Default::default()
742 };
743 let syntax_theme = cx.theme().syntax();
744
745 for (range, edit_text) in edits {
746 let edit_new_end_in_preview_snapshot = range
747 .end
748 .bias_right(&self.old_snapshot)
749 .to_offset(&self.applied_edits_snapshot);
750 let edit_start_in_preview_snapshot =
751 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
752
753 let unchanged_range_in_preview_snapshot =
754 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
755 if !unchanged_range_in_preview_snapshot.is_empty() {
756 highlighted_text.add_text_from_buffer_range(
757 unchanged_range_in_preview_snapshot,
758 &self.applied_edits_snapshot,
759 &self.syntax_snapshot,
760 None,
761 syntax_theme,
762 );
763 }
764
765 let range_in_current_snapshot = range.to_offset(current_snapshot);
766 if include_deletions && !range_in_current_snapshot.is_empty() {
767 highlighted_text.add_text_from_buffer_range(
768 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
771 Some(deletion_highlight_style),
772 syntax_theme,
773 );
774 }
775
776 if !edit_text.as_ref().is_empty() {
777 highlighted_text.add_text_from_buffer_range(
778 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
779 &self.applied_edits_snapshot,
780 &self.syntax_snapshot,
781 Some(insertion_highlight_style),
782 syntax_theme,
783 );
784 }
785
786 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
787 }
788
789 highlighted_text.add_text_from_buffer_range(
790 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
791 &self.applied_edits_snapshot,
792 &self.syntax_snapshot,
793 None,
794 syntax_theme,
795 );
796
797 highlighted_text.build()
798 }
799
800 fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<usize>> {
801 let (first, _) = edits.first()?;
802 let (last, _) = edits.last()?;
803
804 let start = first
805 .start
806 .bias_left(&self.old_snapshot)
807 .to_point(&self.applied_edits_snapshot);
808 let end = last
809 .end
810 .bias_right(&self.old_snapshot)
811 .to_point(&self.applied_edits_snapshot);
812
813 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
814 let range = Point::new(start.row, 0)
815 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
816
817 Some(range.to_offset(&self.applied_edits_snapshot))
818 }
819}
820
821#[derive(Clone, Debug, PartialEq, Eq)]
822pub struct BracketMatch {
823 pub open_range: Range<usize>,
824 pub close_range: Range<usize>,
825 pub newline_only: bool,
826}
827
828impl Buffer {
829 /// Create a new buffer with the given base text.
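    ///
    /// A minimal usage sketch (assumes a GPUI `cx` that can create entities; marked
    /// `ignore` so it is not run as a doctest):
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```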
830 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
831 Self::build(
832 TextBuffer::new(
833 ReplicaId::LOCAL,
834 cx.entity_id().as_non_zero_u64().into(),
835 base_text.into(),
836 ),
837 None,
838 Capability::ReadWrite,
839 )
840 }
841
842 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
843 pub fn local_normalized(
844 base_text_normalized: Rope,
845 line_ending: LineEnding,
846 cx: &Context<Self>,
847 ) -> Self {
848 Self::build(
849 TextBuffer::new_normalized(
850 ReplicaId::LOCAL,
851 cx.entity_id().as_non_zero_u64().into(),
852 line_ending,
853 base_text_normalized,
854 ),
855 None,
856 Capability::ReadWrite,
857 )
858 }
859
860 /// Create a new buffer that is a replica of a remote buffer.
861 pub fn remote(
862 remote_id: BufferId,
863 replica_id: ReplicaId,
864 capability: Capability,
865 base_text: impl Into<String>,
866 ) -> Self {
867 Self::build(
868 TextBuffer::new(replica_id, remote_id, base_text.into()),
869 None,
870 capability,
871 )
872 }
873
874 /// Create a new buffer that is a replica of a remote buffer, populating its
875 /// state from the given protobuf message.
876 pub fn from_proto(
877 replica_id: ReplicaId,
878 capability: Capability,
879 message: proto::BufferState,
880 file: Option<Arc<dyn File>>,
881 ) -> Result<Self> {
882 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
883 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
884 let mut this = Self::build(buffer, file, capability);
885 this.text.set_line_ending(proto::deserialize_line_ending(
886 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
887 ));
888 this.saved_version = proto::deserialize_version(&message.saved_version);
889 this.saved_mtime = message.saved_mtime.map(|time| time.into());
890 Ok(this)
891 }
892
893 /// Serialize the buffer's state to a protobuf message.
894 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
895 proto::BufferState {
896 id: self.remote_id().into(),
897 file: self.file.as_ref().map(|f| f.to_proto(cx)),
898 base_text: self.base_text().to_string(),
899 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
900 saved_version: proto::serialize_version(&self.saved_version),
901 saved_mtime: self.saved_mtime.map(|time| time.into()),
902 }
903 }
904
905 /// Serialize as protobufs all of the changes to the buffer since the given version.
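    ///
    /// A usage sketch (hypothetical surrounding code, marked `ignore` so it is not
    /// run as a doctest):
    /// ```ignore
    /// // Serialize every operation in the buffer's history.
    /// let ops_task = buffer.read(cx).serialize_ops(None, cx);
    /// cx.background_spawn(async move {
    ///     let ops: Vec<proto::Operation> = ops_task.await;
    ///     // Send `ops` to a remote replica...
    /// })
    /// .detach();
    /// ```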
906 pub fn serialize_ops(
907 &self,
908 since: Option<clock::Global>,
909 cx: &App,
910 ) -> Task<Vec<proto::Operation>> {
911 let mut operations = Vec::new();
912 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
913
914 operations.extend(self.remote_selections.iter().map(|(_, set)| {
915 proto::serialize_operation(&Operation::UpdateSelections {
916 selections: set.selections.clone(),
917 lamport_timestamp: set.lamport_timestamp,
918 line_mode: set.line_mode,
919 cursor_shape: set.cursor_shape,
920 })
921 }));
922
923 for (server_id, diagnostics) in &self.diagnostics {
924 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
925 lamport_timestamp: self.diagnostics_timestamp,
926 server_id: *server_id,
927 diagnostics: diagnostics.iter().cloned().collect(),
928 }));
929 }
930
931 for (server_id, completions) in &self.completion_triggers_per_language_server {
932 operations.push(proto::serialize_operation(
933 &Operation::UpdateCompletionTriggers {
934 triggers: completions.iter().cloned().collect(),
935 lamport_timestamp: self.completion_triggers_timestamp,
936 server_id: *server_id,
937 },
938 ));
939 }
940
941 let text_operations = self.text.operations().clone();
942 cx.background_spawn(async move {
943 let since = since.unwrap_or_default();
944 operations.extend(
945 text_operations
946 .iter()
947 .filter(|(_, op)| !since.observed(op.timestamp()))
948 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
949 );
950 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
951 operations
952 })
953 }
954
955 /// Assign a language to the buffer, returning the buffer.
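    ///
    /// Commonly chained when constructing a buffer (a sketch with hypothetical
    /// `text` and `language` values, marked `ignore` so it is not run as a doctest):
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(language, cx));
    /// ```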
956 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
957 self.set_language(Some(language), cx);
958 self
959 }
960
961 /// Returns the [`Capability`] of this buffer.
962 pub fn capability(&self) -> Capability {
963 self.capability
964 }
965
966 /// Whether this buffer can only be read.
967 pub fn read_only(&self) -> bool {
968 self.capability == Capability::ReadOnly
969 }
970
971 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
972 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
973 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
974 let snapshot = buffer.snapshot();
975 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
976 Self {
977 saved_mtime,
978 saved_version: buffer.version(),
979 preview_version: buffer.version(),
980 reload_task: None,
981 transaction_depth: 0,
982 was_dirty_before_starting_transaction: None,
983 has_unsaved_edits: Cell::new((buffer.version(), false)),
984 text: buffer,
985 branch_state: None,
986 file,
987 capability,
988 syntax_map,
989 reparse: None,
990 non_text_state_update_count: 0,
991 sync_parse_timeout: Duration::from_millis(1),
992 parse_status: watch::channel(ParseStatus::Idle),
993 autoindent_requests: Default::default(),
994 wait_for_autoindent_txs: Default::default(),
995 pending_autoindent: Default::default(),
996 language: None,
997 remote_selections: Default::default(),
998 diagnostics: Default::default(),
999 diagnostics_timestamp: Lamport::MIN,
1000 completion_triggers: Default::default(),
1001 completion_triggers_per_language_server: Default::default(),
1002 completion_triggers_timestamp: Lamport::MIN,
1003 deferred_ops: OperationQueue::new(),
1004 has_conflict: false,
1005 change_bits: Default::default(),
1006 _subscriptions: Vec::new(),
1007 }
1008 }
1009
1010 pub fn build_snapshot(
1011 text: Rope,
1012 language: Option<Arc<Language>>,
1013 language_registry: Option<Arc<LanguageRegistry>>,
1014 cx: &mut App,
1015 ) -> impl Future<Output = BufferSnapshot> + use<> {
1016 let entity_id = cx.reserve_entity::<Self>().entity_id();
1017 let buffer_id = entity_id.as_non_zero_u64().into();
1018 async move {
1019 let text =
1020 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1021 .snapshot();
1022 let mut syntax = SyntaxMap::new(&text).snapshot();
1023 if let Some(language) = language.clone() {
1024 let language_registry = language_registry.clone();
1025 syntax.reparse(&text, language_registry, language);
1026 }
1027 BufferSnapshot {
1028 text,
1029 syntax,
1030 file: None,
1031 diagnostics: Default::default(),
1032 remote_selections: Default::default(),
1033 language,
1034 non_text_state_update_count: 0,
1035 }
1036 }
1037 }
1038
1039 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1040 let entity_id = cx.reserve_entity::<Self>().entity_id();
1041 let buffer_id = entity_id.as_non_zero_u64().into();
1042 let text = TextBuffer::new_normalized(
1043 ReplicaId::LOCAL,
1044 buffer_id,
1045 Default::default(),
1046 Rope::new(),
1047 )
1048 .snapshot();
1049 let syntax = SyntaxMap::new(&text).snapshot();
1050 BufferSnapshot {
1051 text,
1052 syntax,
1053 file: None,
1054 diagnostics: Default::default(),
1055 remote_selections: Default::default(),
1056 language: None,
1057 non_text_state_update_count: 0,
1058 }
1059 }
1060
1061 #[cfg(any(test, feature = "test-support"))]
1062 pub fn build_snapshot_sync(
1063 text: Rope,
1064 language: Option<Arc<Language>>,
1065 language_registry: Option<Arc<LanguageRegistry>>,
1066 cx: &mut App,
1067 ) -> BufferSnapshot {
1068 let entity_id = cx.reserve_entity::<Self>().entity_id();
1069 let buffer_id = entity_id.as_non_zero_u64().into();
1070 let text =
1071 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1072 .snapshot();
1073 let mut syntax = SyntaxMap::new(&text).snapshot();
1074 if let Some(language) = language.clone() {
1075 syntax.reparse(&text, language_registry, language);
1076 }
1077 BufferSnapshot {
1078 text,
1079 syntax,
1080 file: None,
1081 diagnostics: Default::default(),
1082 remote_selections: Default::default(),
1083 language,
1084 non_text_state_update_count: 0,
1085 }
1086 }
1087
1088 /// Retrieve a snapshot of the buffer's current state. This is computationally
1089 /// cheap, and allows reading from the buffer on a background thread.
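    ///
    /// A sketch of the intended pattern (hypothetical surrounding code, marked
    /// `ignore` so it is not run as a doctest):
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // Read from the snapshot without touching the Buffer entity.
    ///     let _text = snapshot.text();
    /// })
    /// .detach();
    /// ```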
1090 pub fn snapshot(&self) -> BufferSnapshot {
1091 let text = self.text.snapshot();
1092 let mut syntax_map = self.syntax_map.lock();
1093 syntax_map.interpolate(&text);
1094 let syntax = syntax_map.snapshot();
1095
1096 BufferSnapshot {
1097 text,
1098 syntax,
1099 file: self.file.clone(),
1100 remote_selections: self.remote_selections.clone(),
1101 diagnostics: self.diagnostics.clone(),
1102 language: self.language.clone(),
1103 non_text_state_update_count: self.non_text_state_update_count,
1104 }
1105 }
1106
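    /// Creates a branch of this buffer: a new buffer that starts from this buffer's
    /// current state and records which of its edits have been merged back into the
    /// base buffer via [`Buffer::merge_into_base`].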
1107 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1108 let this = cx.entity();
1109 cx.new(|cx| {
1110 let mut branch = Self {
1111 branch_state: Some(BufferBranchState {
1112 base_buffer: this.clone(),
1113 merged_operations: Default::default(),
1114 }),
1115 language: self.language.clone(),
1116 has_conflict: self.has_conflict,
1117 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1118 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1119 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1120 };
1121 if let Some(language_registry) = self.language_registry() {
1122 branch.set_language_registry(language_registry);
1123 }
1124
1125 // Reparse the branch buffer so that we get syntax highlighting immediately.
1126 branch.reparse(cx);
1127
1128 branch
1129 })
1130 }
1131
1132 pub fn preview_edits(
1133 &self,
1134 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1135 cx: &App,
1136 ) -> Task<EditPreview> {
1137 let registry = self.language_registry();
1138 let language = self.language().cloned();
1139 let old_snapshot = self.text.snapshot();
1140 let mut branch_buffer = self.text.branch();
1141 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1142 cx.background_spawn(async move {
1143 if !edits.is_empty() {
1144 if let Some(language) = language.clone() {
1145 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1146 }
1147
1148 branch_buffer.edit(edits.iter().cloned());
1149 let snapshot = branch_buffer.snapshot();
1150 syntax_snapshot.interpolate(&snapshot);
1151
1152 if let Some(language) = language {
1153 syntax_snapshot.reparse(&snapshot, registry, language);
1154 }
1155 }
1156 EditPreview {
1157 old_snapshot,
1158 applied_edits_snapshot: branch_buffer.snapshot(),
1159 syntax_snapshot,
1160 }
1161 })
1162 }
1163
1164 /// Applies all of the changes in this buffer that intersect any of the
1165 /// given `ranges` to its base buffer.
1166 ///
1167 /// If `ranges` is empty, then all changes will be applied. This buffer must
1168 /// be a branch buffer to call this method.
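    ///
    /// A usage sketch (hypothetical entities, marked `ignore` so it is not run as a
    /// doctest):
    /// ```ignore
    /// // Merge every edit from the branch back into its base buffer.
    /// branch.update(cx, |branch, cx| branch.merge_into_base(Vec::new(), cx));
    /// ```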
1169 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1170 let Some(base_buffer) = self.base_buffer() else {
1171 debug_panic!("not a branch buffer");
1172 return;
1173 };
1174
1175 let mut ranges = if ranges.is_empty() {
1176 &[0..usize::MAX]
1177 } else {
1178 ranges.as_slice()
1179 }
1180 .iter()
1181 .peekable();
1182
1183 let mut edits = Vec::new();
1184 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1185 let mut is_included = false;
1186 while let Some(range) = ranges.peek() {
1187 if range.end < edit.new.start {
1188 ranges.next().unwrap();
1189 } else {
1190 if range.start <= edit.new.end {
1191 is_included = true;
1192 }
1193 break;
1194 }
1195 }
1196
1197 if is_included {
1198 edits.push((
1199 edit.old.clone(),
1200 self.text_for_range(edit.new.clone()).collect::<String>(),
1201 ));
1202 }
1203 }
1204
1205 let operation = base_buffer.update(cx, |base_buffer, cx| {
1206 // cx.emit(BufferEvent::DiffBaseChanged);
1207 base_buffer.edit(edits, None, cx)
1208 });
1209
1210 if let Some(operation) = operation
1211 && let Some(BufferBranchState {
1212 merged_operations, ..
1213 }) = &mut self.branch_state
1214 {
1215 merged_operations.push(operation);
1216 }
1217 }
1218
1219 fn on_base_buffer_event(
1220 &mut self,
1221 _: Entity<Buffer>,
1222 event: &BufferEvent,
1223 cx: &mut Context<Self>,
1224 ) {
1225 let BufferEvent::Operation { operation, .. } = event else {
1226 return;
1227 };
1228 let Some(BufferBranchState {
1229 merged_operations, ..
1230 }) = &mut self.branch_state
1231 else {
1232 return;
1233 };
1234
1235 let mut operation_to_undo = None;
1236 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1237 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1238 {
1239 merged_operations.remove(ix);
1240 operation_to_undo = Some(operation.timestamp);
1241 }
1242
1243 self.apply_ops([operation.clone()], cx);
1244
1245 if let Some(timestamp) = operation_to_undo {
1246 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1247 self.undo_operations(counts, cx);
1248 }
1249 }
1250
1251 #[cfg(test)]
1252 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1253 &self.text
1254 }
1255
1256 /// Retrieve a snapshot of the buffer's raw text, without any
1257 /// language-related state like the syntax tree or diagnostics.
1258 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1259 self.text.snapshot()
1260 }
1261
1262 /// The file associated with the buffer, if any.
1263 pub fn file(&self) -> Option<&Arc<dyn File>> {
1264 self.file.as_ref()
1265 }
1266
1267 /// The version of the buffer that was last saved or reloaded from disk.
1268 pub fn saved_version(&self) -> &clock::Global {
1269 &self.saved_version
1270 }
1271
1272 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1273 pub fn saved_mtime(&self) -> Option<MTime> {
1274 self.saved_mtime
1275 }
1276
1277 /// Assign a language to the buffer.
1278 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1279 self.non_text_state_update_count += 1;
1280 self.syntax_map.lock().clear(&self.text);
1281 self.language = language;
1282 self.was_changed();
1283 self.reparse(cx);
1284 cx.emit(BufferEvent::LanguageChanged);
1285 }
1286
1287 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1288 /// other languages if parts of the buffer are written in different languages.
1289 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1290 self.syntax_map
1291 .lock()
1292 .set_language_registry(language_registry);
1293 }
1294
1295 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1296 self.syntax_map.lock().language_registry()
1297 }
1298
1299 /// Assign the line ending type to the buffer.
1300 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1301 self.text.set_line_ending(line_ending);
1302
1303 let lamport_timestamp = self.text.lamport_clock.tick();
1304 self.send_operation(
1305 Operation::UpdateLineEnding {
1306 line_ending,
1307 lamport_timestamp,
1308 },
1309 true,
1310 cx,
1311 );
1312 }
1313
1314 /// Assign the buffer a new [`Capability`].
1315 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1316 if self.capability != capability {
1317 self.capability = capability;
1318 cx.emit(BufferEvent::CapabilityChanged)
1319 }
1320 }
1321
1322 /// This method is called to signal that the buffer has been saved.
1323 pub fn did_save(
1324 &mut self,
1325 version: clock::Global,
1326 mtime: Option<MTime>,
1327 cx: &mut Context<Self>,
1328 ) {
1329 self.saved_version = version.clone();
1330 self.has_unsaved_edits.set((version, false));
1331 self.has_conflict = false;
1332 self.saved_mtime = mtime;
1333 self.was_changed();
1334 cx.emit(BufferEvent::Saved);
1335 cx.notify();
1336 }
1337
1338 /// Reloads the contents of the buffer from disk.
1339 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1340 let (tx, rx) = futures::channel::oneshot::channel();
1341 let prev_version = self.text.version();
1342 self.reload_task = Some(cx.spawn(async move |this, cx| {
1343 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1344 let file = this.file.as_ref()?.as_local()?;
1345
1346 Some((file.disk_state().mtime(), file.load(cx)))
1347 })?
1348 else {
1349 return Ok(());
1350 };
1351
1352 let new_text = new_text.await?;
1353 let diff = this
1354 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1355 .await;
1356 this.update(cx, |this, cx| {
1357 if this.version() == diff.base_version {
1358 this.finalize_last_transaction();
1359 this.apply_diff(diff, cx);
1360 tx.send(this.finalize_last_transaction().cloned()).ok();
1361 this.has_conflict = false;
1362 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1363 } else {
1364 if !diff.edits.is_empty()
1365 || this
1366 .edits_since::<usize>(&diff.base_version)
1367 .next()
1368 .is_some()
1369 {
1370 this.has_conflict = true;
1371 }
1372
1373 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1374 }
1375
1376 this.reload_task.take();
1377 })
1378 }));
1379 rx
1380 }
1381
1382 /// This method is called to signal that the buffer has been reloaded.
1383 pub fn did_reload(
1384 &mut self,
1385 version: clock::Global,
1386 line_ending: LineEnding,
1387 mtime: Option<MTime>,
1388 cx: &mut Context<Self>,
1389 ) {
1390 self.saved_version = version;
1391 self.has_unsaved_edits
1392 .set((self.saved_version.clone(), false));
1393 self.text.set_line_ending(line_ending);
1394 self.saved_mtime = mtime;
1395 cx.emit(BufferEvent::Reloaded);
1396 cx.notify();
1397 }
1398
1399 /// Updates the [`File`] backing this buffer. This should be called when
1400 /// the file has changed or has been deleted.
1401 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1402 let was_dirty = self.is_dirty();
1403 let mut file_changed = false;
1404
1405 if let Some(old_file) = self.file.as_ref() {
1406 if new_file.path() != old_file.path() {
1407 file_changed = true;
1408 }
1409
1410 let old_state = old_file.disk_state();
1411 let new_state = new_file.disk_state();
1412 if old_state != new_state {
1413 file_changed = true;
1414 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1415 cx.emit(BufferEvent::ReloadNeeded)
1416 }
1417 }
1418 } else {
1419 file_changed = true;
1420 };
1421
1422 self.file = Some(new_file);
1423 if file_changed {
1424 self.was_changed();
1425 self.non_text_state_update_count += 1;
1426 if was_dirty != self.is_dirty() {
1427 cx.emit(BufferEvent::DirtyChanged);
1428 }
1429 cx.emit(BufferEvent::FileHandleChanged);
1430 cx.notify();
1431 }
1432 }
1433
1434 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1435 Some(self.branch_state.as_ref()?.base_buffer.clone())
1436 }
1437
1438 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1439 pub fn language(&self) -> Option<&Arc<Language>> {
1440 self.language.as_ref()
1441 }
1442
1443 /// Returns the [`Language`] at the given location.
1444 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1445 let offset = position.to_offset(self);
1446 let mut is_first = true;
1447 let start_anchor = self.anchor_before(offset);
1448 let end_anchor = self.anchor_after(offset);
1449 self.syntax_map
1450 .lock()
1451 .layers_for_range(offset..offset, &self.text, false)
1452 .filter(|layer| {
1453 if is_first {
1454 is_first = false;
1455 return true;
1456 }
1457
1458 layer
1459 .included_sub_ranges
1460 .map(|sub_ranges| {
1461 sub_ranges.iter().any(|sub_range| {
1462 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1463 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1464 !is_before_start && !is_after_end
1465 })
1466 })
1467 .unwrap_or(true)
1468 })
1469 .last()
1470 .map(|info| info.language.clone())
1471 .or_else(|| self.language.clone())
1472 }
1473
1474 /// Returns each [`Language`] for the active syntax layers at the given location.
1475 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1476 let offset = position.to_offset(self);
1477 let mut languages: Vec<Arc<Language>> = self
1478 .syntax_map
1479 .lock()
1480 .layers_for_range(offset..offset, &self.text, false)
1481 .map(|info| info.language.clone())
1482 .collect();
1483
1484 if languages.is_empty()
1485 && let Some(buffer_language) = self.language()
1486 {
1487 languages.push(buffer_language.clone());
1488 }
1489
1490 languages
1491 }
1492
1493 /// An integer version number that accounts for all updates besides
1494 /// the buffer's text itself (which is versioned via a version vector).
1495 pub fn non_text_state_update_count(&self) -> usize {
1496 self.non_text_state_update_count
1497 }
1498
1499 /// Whether the buffer is being parsed in the background.
1500 #[cfg(any(test, feature = "test-support"))]
1501 pub fn is_parsing(&self) -> bool {
1502 self.reparse.is_some()
1503 }
1504
1505 /// Indicates whether the buffer contains any regions that may be
1506 /// written in a language that hasn't been loaded yet.
1507 pub fn contains_unknown_injections(&self) -> bool {
1508 self.syntax_map.lock().contains_unknown_injections()
1509 }
1510
1511 #[cfg(any(test, feature = "test-support"))]
1512 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1513 self.sync_parse_timeout = timeout;
1514 }
1515
1516 /// Called after an edit to synchronize the buffer's main parse tree with
1517 /// the buffer's new underlying state.
1518 ///
1519 /// Locks the syntax map and interpolates the edits since the last reparse
1520 /// into the foreground syntax tree.
1521 ///
1522 /// Then takes a stable snapshot of the syntax map before unlocking it.
1523 /// The snapshot with the interpolated edits is sent to a background thread,
1524 /// where we ask Tree-sitter to perform an incremental parse.
1525 ///
1526 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1527 /// waiting on the parse to complete. As soon as it completes, we proceed
1528 /// synchronously, unless a 1ms timeout elapses.
1529 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, it calls back
    /// into the main thread and updates the foreground parse state.
1534 ///
1535 /// If the buffer or grammar changed since the start of the background parse,
1536 /// initiate an additional reparse recursively. To avoid concurrent parses
1537 /// for the same buffer, we only initiate a new parse if we are not already
1538 /// parsing in the background.
1539 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1540 if self.reparse.is_some() {
1541 return;
1542 }
1543 let language = if let Some(language) = self.language.clone() {
1544 language
1545 } else {
1546 return;
1547 };
1548
1549 let text = self.text_snapshot();
1550 let parsed_version = self.version();
1551
1552 let mut syntax_map = self.syntax_map.lock();
1553 syntax_map.interpolate(&text);
1554 let language_registry = syntax_map.language_registry();
1555 let mut syntax_snapshot = syntax_map.snapshot();
1556 drop(syntax_map);
1557
1558 let parse_task = cx.background_spawn({
1559 let language = language.clone();
1560 let language_registry = language_registry.clone();
1561 async move {
1562 syntax_snapshot.reparse(&text, language_registry, language);
1563 syntax_snapshot
1564 }
1565 });
1566
1567 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1568 match cx
1569 .background_executor()
1570 .block_with_timeout(self.sync_parse_timeout, parse_task)
1571 {
1572 Ok(new_syntax_snapshot) => {
1573 self.did_finish_parsing(new_syntax_snapshot, cx);
1574 self.reparse = None;
1575 }
1576 Err(parse_task) => {
1577 // todo(lw): hot foreground spawn
1578 self.reparse = Some(cx.spawn(async move |this, cx| {
1579 let new_syntax_map = cx.background_spawn(parse_task).await;
1580 this.update(cx, move |this, cx| {
1581 let grammar_changed = || {
1582 this.language.as_ref().is_none_or(|current_language| {
1583 !Arc::ptr_eq(&language, current_language)
1584 })
1585 };
1586 let language_registry_changed = || {
1587 new_syntax_map.contains_unknown_injections()
1588 && language_registry.is_some_and(|registry| {
1589 registry.version() != new_syntax_map.language_registry_version()
1590 })
1591 };
1592 let parse_again = this.version.changed_since(&parsed_version)
1593 || language_registry_changed()
1594 || grammar_changed();
1595 this.did_finish_parsing(new_syntax_map, cx);
1596 this.reparse = None;
1597 if parse_again {
1598 this.reparse(cx);
1599 }
1600 })
1601 .ok();
1602 }));
1603 }
1604 }
1605 }
1606
1607 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1608 self.was_changed();
1609 self.non_text_state_update_count += 1;
1610 self.syntax_map.lock().did_parse(syntax_snapshot);
1611 self.request_autoindent(cx);
1612 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1613 cx.emit(BufferEvent::Reparsed);
1614 cx.notify();
1615 }
1616
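    /// Returns a watch channel receiver that reports whether the buffer is currently
    /// being parsed ([`ParseStatus::Parsing`]) or is up to date ([`ParseStatus::Idle`]).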
1617 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1618 self.parse_status.1.clone()
1619 }
1620
1621 /// Assign to the buffer a set of diagnostics created by a given language server.
1622 pub fn update_diagnostics(
1623 &mut self,
1624 server_id: LanguageServerId,
1625 diagnostics: DiagnosticSet,
1626 cx: &mut Context<Self>,
1627 ) {
1628 let lamport_timestamp = self.text.lamport_clock.tick();
1629 let op = Operation::UpdateDiagnostics {
1630 server_id,
1631 diagnostics: diagnostics.iter().cloned().collect(),
1632 lamport_timestamp,
1633 };
1634
1635 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1636 self.send_operation(op, true, cx);
1637 }
1638
1639 pub fn buffer_diagnostics(
1640 &self,
1641 for_server: Option<LanguageServerId>,
1642 ) -> Vec<&DiagnosticEntry<Anchor>> {
1643 match for_server {
1644 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1645 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1646 Err(_) => Vec::new(),
1647 },
1648 None => self
1649 .diagnostics
1650 .iter()
1651 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1652 .collect(),
1653 }
1654 }
1655
1656 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1657 if let Some(indent_sizes) = self.compute_autoindents() {
1658 let indent_sizes = cx.background_spawn(indent_sizes);
1659 match cx
1660 .background_executor()
1661 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1662 {
1663 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1664 Err(indent_sizes) => {
1665 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1666 let indent_sizes = indent_sizes.await;
1667 this.update(cx, |this, cx| {
1668 this.apply_autoindents(indent_sizes, cx);
1669 })
1670 .ok();
1671 }));
1672 }
1673 }
1674 } else {
1675 self.autoindent_requests.clear();
1676 for tx in self.wait_for_autoindent_txs.drain(..) {
1677 tx.send(()).ok();
1678 }
1679 }
1680 }
1681
1682 fn compute_autoindents(
1683 &self,
1684 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1685 let max_rows_between_yields = 100;
1686 let snapshot = self.snapshot();
1687 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1688 return None;
1689 }
1690
1691 let autoindent_requests = self.autoindent_requests.clone();
1692 Some(async move {
1693 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1694 for request in autoindent_requests {
1695 // Resolve each edited range to its row in the current buffer and in the
1696 // buffer before this batch of edits.
1697 let mut row_ranges = Vec::new();
1698 let mut old_to_new_rows = BTreeMap::new();
1699 let mut language_indent_sizes_by_new_row = Vec::new();
1700 for entry in &request.entries {
1701 let position = entry.range.start;
1702 let new_row = position.to_point(&snapshot).row;
1703 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1704 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1705
1706 if !entry.first_line_is_new {
1707 let old_row = position.to_point(&request.before_edit).row;
1708 old_to_new_rows.insert(old_row, new_row);
1709 }
1710 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1711 }
1712
1713 // Build a map containing the suggested indentation for each of the edited lines
1714 // with respect to the state of the buffer before these edits. This map is keyed
1715 // by the rows for these lines in the current state of the buffer.
1716 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1717 let old_edited_ranges =
1718 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1719 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1720 let mut language_indent_size = IndentSize::default();
1721 for old_edited_range in old_edited_ranges {
1722 let suggestions = request
1723 .before_edit
1724 .suggest_autoindents(old_edited_range.clone())
1725 .into_iter()
1726 .flatten();
1727 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1728 if let Some(suggestion) = suggestion {
1729 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1730
1731 // Find the indent size based on the language for this row.
1732 while let Some((row, size)) = language_indent_sizes.peek() {
1733 if *row > new_row {
1734 break;
1735 }
1736 language_indent_size = *size;
1737 language_indent_sizes.next();
1738 }
1739
1740 let suggested_indent = old_to_new_rows
1741 .get(&suggestion.basis_row)
1742 .and_then(|from_row| {
1743 Some(old_suggestions.get(from_row).copied()?.0)
1744 })
1745 .unwrap_or_else(|| {
1746 request
1747 .before_edit
1748 .indent_size_for_line(suggestion.basis_row)
1749 })
1750 .with_delta(suggestion.delta, language_indent_size);
1751 old_suggestions
1752 .insert(new_row, (suggested_indent, suggestion.within_error));
1753 }
1754 }
1755 yield_now().await;
1756 }
1757
1758 // Compute new suggestions for each line, but only include them in the result
1759 // if they differ from the old suggestion for that line.
1760 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1761 let mut language_indent_size = IndentSize::default();
1762 for (row_range, original_indent_column) in row_ranges {
1763 let new_edited_row_range = if request.is_block_mode {
1764 row_range.start..row_range.start + 1
1765 } else {
1766 row_range.clone()
1767 };
1768
1769 let suggestions = snapshot
1770 .suggest_autoindents(new_edited_row_range.clone())
1771 .into_iter()
1772 .flatten();
1773 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1774 if let Some(suggestion) = suggestion {
1775 // Find the indent size based on the language for this row.
1776 while let Some((row, size)) = language_indent_sizes.peek() {
1777 if *row > new_row {
1778 break;
1779 }
1780 language_indent_size = *size;
1781 language_indent_sizes.next();
1782 }
1783
1784 let suggested_indent = indent_sizes
1785 .get(&suggestion.basis_row)
1786 .copied()
1787 .map(|e| e.0)
1788 .unwrap_or_else(|| {
1789 snapshot.indent_size_for_line(suggestion.basis_row)
1790 })
1791 .with_delta(suggestion.delta, language_indent_size);
1792
1793 if old_suggestions.get(&new_row).is_none_or(
1794 |(old_indentation, was_within_error)| {
1795 suggested_indent != *old_indentation
1796 && (!suggestion.within_error || *was_within_error)
1797 },
1798 ) {
1799 indent_sizes.insert(
1800 new_row,
1801 (suggested_indent, request.ignore_empty_lines),
1802 );
1803 }
1804 }
1805 }
1806
1807 if let (true, Some(original_indent_column)) =
1808 (request.is_block_mode, original_indent_column)
1809 {
1810 let new_indent =
1811 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1812 *indent
1813 } else {
1814 snapshot.indent_size_for_line(row_range.start)
1815 };
1816 let delta = new_indent.len as i64 - original_indent_column as i64;
1817 if delta != 0 {
1818 for row in row_range.skip(1) {
1819 indent_sizes.entry(row).or_insert_with(|| {
1820 let mut size = snapshot.indent_size_for_line(row);
1821 if size.kind == new_indent.kind {
1822 match delta.cmp(&0) {
1823 Ordering::Greater => size.len += delta as u32,
1824 Ordering::Less => {
1825 size.len = size.len.saturating_sub(-delta as u32)
1826 }
1827 Ordering::Equal => {}
1828 }
1829 }
1830 (size, request.ignore_empty_lines)
1831 });
1832 }
1833 }
1834 }
1835
1836 yield_now().await;
1837 }
1838 }
1839
1840 indent_sizes
1841 .into_iter()
1842 .filter_map(|(row, (indent, ignore_empty_lines))| {
1843 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1844 None
1845 } else {
1846 Some((row, indent))
1847 }
1848 })
1849 .collect()
1850 })
1851 }
1852
1853 fn apply_autoindents(
1854 &mut self,
1855 indent_sizes: BTreeMap<u32, IndentSize>,
1856 cx: &mut Context<Self>,
1857 ) {
1858 self.autoindent_requests.clear();
1859 for tx in self.wait_for_autoindent_txs.drain(..) {
1860 tx.send(()).ok();
1861 }
1862
1863 let edits: Vec<_> = indent_sizes
1864 .into_iter()
1865 .filter_map(|(row, indent_size)| {
1866 let current_size = indent_size_for_line(self, row);
1867 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1868 })
1869 .collect();
1870
1871 let preserve_preview = self.preserve_preview();
1872 self.edit(edits, None, cx);
1873 if preserve_preview {
1874 self.refresh_preview();
1875 }
1876 }
1877
1878 /// Create a minimal edit that will cause the given row to be indented
1879 /// with the given size. After applying this edit, the length of the line
1880 /// will always be at least `new_size.len`.
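    ///
    /// A minimal sketch of the shape of the returned edit (the row and sizes here are
    /// illustrative, not taken from a real buffer):
    ///
    /// ```ignore
    /// // Row 3 currently has a 2-space indent, and we want a 4-space indent.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// // The edit inserts the two missing spaces at the start of the row.
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```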
1881 pub fn edit_for_indent_size_adjustment(
1882 row: u32,
1883 current_size: IndentSize,
1884 new_size: IndentSize,
1885 ) -> Option<(Range<Point>, String)> {
1886 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1888 Ordering::Greater => {
1889 let point = Point::new(row, 0);
1890 Some((
1891 point..point,
1892 iter::repeat(new_size.char())
1893 .take((new_size.len - current_size.len) as usize)
1894 .collect::<String>(),
1895 ))
1896 }
1897
1898 Ordering::Less => Some((
1899 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1900 String::new(),
1901 )),
1902
1903 Ordering::Equal => None,
1904 }
1905 } else {
1906 Some((
1907 Point::new(row, 0)..Point::new(row, current_size.len),
1908 iter::repeat(new_size.char())
1909 .take(new_size.len as usize)
1910 .collect::<String>(),
1911 ))
1912 }
1913 }
1914
1915 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1916 /// and the given new text.
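    ///
    /// A minimal sketch of computing a diff in the background and applying it, assuming this
    /// runs inside a `Buffer` update closure (`buffer: &mut Buffer`, `cx: &mut Context<Buffer>`),
    /// mirroring the pattern used by `request_autoindent` above:
    ///
    /// ```ignore
    /// let diff_task = buffer.diff("new contents\n".to_string(), cx);
    /// cx.spawn(async move |this, cx| {
    ///     let diff = diff_task.await;
    ///     // Apply the diff, adjusting it for any edits made to the buffer in the meantime.
    ///     this.update(cx, |buffer, cx| buffer.apply_diff(diff, cx)).ok();
    /// })
    /// .detach();
    /// ```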
1917 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1918 let old_text = self.as_rope().clone();
1919 let base_version = self.version();
1920 cx.background_executor()
1921 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1922 let old_text = old_text.to_string();
1923 let line_ending = LineEnding::detect(&new_text);
1924 LineEnding::normalize(&mut new_text);
1925 let edits = text_diff(&old_text, &new_text);
1926 Diff {
1927 base_version,
1928 line_ending,
1929 edits,
1930 }
1931 })
1932 }
1933
1934 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1936 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1937 let old_text = self.as_rope().clone();
1938 let line_ending = self.line_ending();
1939 let base_version = self.version();
1940 cx.background_spawn(async move {
1941 let ranges = trailing_whitespace_ranges(&old_text);
1942 let empty = Arc::<str>::from("");
1943 Diff {
1944 base_version,
1945 line_ending,
1946 edits: ranges
1947 .into_iter()
1948 .map(|range| (range, empty.clone()))
1949 .collect(),
1950 }
1951 })
1952 }
1953
1954 /// Ensures that the buffer ends with a single newline character, and
1955 /// no other whitespace. Skips if the buffer is empty.
1956 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1957 let len = self.len();
1958 if len == 0 {
1959 return;
1960 }
1961 let mut offset = len;
1962 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1963 let non_whitespace_len = chunk
1964 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1965 .len();
1966 offset -= chunk.len();
1967 offset += non_whitespace_len;
1968 if non_whitespace_len != 0 {
1969 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1970 return;
1971 }
1972 break;
1973 }
1974 }
1975 self.edit([(offset..len, "\n")], None, cx);
1976 }
1977
1978 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1979 /// calculated, then adjust the diff to account for those changes, and discard any
1980 /// parts of the diff that conflict with those changes.
1981 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1982 let snapshot = self.snapshot();
1983 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1984 let mut delta = 0;
1985 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1986 while let Some(edit_since) = edits_since.peek() {
1987 // If the edit occurs after a diff hunk, then it does not
1988 // affect that hunk.
1989 if edit_since.old.start > range.end {
1990 break;
1991 }
1992 // If the edit precedes the diff hunk, then adjust the hunk
1993 // to reflect the edit.
1994 else if edit_since.old.end < range.start {
1995 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1996 edits_since.next();
1997 }
1998 // If the edit intersects a diff hunk, then discard that hunk.
1999 else {
2000 return None;
2001 }
2002 }
2003
2004 let start = (range.start as i64 + delta) as usize;
2005 let end = (range.end as i64 + delta) as usize;
2006 Some((start..end, new_text))
2007 });
2008
2009 self.start_transaction();
2010 self.text.set_line_ending(diff.line_ending);
2011 self.edit(adjusted_edits, None, cx);
2012 self.end_transaction(cx)
2013 }
2014
2015 pub fn has_unsaved_edits(&self) -> bool {
2016 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2017
2018 if last_version == self.version {
2019 self.has_unsaved_edits
2020 .set((last_version, has_unsaved_edits));
2021 return has_unsaved_edits;
2022 }
2023
2024 let has_edits = self.has_edits_since(&self.saved_version);
2025 self.has_unsaved_edits
2026 .set((self.version.clone(), has_edits));
2027 has_edits
2028 }
2029
2030 /// Checks if the buffer has unsaved changes.
2031 pub fn is_dirty(&self) -> bool {
2032 if self.capability == Capability::ReadOnly {
2033 return false;
2034 }
2035 if self.has_conflict {
2036 return true;
2037 }
2038 match self.file.as_ref().map(|f| f.disk_state()) {
2039 Some(DiskState::New) | Some(DiskState::Deleted) => {
2040 !self.is_empty() && self.has_unsaved_edits()
2041 }
2042 _ => self.has_unsaved_edits(),
2043 }
2044 }
2045
2046 /// Checks if the buffer and its file have both changed since the buffer
2047 /// was last saved or reloaded.
2048 pub fn has_conflict(&self) -> bool {
2049 if self.has_conflict {
2050 return true;
2051 }
2052 let Some(file) = self.file.as_ref() else {
2053 return false;
2054 };
2055 match file.disk_state() {
2056 DiskState::New => false,
2057 DiskState::Present { mtime } => match self.saved_mtime {
2058 Some(saved_mtime) => {
2059 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2060 }
2061 None => true,
2062 },
2063 DiskState::Deleted => false,
2064 }
2065 }
2066
2067 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2068 pub fn subscribe(&mut self) -> Subscription {
2069 self.text.subscribe()
2070 }
2071
2072 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2073 ///
2074 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
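    ///
    /// A minimal sketch, assuming a `buffer: Entity<Buffer>`, a `cx: &mut App`, and that the
    /// caller keeps the `Rc` alive for as long as it wants to observe changes:
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.update(cx, |buffer, _| buffer.record_changes(Rc::downgrade(&changed)));
    ///
    /// // ...later, without waiting for an effect cycle:
    /// if changed.take() {
    ///     // The buffer changed since the last check.
    /// }
    /// ```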
2076 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2077 if let Err(ix) = self
2078 .change_bits
2079 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2080 {
2081 self.change_bits.insert(ix, bit);
2082 }
2083 }
2084
2085 /// Set the change bit for all "listeners".
2086 fn was_changed(&mut self) {
2087 self.change_bits.retain(|change_bit| {
2088 change_bit
2089 .upgrade()
2090 .inspect(|bit| {
2091 _ = bit.replace(true);
2092 })
2093 .is_some()
2094 });
2095 }
2096
2097 /// Starts a transaction, if one is not already in-progress. When undoing or
2098 /// redoing edits, all of the edits performed within a transaction are undone
2099 /// or redone together.
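    ///
    /// A minimal sketch, assuming a `buffer: &mut Buffer` and a `cx: &mut Context<Buffer>`
    /// (for example inside an `Entity::update` closure):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..buffer.len(), "one")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), " two")], None, cx);
    /// buffer.end_transaction(cx);
    ///
    /// // A single undo reverts both edits, because they were grouped in one transaction.
    /// buffer.undo(cx);
    /// ```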
2100 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2101 self.start_transaction_at(Instant::now())
2102 }
2103
2104 /// Starts a transaction, providing the current time. Subsequent transactions
2105 /// that occur within a short period of time will be grouped together. This
2106 /// is controlled by the buffer's undo grouping duration.
2107 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2108 self.transaction_depth += 1;
2109 if self.was_dirty_before_starting_transaction.is_none() {
2110 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2111 }
2112 self.text.start_transaction_at(now)
2113 }
2114
2115 /// Terminates the current transaction, if this is the outermost transaction.
2116 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2117 self.end_transaction_at(Instant::now(), cx)
2118 }
2119
2120 /// Terminates the current transaction, providing the current time. Subsequent transactions
2121 /// that occur within a short period of time will be grouped together. This
2122 /// is controlled by the buffer's undo grouping duration.
2123 pub fn end_transaction_at(
2124 &mut self,
2125 now: Instant,
2126 cx: &mut Context<Self>,
2127 ) -> Option<TransactionId> {
2128 assert!(self.transaction_depth > 0);
2129 self.transaction_depth -= 1;
2130 let was_dirty = if self.transaction_depth == 0 {
2131 self.was_dirty_before_starting_transaction.take().unwrap()
2132 } else {
2133 false
2134 };
2135 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2136 self.did_edit(&start_version, was_dirty, cx);
2137 Some(transaction_id)
2138 } else {
2139 None
2140 }
2141 }
2142
2143 /// Manually add a transaction to the buffer's undo history.
2144 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2145 self.text.push_transaction(transaction, now);
2146 }
2147
2148 /// Differs from `push_transaction` in that it does not clear the redo
2149 /// stack. Intended to be used to create a parent transaction to merge
2150 /// potential child transactions into.
2151 ///
2152 /// The caller is responsible for removing it from the undo history using
2153 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2154 /// are merged into this transaction, the caller is responsible for ensuring
2155 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2156 /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
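    ///
    /// A minimal sketch of that workflow, assuming a `buffer: &mut Buffer` and a
    /// `cx: &mut Context<Buffer>`:
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    ///
    /// buffer.start_transaction();
    /// buffer.edit([(0..buffer.len(), "child edit")], None, cx);
    /// match buffer.end_transaction(cx) {
    ///     // Merge the child transaction into the parent so they undo together.
    ///     Some(child) => buffer.merge_transactions(child, parent),
    ///     // Nothing was edited, so drop the empty parent from the undo history.
    ///     None => {
    ///         buffer.forget_transaction(parent);
    ///     }
    /// }
    /// ```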
2159 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2160 self.text.push_empty_transaction(now)
2161 }
2162
2163 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2165 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2166 self.text.finalize_last_transaction()
2167 }
2168
2169 /// Manually group all changes since a given transaction.
2170 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2171 self.text.group_until_transaction(transaction_id);
2172 }
2173
2174 /// Manually remove a transaction from the buffer's undo history
2175 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2176 self.text.forget_transaction(transaction_id)
2177 }
2178
2179 /// Retrieve a transaction from the buffer's undo history
2180 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2181 self.text.get_transaction(transaction_id)
2182 }
2183
2184 /// Manually merge two transactions in the buffer's undo history.
2185 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2186 self.text.merge_transactions(transaction, destination);
2187 }
2188
2189 /// Waits for the buffer to receive operations with the given timestamps.
2190 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2191 &mut self,
2192 edit_ids: It,
2193 ) -> impl Future<Output = Result<()>> + use<It> {
2194 self.text.wait_for_edits(edit_ids)
2195 }
2196
2197 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2198 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2199 &mut self,
2200 anchors: It,
2201 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2202 self.text.wait_for_anchors(anchors)
2203 }
2204
2205 /// Waits for the buffer to receive operations up to the given version.
2206 pub fn wait_for_version(
2207 &mut self,
2208 version: clock::Global,
2209 ) -> impl Future<Output = Result<()>> + use<> {
2210 self.text.wait_for_version(version)
2211 }
2212
2213 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2215 pub fn give_up_waiting(&mut self) {
2216 self.text.give_up_waiting();
2217 }
2218
2219 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2220 let mut rx = None;
2221 if !self.autoindent_requests.is_empty() {
2222 let channel = oneshot::channel();
2223 self.wait_for_autoindent_txs.push(channel.0);
2224 rx = Some(channel.1);
2225 }
2226 rx
2227 }
2228
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2230 pub fn set_active_selections(
2231 &mut self,
2232 selections: Arc<[Selection<Anchor>]>,
2233 line_mode: bool,
2234 cursor_shape: CursorShape,
2235 cx: &mut Context<Self>,
2236 ) {
2237 let lamport_timestamp = self.text.lamport_clock.tick();
2238 self.remote_selections.insert(
2239 self.text.replica_id(),
2240 SelectionSet {
2241 selections: selections.clone(),
2242 lamport_timestamp,
2243 line_mode,
2244 cursor_shape,
2245 },
2246 );
2247 self.send_operation(
2248 Operation::UpdateSelections {
2249 selections,
2250 line_mode,
2251 lamport_timestamp,
2252 cursor_shape,
2253 },
2254 true,
2255 cx,
2256 );
2257 self.non_text_state_update_count += 1;
2258 cx.notify();
2259 }
2260
2261 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2262 /// this replica.
2263 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2264 if self
2265 .remote_selections
2266 .get(&self.text.replica_id())
2267 .is_none_or(|set| !set.selections.is_empty())
2268 {
2269 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2270 }
2271 }
2272
2273 pub fn set_agent_selections(
2274 &mut self,
2275 selections: Arc<[Selection<Anchor>]>,
2276 line_mode: bool,
2277 cursor_shape: CursorShape,
2278 cx: &mut Context<Self>,
2279 ) {
2280 let lamport_timestamp = self.text.lamport_clock.tick();
2281 self.remote_selections.insert(
2282 ReplicaId::AGENT,
2283 SelectionSet {
2284 selections,
2285 lamport_timestamp,
2286 line_mode,
2287 cursor_shape,
2288 },
2289 );
2290 self.non_text_state_update_count += 1;
2291 cx.notify();
2292 }
2293
2294 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2295 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2296 }
2297
2298 /// Replaces the buffer's entire text.
2299 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2300 where
2301 T: Into<Arc<str>>,
2302 {
2303 self.autoindent_requests.clear();
2304 self.edit([(0..self.len(), text)], None, cx)
2305 }
2306
2307 /// Appends the given text to the end of the buffer.
2308 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2309 where
2310 T: Into<Arc<str>>,
2311 {
2312 self.edit([(self.len()..self.len(), text)], None, cx)
2313 }
2314
2315 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2316 /// delete, and a string of text to insert at that location.
2317 ///
2318 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2319 /// request for the edited ranges, which will be processed when the buffer finishes
2320 /// parsing.
2321 ///
2322 /// Parsing takes place at the end of a transaction, and may compute synchronously
2323 /// or asynchronously, depending on the changes.
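    ///
    /// A minimal sketch, assuming a `buffer: &mut Buffer` and a `cx: &mut Context<Buffer>`:
    ///
    /// ```ignore
    /// // Replace the whole buffer, then insert a line and let auto-indent fix it up.
    /// buffer.edit([(0..buffer.len(), "fn main() {\n}")], None, cx);
    /// buffer.edit(
    ///     [(buffer.len() - 1..buffer.len() - 1, "println!(\"hi\");\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```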
2324 pub fn edit<I, S, T>(
2325 &mut self,
2326 edits_iter: I,
2327 autoindent_mode: Option<AutoindentMode>,
2328 cx: &mut Context<Self>,
2329 ) -> Option<clock::Lamport>
2330 where
2331 I: IntoIterator<Item = (Range<S>, T)>,
2332 S: ToOffset,
2333 T: Into<Arc<str>>,
2334 {
2335 // Skip invalid edits and coalesce contiguous ones.
2336 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2337
2338 for (range, new_text) in edits_iter {
2339 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2340
2341 if range.start > range.end {
2342 mem::swap(&mut range.start, &mut range.end);
2343 }
2344 let new_text = new_text.into();
2345 if !new_text.is_empty() || !range.is_empty() {
2346 if let Some((prev_range, prev_text)) = edits.last_mut()
2347 && prev_range.end >= range.start
2348 {
2349 prev_range.end = cmp::max(prev_range.end, range.end);
2350 *prev_text = format!("{prev_text}{new_text}").into();
2351 } else {
2352 edits.push((range, new_text));
2353 }
2354 }
2355 }
2356 if edits.is_empty() {
2357 return None;
2358 }
2359
2360 self.start_transaction();
2361 self.pending_autoindent.take();
2362 let autoindent_request = autoindent_mode
2363 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2364
2365 let edit_operation = self.text.edit(edits.iter().cloned());
2366 let edit_id = edit_operation.timestamp();
2367
2368 if let Some((before_edit, mode)) = autoindent_request {
2369 let mut delta = 0isize;
2370 let mut previous_setting = None;
2371 let entries: Vec<_> = edits
2372 .into_iter()
2373 .enumerate()
2374 .zip(&edit_operation.as_edit().unwrap().new_text)
2375 .filter(|((_, (range, _)), _)| {
2376 let language = before_edit.language_at(range.start);
2377 let language_id = language.map(|l| l.id());
2378 if let Some((cached_language_id, auto_indent)) = previous_setting
2379 && cached_language_id == language_id
2380 {
2381 auto_indent
2382 } else {
2383 // The auto-indent setting is not present in editorconfigs, hence
2384 // we can avoid passing the file here.
2385 let auto_indent =
2386 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2387 previous_setting = Some((language_id, auto_indent));
2388 auto_indent
2389 }
2390 })
2391 .map(|((ix, (range, _)), new_text)| {
2392 let new_text_length = new_text.len();
2393 let old_start = range.start.to_point(&before_edit);
2394 let new_start = (delta + range.start as isize) as usize;
2395 let range_len = range.end - range.start;
2396 delta += new_text_length as isize - range_len as isize;
2397
2398 // Decide what range of the insertion to auto-indent, and whether
2399 // the first line of the insertion should be considered a newly-inserted line
2400 // or an edit to an existing line.
2401 let mut range_of_insertion_to_indent = 0..new_text_length;
2402 let mut first_line_is_new = true;
2403
2404 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2405 let old_line_end = before_edit.line_len(old_start.row);
2406
2407 if old_start.column > old_line_start {
2408 first_line_is_new = false;
2409 }
2410
2411 if !new_text.contains('\n')
2412 && (old_start.column + (range_len as u32) < old_line_end
2413 || old_line_end == old_line_start)
2414 {
2415 first_line_is_new = false;
2416 }
2417
2418 // When inserting text starting with a newline, avoid auto-indenting the
2419 // previous line.
2420 if new_text.starts_with('\n') {
2421 range_of_insertion_to_indent.start += 1;
2422 first_line_is_new = true;
2423 }
2424
2425 let mut original_indent_column = None;
2426 if let AutoindentMode::Block {
2427 original_indent_columns,
2428 } = &mode
2429 {
2430 original_indent_column = Some(if new_text.starts_with('\n') {
2431 indent_size_for_text(
2432 new_text[range_of_insertion_to_indent.clone()].chars(),
2433 )
2434 .len
2435 } else {
2436 original_indent_columns
2437 .get(ix)
2438 .copied()
2439 .flatten()
2440 .unwrap_or_else(|| {
2441 indent_size_for_text(
2442 new_text[range_of_insertion_to_indent.clone()].chars(),
2443 )
2444 .len
2445 })
2446 });
2447
2448 // Avoid auto-indenting the line after the edit.
2449 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2450 range_of_insertion_to_indent.end -= 1;
2451 }
2452 }
2453
2454 AutoindentRequestEntry {
2455 first_line_is_new,
2456 original_indent_column,
2457 indent_size: before_edit.language_indent_size_at(range.start, cx),
2458 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2459 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2460 }
2461 })
2462 .collect();
2463
2464 if !entries.is_empty() {
2465 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2466 before_edit,
2467 entries,
2468 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2469 ignore_empty_lines: false,
2470 }));
2471 }
2472 }
2473
2474 self.end_transaction(cx);
2475 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2476 Some(edit_id)
2477 }
2478
2479 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2480 self.was_changed();
2481
2482 if self.edits_since::<usize>(old_version).next().is_none() {
2483 return;
2484 }
2485
2486 self.reparse(cx);
2487 cx.emit(BufferEvent::Edited);
2488 if was_dirty != self.is_dirty() {
2489 cx.emit(BufferEvent::DirtyChanged);
2490 }
2491 cx.notify();
2492 }
2493
2494 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2495 where
2496 I: IntoIterator<Item = Range<T>>,
2497 T: ToOffset + Copy,
2498 {
2499 let before_edit = self.snapshot();
2500 let entries = ranges
2501 .into_iter()
2502 .map(|range| AutoindentRequestEntry {
2503 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2504 first_line_is_new: true,
2505 indent_size: before_edit.language_indent_size_at(range.start, cx),
2506 original_indent_column: None,
2507 })
2508 .collect();
2509 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2510 before_edit,
2511 entries,
2512 is_block_mode: false,
2513 ignore_empty_lines: true,
2514 }));
2515 self.request_autoindent(cx);
2516 }
2517
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2520 pub fn insert_empty_line(
2521 &mut self,
2522 position: impl ToPoint,
2523 space_above: bool,
2524 space_below: bool,
2525 cx: &mut Context<Self>,
2526 ) -> Point {
2527 let mut position = position.to_point(self);
2528
2529 self.start_transaction();
2530
2531 self.edit(
2532 [(position..position, "\n")],
2533 Some(AutoindentMode::EachLine),
2534 cx,
2535 );
2536
2537 if position.column > 0 {
2538 position += Point::new(1, 0);
2539 }
2540
2541 if !self.is_line_blank(position.row) {
2542 self.edit(
2543 [(position..position, "\n")],
2544 Some(AutoindentMode::EachLine),
2545 cx,
2546 );
2547 }
2548
2549 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2550 self.edit(
2551 [(position..position, "\n")],
2552 Some(AutoindentMode::EachLine),
2553 cx,
2554 );
2555 position.row += 1;
2556 }
2557
2558 if space_below
2559 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2560 {
2561 self.edit(
2562 [(position..position, "\n")],
2563 Some(AutoindentMode::EachLine),
2564 cx,
2565 );
2566 }
2567
2568 self.end_transaction(cx);
2569
2570 position
2571 }
2572
2573 /// Applies the given remote operations to the buffer.
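    ///
    /// A minimal sketch of forwarding operations between two replicas of the same buffer,
    /// assuming `ops` was collected from [`BufferEvent::Operation`] events emitted by another
    /// replica and `replica_b` is an `Entity<Buffer>`:
    ///
    /// ```ignore
    /// replica_b.update(cx, |buffer, cx| {
    ///     // Operations may arrive out of order; any that can't be applied yet are deferred.
    ///     buffer.apply_ops(ops, cx);
    /// });
    /// ```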
2574 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2575 self.pending_autoindent.take();
2576 let was_dirty = self.is_dirty();
2577 let old_version = self.version.clone();
2578 let mut deferred_ops = Vec::new();
2579 let buffer_ops = ops
2580 .into_iter()
2581 .filter_map(|op| match op {
2582 Operation::Buffer(op) => Some(op),
2583 _ => {
2584 if self.can_apply_op(&op) {
2585 self.apply_op(op, cx);
2586 } else {
2587 deferred_ops.push(op);
2588 }
2589 None
2590 }
2591 })
2592 .collect::<Vec<_>>();
2593 for operation in buffer_ops.iter() {
2594 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2595 }
2596 self.text.apply_ops(buffer_ops);
2597 self.deferred_ops.insert(deferred_ops);
2598 self.flush_deferred_ops(cx);
2599 self.did_edit(&old_version, was_dirty, cx);
2600 // Notify independently of whether the buffer was edited as the operations could include a
2601 // selection update.
2602 cx.notify();
2603 }
2604
2605 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2606 let mut deferred_ops = Vec::new();
2607 for op in self.deferred_ops.drain().iter().cloned() {
2608 if self.can_apply_op(&op) {
2609 self.apply_op(op, cx);
2610 } else {
2611 deferred_ops.push(op);
2612 }
2613 }
2614 self.deferred_ops.insert(deferred_ops);
2615 }
2616
2617 pub fn has_deferred_ops(&self) -> bool {
2618 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2619 }
2620
2621 fn can_apply_op(&self, operation: &Operation) -> bool {
2622 match operation {
2623 Operation::Buffer(_) => {
2624 unreachable!("buffer operations should never be applied at this layer")
2625 }
2626 Operation::UpdateDiagnostics {
2627 diagnostics: diagnostic_set,
2628 ..
2629 } => diagnostic_set.iter().all(|diagnostic| {
2630 self.text.can_resolve(&diagnostic.range.start)
2631 && self.text.can_resolve(&diagnostic.range.end)
2632 }),
2633 Operation::UpdateSelections { selections, .. } => selections
2634 .iter()
2635 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2636 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2637 }
2638 }
2639
2640 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2641 match operation {
2642 Operation::Buffer(_) => {
2643 unreachable!("buffer operations should never be applied at this layer")
2644 }
2645 Operation::UpdateDiagnostics {
2646 server_id,
2647 diagnostics: diagnostic_set,
2648 lamport_timestamp,
2649 } => {
2650 let snapshot = self.snapshot();
2651 self.apply_diagnostic_update(
2652 server_id,
2653 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2654 lamport_timestamp,
2655 cx,
2656 );
2657 }
2658 Operation::UpdateSelections {
2659 selections,
2660 lamport_timestamp,
2661 line_mode,
2662 cursor_shape,
2663 } => {
2664 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2665 && set.lamport_timestamp > lamport_timestamp
2666 {
2667 return;
2668 }
2669
2670 self.remote_selections.insert(
2671 lamport_timestamp.replica_id,
2672 SelectionSet {
2673 selections,
2674 lamport_timestamp,
2675 line_mode,
2676 cursor_shape,
2677 },
2678 );
2679 self.text.lamport_clock.observe(lamport_timestamp);
2680 self.non_text_state_update_count += 1;
2681 }
2682 Operation::UpdateCompletionTriggers {
2683 triggers,
2684 lamport_timestamp,
2685 server_id,
2686 } => {
2687 if triggers.is_empty() {
2688 self.completion_triggers_per_language_server
2689 .remove(&server_id);
2690 self.completion_triggers = self
2691 .completion_triggers_per_language_server
2692 .values()
2693 .flat_map(|triggers| triggers.iter().cloned())
2694 .collect();
2695 } else {
2696 self.completion_triggers_per_language_server
2697 .insert(server_id, triggers.iter().cloned().collect());
2698 self.completion_triggers.extend(triggers);
2699 }
2700 self.text.lamport_clock.observe(lamport_timestamp);
2701 }
2702 Operation::UpdateLineEnding {
2703 line_ending,
2704 lamport_timestamp,
2705 } => {
2706 self.text.set_line_ending(line_ending);
2707 self.text.lamport_clock.observe(lamport_timestamp);
2708 }
2709 }
2710 }
2711
2712 fn apply_diagnostic_update(
2713 &mut self,
2714 server_id: LanguageServerId,
2715 diagnostics: DiagnosticSet,
2716 lamport_timestamp: clock::Lamport,
2717 cx: &mut Context<Self>,
2718 ) {
2719 if lamport_timestamp > self.diagnostics_timestamp {
2720 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2721 if diagnostics.is_empty() {
2722 if let Ok(ix) = ix {
2723 self.diagnostics.remove(ix);
2724 }
2725 } else {
2726 match ix {
2727 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2728 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2729 };
2730 }
2731 self.diagnostics_timestamp = lamport_timestamp;
2732 self.non_text_state_update_count += 1;
2733 self.text.lamport_clock.observe(lamport_timestamp);
2734 cx.notify();
2735 cx.emit(BufferEvent::DiagnosticsUpdated);
2736 }
2737 }
2738
2739 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2740 self.was_changed();
2741 cx.emit(BufferEvent::Operation {
2742 operation,
2743 is_local,
2744 });
2745 }
2746
2747 /// Removes the selections for a given peer.
2748 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2749 self.remote_selections.remove(&replica_id);
2750 cx.notify();
2751 }
2752
2753 /// Undoes the most recent transaction.
2754 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2755 let was_dirty = self.is_dirty();
2756 let old_version = self.version.clone();
2757
2758 if let Some((transaction_id, operation)) = self.text.undo() {
2759 self.send_operation(Operation::Buffer(operation), true, cx);
2760 self.did_edit(&old_version, was_dirty, cx);
2761 Some(transaction_id)
2762 } else {
2763 None
2764 }
2765 }
2766
2767 /// Manually undoes a specific transaction in the buffer's undo history.
2768 pub fn undo_transaction(
2769 &mut self,
2770 transaction_id: TransactionId,
2771 cx: &mut Context<Self>,
2772 ) -> bool {
2773 let was_dirty = self.is_dirty();
2774 let old_version = self.version.clone();
2775 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2776 self.send_operation(Operation::Buffer(operation), true, cx);
2777 self.did_edit(&old_version, was_dirty, cx);
2778 true
2779 } else {
2780 false
2781 }
2782 }
2783
2784 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2785 pub fn undo_to_transaction(
2786 &mut self,
2787 transaction_id: TransactionId,
2788 cx: &mut Context<Self>,
2789 ) -> bool {
2790 let was_dirty = self.is_dirty();
2791 let old_version = self.version.clone();
2792
2793 let operations = self.text.undo_to_transaction(transaction_id);
2794 let undone = !operations.is_empty();
2795 for operation in operations {
2796 self.send_operation(Operation::Buffer(operation), true, cx);
2797 }
2798 if undone {
2799 self.did_edit(&old_version, was_dirty, cx)
2800 }
2801 undone
2802 }
2803
2804 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2805 let was_dirty = self.is_dirty();
2806 let operation = self.text.undo_operations(counts);
2807 let old_version = self.version.clone();
2808 self.send_operation(Operation::Buffer(operation), true, cx);
2809 self.did_edit(&old_version, was_dirty, cx);
2810 }
2811
    /// Redoes the most recently undone transaction.
2813 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2814 let was_dirty = self.is_dirty();
2815 let old_version = self.version.clone();
2816
2817 if let Some((transaction_id, operation)) = self.text.redo() {
2818 self.send_operation(Operation::Buffer(operation), true, cx);
2819 self.did_edit(&old_version, was_dirty, cx);
2820 Some(transaction_id)
2821 } else {
2822 None
2823 }
2824 }
2825
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2827 pub fn redo_to_transaction(
2828 &mut self,
2829 transaction_id: TransactionId,
2830 cx: &mut Context<Self>,
2831 ) -> bool {
2832 let was_dirty = self.is_dirty();
2833 let old_version = self.version.clone();
2834
2835 let operations = self.text.redo_to_transaction(transaction_id);
2836 let redone = !operations.is_empty();
2837 for operation in operations {
2838 self.send_operation(Operation::Buffer(operation), true, cx);
2839 }
2840 if redone {
2841 self.did_edit(&old_version, was_dirty, cx)
2842 }
2843 redone
2844 }
2845
    /// Overrides the current completion triggers with the user-provided completion triggers.
2847 pub fn set_completion_triggers(
2848 &mut self,
2849 server_id: LanguageServerId,
2850 triggers: BTreeSet<String>,
2851 cx: &mut Context<Self>,
2852 ) {
2853 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2854 if triggers.is_empty() {
2855 self.completion_triggers_per_language_server
2856 .remove(&server_id);
2857 self.completion_triggers = self
2858 .completion_triggers_per_language_server
2859 .values()
2860 .flat_map(|triggers| triggers.iter().cloned())
2861 .collect();
2862 } else {
2863 self.completion_triggers_per_language_server
2864 .insert(server_id, triggers.clone());
2865 self.completion_triggers.extend(triggers.iter().cloned());
2866 }
2867 self.send_operation(
2868 Operation::UpdateCompletionTriggers {
2869 triggers: triggers.into_iter().collect(),
2870 lamport_timestamp: self.completion_triggers_timestamp,
2871 server_id,
2872 },
2873 true,
2874 cx,
2875 );
2876 cx.notify();
2877 }
2878
2879 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2881 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2882 &self.completion_triggers
2883 }
2884
2885 /// Call this directly after performing edits to prevent the preview tab
2886 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2887 /// to return false until there are additional edits.
2888 pub fn refresh_preview(&mut self) {
2889 self.preview_version = self.version.clone();
2890 }
2891
2892 /// Whether we should preserve the preview status of a tab containing this buffer.
2893 pub fn preserve_preview(&self) -> bool {
2894 !self.has_edits_since(&self.preview_version)
2895 }
2896}
2897
2898#[doc(hidden)]
2899#[cfg(any(test, feature = "test-support"))]
2900impl Buffer {
2901 pub fn edit_via_marked_text(
2902 &mut self,
2903 marked_string: &str,
2904 autoindent_mode: Option<AutoindentMode>,
2905 cx: &mut Context<Self>,
2906 ) {
2907 let edits = self.edits_for_marked_text(marked_string);
2908 self.edit(edits, autoindent_mode, cx);
2909 }
2910
2911 pub fn set_group_interval(&mut self, group_interval: Duration) {
2912 self.text.set_group_interval(group_interval);
2913 }
2914
2915 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2916 where
2917 T: rand::Rng,
2918 {
2919 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2920 let mut last_end = None;
2921 for _ in 0..old_range_count {
2922 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2923 break;
2924 }
2925
2926 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2927 let mut range = self.random_byte_range(new_start, rng);
2928 if rng.random_bool(0.2) {
2929 mem::swap(&mut range.start, &mut range.end);
2930 }
2931 last_end = Some(range.end);
2932
2933 let new_text_len = rng.random_range(0..10);
2934 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2935 new_text = new_text.to_uppercase();
2936
2937 edits.push((range, new_text));
2938 }
2939 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2940 self.edit(edits, None, cx);
2941 }
2942
2943 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2944 let was_dirty = self.is_dirty();
2945 let old_version = self.version.clone();
2946
2947 let ops = self.text.randomly_undo_redo(rng);
2948 if !ops.is_empty() {
2949 for op in ops {
2950 self.send_operation(Operation::Buffer(op), true, cx);
2951 self.did_edit(&old_version, was_dirty, cx);
2952 }
2953 }
2954 }
2955}
2956
2957impl EventEmitter<BufferEvent> for Buffer {}
2958
2959impl Deref for Buffer {
2960 type Target = TextBuffer;
2961
2962 fn deref(&self) -> &Self::Target {
2963 &self.text
2964 }
2965}
2966
2967impl BufferSnapshot {
2968 /// Returns [`IndentSize`] for a given line that respects user settings and
2969 /// language preferences.
2970 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2971 indent_size_for_line(self, row)
2972 }
2973
2974 /// Returns [`IndentSize`] for a given position that respects user settings
2975 /// and language preferences.
2976 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2977 let settings = language_settings(
2978 self.language_at(position).map(|l| l.name()),
2979 self.file(),
2980 cx,
2981 );
2982 if settings.hard_tabs {
2983 IndentSize::tab()
2984 } else {
2985 IndentSize::spaces(settings.tab_size.get())
2986 }
2987 }
2988
2989 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2990 /// is passed in as `single_indent_size`.
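    ///
    /// A minimal sketch, assuming a `snapshot: BufferSnapshot` whose language provides
    /// indentation queries:
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(2..6, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent by {} column(s)", indent.len);
    /// }
    /// ```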
2991 pub fn suggested_indents(
2992 &self,
2993 rows: impl Iterator<Item = u32>,
2994 single_indent_size: IndentSize,
2995 ) -> BTreeMap<u32, IndentSize> {
2996 let mut result = BTreeMap::new();
2997
2998 for row_range in contiguous_ranges(rows, 10) {
2999 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3000 Some(suggestions) => suggestions,
3001 _ => break,
3002 };
3003
3004 for (row, suggestion) in row_range.zip(suggestions) {
3005 let indent_size = if let Some(suggestion) = suggestion {
3006 result
3007 .get(&suggestion.basis_row)
3008 .copied()
3009 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3010 .with_delta(suggestion.delta, single_indent_size)
3011 } else {
3012 self.indent_size_for_line(row)
3013 };
3014
3015 result.insert(row, indent_size);
3016 }
3017 }
3018
3019 result
3020 }
3021
3022 fn suggest_autoindents(
3023 &self,
3024 row_range: Range<u32>,
3025 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3026 let config = &self.language.as_ref()?.config;
3027 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3028
3029 #[derive(Debug, Clone)]
3030 struct StartPosition {
3031 start: Point,
3032 suffix: SharedString,
3033 }
3034
3035 // Find the suggested indentation ranges based on the syntax tree.
3036 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3037 let end = Point::new(row_range.end, 0);
3038 let range = (start..end).to_offset(&self.text);
3039 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3040 Some(&grammar.indents_config.as_ref()?.query)
3041 });
3042 let indent_configs = matches
3043 .grammars()
3044 .iter()
3045 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3046 .collect::<Vec<_>>();
3047
3048 let mut indent_ranges = Vec::<Range<Point>>::new();
3049 let mut start_positions = Vec::<StartPosition>::new();
3050 let mut outdent_positions = Vec::<Point>::new();
3051 while let Some(mat) = matches.peek() {
3052 let mut start: Option<Point> = None;
3053 let mut end: Option<Point> = None;
3054
3055 let config = indent_configs[mat.grammar_index];
3056 for capture in mat.captures {
3057 if capture.index == config.indent_capture_ix {
3058 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3059 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3060 } else if Some(capture.index) == config.start_capture_ix {
3061 start = Some(Point::from_ts_point(capture.node.end_position()));
3062 } else if Some(capture.index) == config.end_capture_ix {
3063 end = Some(Point::from_ts_point(capture.node.start_position()));
3064 } else if Some(capture.index) == config.outdent_capture_ix {
3065 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3066 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3067 start_positions.push(StartPosition {
3068 start: Point::from_ts_point(capture.node.start_position()),
3069 suffix: suffix.clone(),
3070 });
3071 }
3072 }
3073
3074 matches.advance();
3075 if let Some((start, end)) = start.zip(end) {
3076 if start.row == end.row {
3077 continue;
3078 }
3079 let range = start..end;
3080 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3081 Err(ix) => indent_ranges.insert(ix, range),
3082 Ok(ix) => {
3083 let prev_range = &mut indent_ranges[ix];
3084 prev_range.end = prev_range.end.max(range.end);
3085 }
3086 }
3087 }
3088 }
3089
3090 let mut error_ranges = Vec::<Range<Point>>::new();
3091 let mut matches = self
3092 .syntax
3093 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3094 while let Some(mat) = matches.peek() {
3095 let node = mat.captures[0].node;
3096 let start = Point::from_ts_point(node.start_position());
3097 let end = Point::from_ts_point(node.end_position());
3098 let range = start..end;
3099 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3100 Ok(ix) | Err(ix) => ix,
3101 };
3102 let mut end_ix = ix;
3103 while let Some(existing_range) = error_ranges.get(end_ix) {
3104 if existing_range.end < end {
3105 end_ix += 1;
3106 } else {
3107 break;
3108 }
3109 }
3110 error_ranges.splice(ix..end_ix, [range]);
3111 matches.advance();
3112 }
3113
3114 outdent_positions.sort();
3115 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and set its end to the outdent position.
3118 if let Some(range_to_truncate) = indent_ranges
3119 .iter_mut()
3120 .filter(|indent_range| indent_range.contains(&outdent_position))
3121 .next_back()
3122 {
3123 range_to_truncate.end = outdent_position;
3124 }
3125 }
3126
3127 start_positions.sort_by_key(|b| b.start);
3128
        // Find the suggested indentation increases and decreases based on regexes.
3130 let mut regex_outdent_map = HashMap::default();
3131 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3132 let mut start_positions_iter = start_positions.iter().peekable();
3133
3134 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3135 self.for_each_line(
3136 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3137 ..Point::new(row_range.end, 0),
3138 |row, line| {
3139 if config
3140 .decrease_indent_pattern
3141 .as_ref()
3142 .is_some_and(|regex| regex.is_match(line))
3143 {
3144 indent_change_rows.push((row, Ordering::Less));
3145 }
3146 if config
3147 .increase_indent_pattern
3148 .as_ref()
3149 .is_some_and(|regex| regex.is_match(line))
3150 {
3151 indent_change_rows.push((row + 1, Ordering::Greater));
3152 }
3153 while let Some(pos) = start_positions_iter.peek() {
3154 if pos.start.row < row {
3155 let pos = start_positions_iter.next().unwrap();
3156 last_seen_suffix
3157 .entry(pos.suffix.to_string())
3158 .or_default()
3159 .push(pos.start);
3160 } else {
3161 break;
3162 }
3163 }
3164 for rule in &config.decrease_indent_patterns {
3165 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3166 let row_start_column = self.indent_size_for_line(row).len;
3167 let basis_row = rule
3168 .valid_after
3169 .iter()
3170 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3171 .flatten()
3172 .filter(|start_point| start_point.column <= row_start_column)
3173 .max_by_key(|start_point| start_point.row);
3174 if let Some(outdent_to_row) = basis_row {
3175 regex_outdent_map.insert(row, outdent_to_row.row);
3176 }
3177 break;
3178 }
3179 }
3180 },
3181 );
3182
3183 let mut indent_changes = indent_change_rows.into_iter().peekable();
3184 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3185 prev_non_blank_row.unwrap_or(0)
3186 } else {
3187 row_range.start.saturating_sub(1)
3188 };
3189
3190 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3191 Some(row_range.map(move |row| {
3192 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3193
3194 let mut indent_from_prev_row = false;
3195 let mut outdent_from_prev_row = false;
3196 let mut outdent_to_row = u32::MAX;
3197 let mut from_regex = false;
3198
3199 while let Some((indent_row, delta)) = indent_changes.peek() {
3200 match indent_row.cmp(&row) {
3201 Ordering::Equal => match delta {
3202 Ordering::Less => {
3203 from_regex = true;
3204 outdent_from_prev_row = true
3205 }
3206 Ordering::Greater => {
3207 indent_from_prev_row = true;
3208 from_regex = true
3209 }
3210 _ => {}
3211 },
3212
3213 Ordering::Greater => break,
3214 Ordering::Less => {}
3215 }
3216
3217 indent_changes.next();
3218 }
3219
3220 for range in &indent_ranges {
3221 if range.start.row >= row {
3222 break;
3223 }
3224 if range.start.row == prev_row && range.end > row_start {
3225 indent_from_prev_row = true;
3226 }
3227 if range.end > prev_row_start && range.end <= row_start {
3228 outdent_to_row = outdent_to_row.min(range.start.row);
3229 }
3230 }
3231
3232 if let Some(basis_row) = regex_outdent_map.get(&row) {
3233 indent_from_prev_row = false;
3234 outdent_to_row = *basis_row;
3235 from_regex = true;
3236 }
3237
3238 let within_error = error_ranges
3239 .iter()
3240 .any(|e| e.start.row < row && e.end > row_start);
3241
3242 let suggestion = if outdent_to_row == prev_row
3243 || (outdent_from_prev_row && indent_from_prev_row)
3244 {
3245 Some(IndentSuggestion {
3246 basis_row: prev_row,
3247 delta: Ordering::Equal,
3248 within_error: within_error && !from_regex,
3249 })
3250 } else if indent_from_prev_row {
3251 Some(IndentSuggestion {
3252 basis_row: prev_row,
3253 delta: Ordering::Greater,
3254 within_error: within_error && !from_regex,
3255 })
3256 } else if outdent_to_row < prev_row {
3257 Some(IndentSuggestion {
3258 basis_row: outdent_to_row,
3259 delta: Ordering::Equal,
3260 within_error: within_error && !from_regex,
3261 })
3262 } else if outdent_from_prev_row {
3263 Some(IndentSuggestion {
3264 basis_row: prev_row,
3265 delta: Ordering::Less,
3266 within_error: within_error && !from_regex,
3267 })
3268 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3269 {
3270 Some(IndentSuggestion {
3271 basis_row: prev_row,
3272 delta: Ordering::Equal,
3273 within_error: within_error && !from_regex,
3274 })
3275 } else {
3276 None
3277 };
3278
3279 prev_row = row;
3280 prev_row_start = row_start;
3281 suggestion
3282 }))
3283 }
3284
3285 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3286 while row > 0 {
3287 row -= 1;
3288 if !self.is_line_blank(row) {
3289 return Some(row);
3290 }
3291 }
3292 None
3293 }
3294
3295 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3296 let captures = self.syntax.captures(range, &self.text, |grammar| {
3297 grammar
3298 .highlights_config
3299 .as_ref()
3300 .map(|config| &config.query)
3301 });
3302 let highlight_maps = captures
3303 .grammars()
3304 .iter()
3305 .map(|grammar| grammar.highlight_map())
3306 .collect();
3307 (captures, highlight_maps)
3308 }
3309
3310 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3311 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3312 /// returned in chunks where each chunk has a single syntax highlighting style and
3313 /// diagnostic status.
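    ///
    /// A minimal sketch, assuming a `snapshot: BufferSnapshot` and that each chunk exposes its
    /// text via a `text` field:
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries a single highlight style and diagnostic status.
    ///     text.push_str(chunk.text);
    /// }
    /// assert_eq!(text, snapshot.text());
    /// ```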
3314 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3315 let range = range.start.to_offset(self)..range.end.to_offset(self);
3316
3317 let mut syntax = None;
3318 if language_aware {
3319 syntax = Some(self.get_highlights(range.clone()));
3320 }
3321 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3322 let diagnostics = language_aware;
3323 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3324 }
3325
3326 pub fn highlighted_text_for_range<T: ToOffset>(
3327 &self,
3328 range: Range<T>,
3329 override_style: Option<HighlightStyle>,
3330 syntax_theme: &SyntaxTheme,
3331 ) -> HighlightedText {
3332 HighlightedText::from_buffer_range(
3333 range,
3334 &self.text,
3335 &self.syntax,
3336 override_style,
3337 syntax_theme,
3338 )
3339 }
3340
3341 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3343 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3344 let mut line = String::new();
3345 let mut row = range.start.row;
3346 for chunk in self
3347 .as_rope()
3348 .chunks_in_range(range.to_offset(self))
3349 .chain(["\n"])
3350 {
3351 for (newline_ix, text) in chunk.split('\n').enumerate() {
3352 if newline_ix > 0 {
3353 callback(row, &line);
3354 row += 1;
3355 line.clear();
3356 }
3357 line.push_str(text);
3358 }
3359 }
3360 }
3361
3362 /// Iterates over every [`SyntaxLayer`] in the buffer.
3363 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3364 self.syntax_layers_for_range(0..self.len(), true)
3365 }
3366
3367 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3368 let offset = position.to_offset(self);
3369 self.syntax_layers_for_range(offset..offset, false)
3370 .filter(|l| {
3371 if let Some(ranges) = l.included_sub_ranges {
3372 ranges.iter().any(|range| {
3373 let start = range.start.to_offset(self);
3374 start <= offset && {
3375 let end = range.end.to_offset(self);
3376 offset < end
3377 }
3378 })
3379 } else {
3380 l.node().start_byte() <= offset && l.node().end_byte() > offset
3381 }
3382 })
3383 .last()
3384 }
3385
3386 pub fn syntax_layers_for_range<D: ToOffset>(
3387 &self,
3388 range: Range<D>,
3389 include_hidden: bool,
3390 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3391 self.syntax
3392 .layers_for_range(range, &self.text, include_hidden)
3393 }
3394
3395 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3396 &self,
3397 range: Range<D>,
3398 ) -> Option<SyntaxLayer<'_>> {
3399 let range = range.to_offset(self);
3400 self.syntax
3401 .layers_for_range(range, &self.text, false)
3402 .max_by(|a, b| {
3403 if a.depth != b.depth {
3404 a.depth.cmp(&b.depth)
3405 } else if a.offset.0 != b.offset.0 {
3406 a.offset.0.cmp(&b.offset.0)
3407 } else {
3408 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3409 }
3410 })
3411 }
3412
3413 /// Returns the main [`Language`].
3414 pub fn language(&self) -> Option<&Arc<Language>> {
3415 self.language.as_ref()
3416 }
3417
3418 /// Returns the [`Language`] at the given location.
3419 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3420 self.syntax_layer_at(position)
3421 .map(|info| info.language)
3422 .or(self.language.as_ref())
3423 }
3424
3425 /// Returns the settings for the language at the given location.
3426 pub fn settings_at<'a, D: ToOffset>(
3427 &'a self,
3428 position: D,
3429 cx: &'a App,
3430 ) -> Cow<'a, LanguageSettings> {
3431 language_settings(
3432 self.language_at(position).map(|l| l.name()),
3433 self.file.as_ref(),
3434 cx,
3435 )
3436 }
3437
3438 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3439 CharClassifier::new(self.language_scope_at(point))
3440 }
3441
3442 /// Returns the [`LanguageScope`] at the given location.
3443 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3444 let offset = position.to_offset(self);
3445 let mut scope = None;
3446 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3447
3448 // Use the layer that has the smallest node intersecting the given point.
3449 for layer in self
3450 .syntax
3451 .layers_for_range(offset..offset, &self.text, false)
3452 {
3453 let mut cursor = layer.node().walk();
3454
3455 let mut range = None;
3456 loop {
3457 let child_range = cursor.node().byte_range();
3458 if !child_range.contains(&offset) {
3459 break;
3460 }
3461
3462 range = Some(child_range);
3463 if cursor.goto_first_child_for_byte(offset).is_none() {
3464 break;
3465 }
3466 }
3467
3468 if let Some(range) = range
3469 && smallest_range_and_depth.as_ref().is_none_or(
3470 |(smallest_range, smallest_range_depth)| {
3471 if layer.depth > *smallest_range_depth {
3472 true
3473 } else if layer.depth == *smallest_range_depth {
3474 range.len() < smallest_range.len()
3475 } else {
3476 false
3477 }
3478 },
3479 )
3480 {
3481 smallest_range_and_depth = Some((range, layer.depth));
3482 scope = Some(LanguageScope {
3483 language: layer.language.clone(),
3484 override_id: layer.override_id(offset, &self.text),
3485 });
3486 }
3487 }
3488
3489 scope.or_else(|| {
3490 self.language.clone().map(|language| LanguageScope {
3491 language,
3492 override_id: None,
3493 })
3494 })
3495 }
3496
3497 /// Returns a tuple of the range and character kind of the word
3498 /// surrounding the given position.
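    ///
    /// A minimal sketch, assuming a `snapshot: BufferSnapshot` containing `"let value = 42;"`
    /// (the offset and `CharKind` variant are illustrative):
    ///
    /// ```ignore
    /// // Offset 6 falls inside the word "value".
    /// let (range, kind) = snapshot.surrounding_word(6usize, None);
    /// assert_eq!(&snapshot.text()[range], "value");
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```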
3499 pub fn surrounding_word<T: ToOffset>(
3500 &self,
3501 start: T,
3502 scope_context: Option<CharScopeContext>,
3503 ) -> (Range<usize>, Option<CharKind>) {
3504 let mut start = start.to_offset(self);
3505 let mut end = start;
3506 let mut next_chars = self.chars_at(start).take(128).peekable();
3507 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3508
3509 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3510 let word_kind = cmp::max(
3511 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3512 next_chars.peek().copied().map(|c| classifier.kind(c)),
3513 );
3514
3515 for ch in prev_chars {
3516 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3517 start -= ch.len_utf8();
3518 } else {
3519 break;
3520 }
3521 }
3522
3523 for ch in next_chars {
3524 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3525 end += ch.len_utf8();
3526 } else {
3527 break;
3528 }
3529 }
3530
3531 (start..end, word_kind)
3532 }
3533
    /// Moves the [`tree_sitter::TreeCursor`] to the smallest syntax node (descendant or ancestor)
    /// enclosing the given range. When `require_larger` is true, the node found must be strictly
    /// larger than the query range.
    ///
    /// Returns `true` if such a node was found, and `false` otherwise. In the `false` case the
    /// cursor is left at the root of the tree.
3539 fn goto_node_enclosing_range(
3540 cursor: &mut tree_sitter::TreeCursor,
3541 query_range: &Range<usize>,
3542 require_larger: bool,
3543 ) -> bool {
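        // Walk down toward the range while the current node still encloses it; once a node fails
        // the test, walk back up. The first enclosing node reached while ascending (or the deepest
        // enclosing node reached while descending) is the result.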
3544 let mut ascending = false;
3545 loop {
3546 let mut range = cursor.node().byte_range();
3547 if query_range.is_empty() {
3548 // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3550 if range.start > query_range.start {
3551 cursor.goto_previous_sibling();
3552 range = cursor.node().byte_range();
3553 }
3554 } else {
3555 // When the query range is non-empty and the current node ends exactly at the start,
3556 // move to the next sibling to find a node that extends beyond the start.
3557 if range.end == query_range.start {
3558 cursor.goto_next_sibling();
3559 range = cursor.node().byte_range();
3560 }
3561 }
3562
3563 let encloses = range.contains_inclusive(query_range)
3564 && (!require_larger || range.len() > query_range.len());
3565 if !encloses {
3566 ascending = true;
3567 if !cursor.goto_parent() {
3568 return false;
3569 }
3570 continue;
3571 } else if ascending {
3572 return true;
3573 }
3574
3575 // Descend into the current node.
3576 if cursor
3577 .goto_first_child_for_byte(query_range.start)
3578 .is_none()
3579 {
3580 return true;
3581 }
3582 }
3583 }
3584
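    /// Returns the smallest syntax node that encloses the given range and is strictly larger than
    /// it, preferring the smallest such node across all syntax layers.
    ///
    /// A rough sketch of intended usage (hypothetical `snapshot` and `range`; not compiled as a
    /// doc-test):
    ///
    /// ```ignore
    /// // Repeated calls with the previous result's range walk outward through enclosing nodes,
    /// // e.g. identifier -> call expression -> statement -> block -> function item.
    /// if let Some(node) = snapshot.syntax_ancestor(range.clone()) {
    ///     let next = snapshot.syntax_ancestor(node.byte_range());
    ///     debug_assert!(next.is_none_or(|n| n.byte_range().len() > node.byte_range().len()));
    /// }
    /// ```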
3585 pub fn syntax_ancestor<'a, T: ToOffset>(
3586 &'a self,
3587 range: Range<T>,
3588 ) -> Option<tree_sitter::Node<'a>> {
3589 let range = range.start.to_offset(self)..range.end.to_offset(self);
3590 let mut result: Option<tree_sitter::Node<'a>> = None;
3591 for layer in self
3592 .syntax
3593 .layers_for_range(range.clone(), &self.text, true)
3594 {
3595 let mut cursor = layer.node().walk();
3596
3597 // Find the node that both contains the range and is larger than it.
3598 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3599 continue;
3600 }
3601
3602 let left_node = cursor.node();
3603 let mut layer_result = left_node;
3604
3605 // For an empty range, try to find another node immediately to the right of the range.
3606 if left_node.end_byte() == range.start {
3607 let mut right_node = None;
3608 while !cursor.goto_next_sibling() {
3609 if !cursor.goto_parent() {
3610 break;
3611 }
3612 }
3613
3614 while cursor.node().start_byte() == range.start {
3615 right_node = Some(cursor.node());
3616 if !cursor.goto_first_child() {
3617 break;
3618 }
3619 }
3620
3621 // If there is a candidate node on both sides of the (empty) range, then
3622 // decide between the two by favoring a named node over an anonymous token.
3623 // If both nodes are the same in that regard, favor the right one.
3624 if let Some(right_node) = right_node
3625 && (right_node.is_named() || !left_node.is_named())
3626 {
3627 layer_result = right_node;
3628 }
3629 }
3630
3631 if let Some(previous_result) = &result
3632 && previous_result.byte_range().len() < layer_result.byte_range().len()
3633 {
3634 continue;
3635 }
3636 result = Some(layer_result);
3637 }
3638
3639 result
3640 }
3641
3642 /// Find the previous sibling syntax node at the given range.
3643 ///
3644 /// This function locates the syntax node that precedes the node containing
3645 /// the given range. It searches hierarchically by:
3646 /// 1. Finding the node that contains the given range
3647 /// 2. Looking for the previous sibling at the same tree level
3648 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3649 ///
3650 /// Returns `None` if there is no previous sibling at any ancestor level.
3651 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3652 &'a self,
3653 range: Range<T>,
3654 ) -> Option<tree_sitter::Node<'a>> {
3655 let range = range.start.to_offset(self)..range.end.to_offset(self);
3656 let mut result: Option<tree_sitter::Node<'a>> = None;
3657
3658 for layer in self
3659 .syntax
3660 .layers_for_range(range.clone(), &self.text, true)
3661 {
3662 let mut cursor = layer.node().walk();
3663
3664 // Find the node that contains the range
3665 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3666 continue;
3667 }
3668
3669 // Look for the previous sibling, moving up ancestor levels if needed
3670 loop {
3671 if cursor.goto_previous_sibling() {
3672 let layer_result = cursor.node();
3673
3674 if let Some(previous_result) = &result {
3675 if previous_result.byte_range().end < layer_result.byte_range().end {
3676 continue;
3677 }
3678 }
3679 result = Some(layer_result);
3680 break;
3681 }
3682
3683 // No sibling found at this level, try moving up to parent
3684 if !cursor.goto_parent() {
3685 break;
3686 }
3687 }
3688 }
3689
3690 result
3691 }
3692
3693 /// Find the next sibling syntax node at the given range.
3694 ///
3695 /// This function locates the syntax node that follows the node containing
3696 /// the given range. It searches hierarchically by:
3697 /// 1. Finding the node that contains the given range
3698 /// 2. Looking for the next sibling at the same tree level
3699 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3700 ///
3701 /// Returns `None` if there is no next sibling at any ancestor level.
3702 pub fn syntax_next_sibling<'a, T: ToOffset>(
3703 &'a self,
3704 range: Range<T>,
3705 ) -> Option<tree_sitter::Node<'a>> {
3706 let range = range.start.to_offset(self)..range.end.to_offset(self);
3707 let mut result: Option<tree_sitter::Node<'a>> = None;
3708
3709 for layer in self
3710 .syntax
3711 .layers_for_range(range.clone(), &self.text, true)
3712 {
3713 let mut cursor = layer.node().walk();
3714
3715 // Find the node that contains the range
3716 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3717 continue;
3718 }
3719
3720 // Look for the next sibling, moving up ancestor levels if needed
3721 loop {
3722 if cursor.goto_next_sibling() {
3723 let layer_result = cursor.node();
3724
3725 if let Some(previous_result) = &result {
3726 if previous_result.byte_range().start > layer_result.byte_range().start {
3727 continue;
3728 }
3729 }
3730 result = Some(layer_result);
3731 break;
3732 }
3733
3734 // No sibling found at this level, try moving up to parent
3735 if !cursor.goto_parent() {
3736 break;
3737 }
3738 }
3739 }
3740
3741 result
3742 }
3743
    /// Returns the root syntax node within the row of the given position.
3745 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3746 let start_offset = position.to_offset(self);
3747
3748 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3749
3750 let layer = self
3751 .syntax
3752 .layers_for_range(start_offset..start_offset, &self.text, true)
3753 .next()?;
3754
3755 let mut cursor = layer.node().walk();
3756
3757 // Descend to the first leaf that touches the start of the range.
3758 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3759 if cursor.node().end_byte() == start_offset {
3760 cursor.goto_next_sibling();
3761 }
3762 }
3763
3764 // Ascend to the root node within the same row.
3765 while cursor.goto_parent() {
3766 if cursor.node().start_position().row != row {
3767 break;
3768 }
3769 }
3770
3771 Some(cursor.node())
3772 }
3773
3774 /// Returns the outline for the buffer.
3775 ///
3776 /// This method allows passing an optional [`SyntaxTheme`] to
3777 /// syntax-highlight the returned symbols.
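    ///
    /// A rough sketch of intended usage (hypothetical snapshot; not compiled as a doc-test):
    ///
    /// ```ignore
    /// let outline = snapshot.outline(None);
    /// for item in &outline.items {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```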
3778 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3779 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3780 }
3781
3782 /// Returns all the symbols that contain the given position.
3783 ///
3784 /// This method allows passing an optional [`SyntaxTheme`] to
3785 /// syntax-highlight the returned symbols.
3786 pub fn symbols_containing<T: ToOffset>(
3787 &self,
3788 position: T,
3789 theme: Option<&SyntaxTheme>,
3790 ) -> Vec<OutlineItem<Anchor>> {
3791 let position = position.to_offset(self);
3792 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3793 let end = self.clip_offset(position + 1, Bias::Right);
3794 let mut items = self.outline_items_containing(start..end, false, theme);
3795 let mut prev_depth = None;
3796 items.retain(|item| {
3797 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3798 prev_depth = Some(item.depth);
3799 result
3800 });
3801 items
3802 }
3803
3804 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3805 let range = range.to_offset(self);
3806 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3807 grammar.outline_config.as_ref().map(|c| &c.query)
3808 });
3809 let configs = matches
3810 .grammars()
3811 .iter()
3812 .map(|g| g.outline_config.as_ref().unwrap())
3813 .collect::<Vec<_>>();
3814
3815 while let Some(mat) = matches.peek() {
3816 let config = &configs[mat.grammar_index];
3817 let containing_item_node = maybe!({
3818 let item_node = mat.captures.iter().find_map(|cap| {
3819 if cap.index == config.item_capture_ix {
3820 Some(cap.node)
3821 } else {
3822 None
3823 }
3824 })?;
3825
3826 let item_byte_range = item_node.byte_range();
3827 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3828 None
3829 } else {
3830 Some(item_node)
3831 }
3832 });
3833
3834 if let Some(item_node) = containing_item_node {
3835 return Some(
3836 Point::from_ts_point(item_node.start_position())
3837 ..Point::from_ts_point(item_node.end_position()),
3838 );
3839 }
3840
3841 matches.advance();
3842 }
3843 None
3844 }
3845
3846 pub fn outline_items_containing<T: ToOffset>(
3847 &self,
3848 range: Range<T>,
3849 include_extra_context: bool,
3850 theme: Option<&SyntaxTheme>,
3851 ) -> Vec<OutlineItem<Anchor>> {
3852 self.outline_items_containing_internal(
3853 range,
3854 include_extra_context,
3855 theme,
3856 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3857 )
3858 }
3859
3860 pub fn outline_items_as_points_containing<T: ToOffset>(
3861 &self,
3862 range: Range<T>,
3863 include_extra_context: bool,
3864 theme: Option<&SyntaxTheme>,
3865 ) -> Vec<OutlineItem<Point>> {
3866 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3867 range
3868 })
3869 }
3870
3871 fn outline_items_containing_internal<T: ToOffset, U>(
3872 &self,
3873 range: Range<T>,
3874 include_extra_context: bool,
3875 theme: Option<&SyntaxTheme>,
3876 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3877 ) -> Vec<OutlineItem<U>> {
3878 let range = range.to_offset(self);
3879 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3880 grammar.outline_config.as_ref().map(|c| &c.query)
3881 });
3882
3883 let mut items = Vec::new();
3884 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3885 while let Some(mat) = matches.peek() {
3886 let config = matches.grammars()[mat.grammar_index]
3887 .outline_config
3888 .as_ref()
3889 .unwrap();
3890 if let Some(item) =
3891 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3892 {
3893 items.push(item);
3894 } else if let Some(capture) = mat
3895 .captures
3896 .iter()
3897 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3898 {
3899 let capture_range = capture.node.start_position()..capture.node.end_position();
3900 let mut capture_row_range =
3901 capture_range.start.row as u32..capture_range.end.row as u32;
3902 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3903 {
3904 capture_row_range.end -= 1;
3905 }
3906 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3907 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3908 last_row_range.end = capture_row_range.end;
3909 } else {
3910 annotation_row_ranges.push(capture_row_range);
3911 }
3912 } else {
3913 annotation_row_ranges.push(capture_row_range);
3914 }
3915 }
3916 matches.advance();
3917 }
3918
3919 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3920
3921 // Assign depths based on containment relationships and convert to anchors.
3922 let mut item_ends_stack = Vec::<Point>::new();
3923 let mut anchor_items = Vec::new();
3924 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3925 for item in items {
3926 while let Some(last_end) = item_ends_stack.last().copied() {
3927 if last_end < item.range.end {
3928 item_ends_stack.pop();
3929 } else {
3930 break;
3931 }
3932 }
3933
3934 let mut annotation_row_range = None;
3935 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3936 let row_preceding_item = item.range.start.row.saturating_sub(1);
3937 if next_annotation_row_range.end < row_preceding_item {
3938 annotation_row_ranges.next();
3939 } else {
3940 if next_annotation_row_range.end == row_preceding_item {
3941 annotation_row_range = Some(next_annotation_row_range.clone());
3942 annotation_row_ranges.next();
3943 }
3944 break;
3945 }
3946 }
3947
3948 anchor_items.push(OutlineItem {
3949 depth: item_ends_stack.len(),
3950 range: range_callback(self, item.range.clone()),
3951 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3952 text: item.text,
3953 highlight_ranges: item.highlight_ranges,
3954 name_ranges: item.name_ranges,
3955 body_range: item.body_range.map(|r| range_callback(self, r)),
3956 annotation_range: annotation_row_range.map(|annotation_range| {
3957 let point_range = Point::new(annotation_range.start, 0)
3958 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3959 range_callback(self, point_range)
3960 }),
3961 });
3962 item_ends_stack.push(item.range.end);
3963 }
3964
3965 anchor_items
3966 }
3967
3968 fn next_outline_item(
3969 &self,
3970 config: &OutlineConfig,
3971 mat: &SyntaxMapMatch,
3972 range: &Range<usize>,
3973 include_extra_context: bool,
3974 theme: Option<&SyntaxTheme>,
3975 ) -> Option<OutlineItem<Point>> {
3976 let item_node = mat.captures.iter().find_map(|cap| {
3977 if cap.index == config.item_capture_ix {
3978 Some(cap.node)
3979 } else {
3980 None
3981 }
3982 })?;
3983
3984 let item_byte_range = item_node.byte_range();
3985 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3986 return None;
3987 }
3988 let item_point_range = Point::from_ts_point(item_node.start_position())
3989 ..Point::from_ts_point(item_node.end_position());
3990
3991 let mut open_point = None;
3992 let mut close_point = None;
3993
3994 let mut buffer_ranges = Vec::new();
3995 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
3996 let mut range = node.start_byte()..node.end_byte();
3997 let start = node.start_position();
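            // If the node spans multiple lines, truncate its range to the end of its first line so
            // the outline text stays single-line.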
3998 if node.end_position().row > start.row {
3999 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4000 }
4001
4002 if !range.is_empty() {
4003 buffer_ranges.push((range, node_is_name));
4004 }
4005 };
4006
4007 for capture in mat.captures {
4008 if capture.index == config.name_capture_ix {
4009 add_to_buffer_ranges(capture.node, true);
4010 } else if Some(capture.index) == config.context_capture_ix
4011 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4012 {
4013 add_to_buffer_ranges(capture.node, false);
4014 } else {
4015 if Some(capture.index) == config.open_capture_ix {
4016 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4017 } else if Some(capture.index) == config.close_capture_ix {
4018 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4019 }
4020 }
4021 }
4022
4023 if buffer_ranges.is_empty() {
4024 return None;
4025 }
4026 let source_range_for_text =
4027 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4028
4029 let mut text = String::new();
4030 let mut highlight_ranges = Vec::new();
4031 let mut name_ranges = Vec::new();
4032 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4033 let mut last_buffer_range_end = 0;
4034 for (buffer_range, is_name) in buffer_ranges {
4035 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4036 if space_added {
4037 text.push(' ');
4038 }
4039 let before_append_len = text.len();
4040 let mut offset = buffer_range.start;
4041 chunks.seek(buffer_range.clone());
4042 for mut chunk in chunks.by_ref() {
4043 if chunk.text.len() > buffer_range.end - offset {
4044 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4045 offset = buffer_range.end;
4046 } else {
4047 offset += chunk.text.len();
4048 }
4049 let style = chunk
4050 .syntax_highlight_id
4051 .zip(theme)
4052 .and_then(|(highlight, theme)| highlight.style(theme));
4053 if let Some(style) = style {
4054 let start = text.len();
4055 let end = start + chunk.text.len();
4056 highlight_ranges.push((start..end, style));
4057 }
4058 text.push_str(chunk.text);
4059 if offset >= buffer_range.end {
4060 break;
4061 }
4062 }
4063 if is_name {
4064 let after_append_len = text.len();
4065 let start = if space_added && !name_ranges.is_empty() {
4066 before_append_len - 1
4067 } else {
4068 before_append_len
4069 };
4070 name_ranges.push(start..after_append_len);
4071 }
4072 last_buffer_range_end = buffer_range.end;
4073 }
4074
4075 Some(OutlineItem {
4076 depth: 0, // We'll calculate the depth later
4077 range: item_point_range,
4078 source_range_for_text: source_range_for_text.to_point(self),
4079 text,
4080 highlight_ranges,
4081 name_ranges,
4082 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4083 annotation_range: None,
4084 })
4085 }
4086
4087 pub fn function_body_fold_ranges<T: ToOffset>(
4088 &self,
4089 within: Range<T>,
4090 ) -> impl Iterator<Item = Range<usize>> + '_ {
4091 self.text_object_ranges(within, TreeSitterOptions::default())
4092 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4093 }
4094
    /// For each grammar in the buffer's syntax layers, runs the [`tree_sitter::Query`] returned
    /// by the provided callback against the given range.
4097 pub fn matches(
4098 &self,
4099 range: Range<usize>,
4100 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4101 ) -> SyntaxMapMatches<'_> {
4102 self.syntax.matches(range, self, query)
4103 }
4104
4105 pub fn all_bracket_ranges(
4106 &self,
4107 range: Range<usize>,
4108 ) -> impl Iterator<Item = BracketMatch> + '_ {
4109 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4110 grammar.brackets_config.as_ref().map(|c| &c.query)
4111 });
4112 let configs = matches
4113 .grammars()
4114 .iter()
4115 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4116 .collect::<Vec<_>>();
4117
4118 iter::from_fn(move || {
4119 while let Some(mat) = matches.peek() {
4120 let mut open = None;
4121 let mut close = None;
4122 let config = &configs[mat.grammar_index];
4123 let pattern = &config.patterns[mat.pattern_index];
4124 for capture in mat.captures {
4125 if capture.index == config.open_capture_ix {
4126 open = Some(capture.node.byte_range());
4127 } else if capture.index == config.close_capture_ix {
4128 close = Some(capture.node.byte_range());
4129 }
4130 }
4131
4132 matches.advance();
4133
4134 let Some((open_range, close_range)) = open.zip(close) else {
4135 continue;
4136 };
4137
4138 let bracket_range = open_range.start..=close_range.end;
4139 if !bracket_range.overlaps(&range) {
4140 continue;
4141 }
4142
4143 return Some(BracketMatch {
4144 open_range,
4145 close_range,
4146 newline_only: pattern.newline_only,
4147 });
4148 }
4149 None
4150 })
4151 }
4152
    /// Returns bracket range pairs overlapping or adjacent to `range`.
4154 pub fn bracket_ranges<T: ToOffset>(
4155 &self,
4156 range: Range<T>,
4157 ) -> impl Iterator<Item = BracketMatch> + '_ {
4158 // Find bracket pairs that *inclusively* contain the given range.
4159 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4160 self.all_bracket_ranges(range)
4161 .filter(|pair| !pair.newline_only)
4162 }
4163
4164 pub fn debug_variables_query<T: ToOffset>(
4165 &self,
4166 range: Range<T>,
4167 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4168 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4169
4170 let mut matches = self.syntax.matches_with_options(
4171 range.clone(),
4172 &self.text,
4173 TreeSitterOptions::default(),
4174 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4175 );
4176
4177 let configs = matches
4178 .grammars()
4179 .iter()
4180 .map(|grammar| grammar.debug_variables_config.as_ref())
4181 .collect::<Vec<_>>();
4182
4183 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4184
4185 iter::from_fn(move || {
4186 loop {
4187 while let Some(capture) = captures.pop() {
4188 if capture.0.overlaps(&range) {
4189 return Some(capture);
4190 }
4191 }
4192
4193 let mat = matches.peek()?;
4194
4195 let Some(config) = configs[mat.grammar_index].as_ref() else {
4196 matches.advance();
4197 continue;
4198 };
4199
4200 for capture in mat.captures {
4201 let Some(ix) = config
4202 .objects_by_capture_ix
4203 .binary_search_by_key(&capture.index, |e| e.0)
4204 .ok()
4205 else {
4206 continue;
4207 };
4208 let text_object = config.objects_by_capture_ix[ix].1;
4209 let byte_range = capture.node.byte_range();
4210
4211 let mut found = false;
4212 for (range, existing) in captures.iter_mut() {
4213 if existing == &text_object {
4214 range.start = range.start.min(byte_range.start);
4215 range.end = range.end.max(byte_range.end);
4216 found = true;
4217 break;
4218 }
4219 }
4220
4221 if !found {
4222 captures.push((byte_range, text_object));
4223 }
4224 }
4225
4226 matches.advance();
4227 }
4228 })
4229 }
4230
4231 pub fn text_object_ranges<T: ToOffset>(
4232 &self,
4233 range: Range<T>,
4234 options: TreeSitterOptions,
4235 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4236 let range =
4237 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4238
4239 let mut matches =
4240 self.syntax
4241 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4242 grammar.text_object_config.as_ref().map(|c| &c.query)
4243 });
4244
4245 let configs = matches
4246 .grammars()
4247 .iter()
4248 .map(|grammar| grammar.text_object_config.as_ref())
4249 .collect::<Vec<_>>();
4250
4251 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4252
4253 iter::from_fn(move || {
4254 loop {
4255 while let Some(capture) = captures.pop() {
4256 if capture.0.overlaps(&range) {
4257 return Some(capture);
4258 }
4259 }
4260
4261 let mat = matches.peek()?;
4262
4263 let Some(config) = configs[mat.grammar_index].as_ref() else {
4264 matches.advance();
4265 continue;
4266 };
4267
4268 for capture in mat.captures {
4269 let Some(ix) = config
4270 .text_objects_by_capture_ix
4271 .binary_search_by_key(&capture.index, |e| e.0)
4272 .ok()
4273 else {
4274 continue;
4275 };
4276 let text_object = config.text_objects_by_capture_ix[ix].1;
4277 let byte_range = capture.node.byte_range();
4278
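                    // Merge ranges captured for the same text object into a single covering range.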
4279 let mut found = false;
4280 for (range, existing) in captures.iter_mut() {
4281 if existing == &text_object {
4282 range.start = range.start.min(byte_range.start);
4283 range.end = range.end.max(byte_range.end);
4284 found = true;
4285 break;
4286 }
4287 }
4288
4289 if !found {
4290 captures.push((byte_range, text_object));
4291 }
4292 }
4293
4294 matches.advance();
4295 }
4296 })
4297 }
4298
    /// Returns the enclosing bracket ranges containing the given range.
4300 pub fn enclosing_bracket_ranges<T: ToOffset>(
4301 &self,
4302 range: Range<T>,
4303 ) -> impl Iterator<Item = BracketMatch> + '_ {
4304 let range = range.start.to_offset(self)..range.end.to_offset(self);
4305
4306 self.bracket_ranges(range.clone()).filter(move |pair| {
4307 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4308 })
4309 }
4310
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no
    /// bracket pair contains it.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
4314 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4315 &self,
4316 range: Range<T>,
4317 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4318 ) -> Option<(Range<usize>, Range<usize>)> {
4319 let range = range.start.to_offset(self)..range.end.to_offset(self);
4320
4321 // Get the ranges of the innermost pair of brackets.
4322 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4323
4324 for pair in self.enclosing_bracket_ranges(range) {
4325 if let Some(range_filter) = range_filter
4326 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4327 {
4328 continue;
4329 }
4330
4331 let len = pair.close_range.end - pair.open_range.start;
4332
4333 if let Some((existing_open, existing_close)) = &result {
4334 let existing_len = existing_close.end - existing_open.start;
4335 if len > existing_len {
4336 continue;
4337 }
4338 }
4339
4340 result = Some((pair.open_range, pair.close_range));
4341 }
4342
4343 result
4344 }
4345
    /// Returns offset ranges for any matches of the redaction query.
4347 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4348 /// will be run on the relevant section of the buffer.
4349 pub fn redacted_ranges<T: ToOffset>(
4350 &self,
4351 range: Range<T>,
4352 ) -> impl Iterator<Item = Range<usize>> + '_ {
4353 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4354 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4355 grammar
4356 .redactions_config
4357 .as_ref()
4358 .map(|config| &config.query)
4359 });
4360
4361 let configs = syntax_matches
4362 .grammars()
4363 .iter()
4364 .map(|grammar| grammar.redactions_config.as_ref())
4365 .collect::<Vec<_>>();
4366
4367 iter::from_fn(move || {
4368 let redacted_range = syntax_matches
4369 .peek()
4370 .and_then(|mat| {
4371 configs[mat.grammar_index].and_then(|config| {
4372 mat.captures
4373 .iter()
4374 .find(|capture| capture.index == config.redaction_capture_ix)
4375 })
4376 })
4377 .map(|mat| mat.node.byte_range());
4378 syntax_matches.advance();
4379 redacted_range
4380 })
4381 }
4382
4383 pub fn injections_intersecting_range<T: ToOffset>(
4384 &self,
4385 range: Range<T>,
4386 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4387 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4388
4389 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4390 grammar
4391 .injection_config
4392 .as_ref()
4393 .map(|config| &config.query)
4394 });
4395
4396 let configs = syntax_matches
4397 .grammars()
4398 .iter()
4399 .map(|grammar| grammar.injection_config.as_ref())
4400 .collect::<Vec<_>>();
4401
4402 iter::from_fn(move || {
4403 let ranges = syntax_matches.peek().and_then(|mat| {
4404 let config = &configs[mat.grammar_index]?;
4405 let content_capture_range = mat.captures.iter().find_map(|capture| {
4406 if capture.index == config.content_capture_ix {
4407 Some(capture.node.byte_range())
4408 } else {
4409 None
4410 }
4411 })?;
4412 let language = self.language_at(content_capture_range.start)?;
4413 Some((content_capture_range, language))
4414 });
4415 syntax_matches.advance();
4416 ranges
4417 })
4418 }
4419
4420 pub fn runnable_ranges(
4421 &self,
4422 offset_range: Range<usize>,
4423 ) -> impl Iterator<Item = RunnableRange> + '_ {
4424 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4425 grammar.runnable_config.as_ref().map(|config| &config.query)
4426 });
4427
4428 let test_configs = syntax_matches
4429 .grammars()
4430 .iter()
4431 .map(|grammar| grammar.runnable_config.as_ref())
4432 .collect::<Vec<_>>();
4433
4434 iter::from_fn(move || {
4435 loop {
4436 let mat = syntax_matches.peek()?;
4437
4438 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4439 let mut run_range = None;
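                    // Compute the smallest byte range spanning all of this match's captures.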
4440 let full_range = mat.captures.iter().fold(
4441 Range {
4442 start: usize::MAX,
4443 end: 0,
4444 },
4445 |mut acc, next| {
4446 let byte_range = next.node.byte_range();
4447 if acc.start > byte_range.start {
4448 acc.start = byte_range.start;
4449 }
4450 if acc.end < byte_range.end {
4451 acc.end = byte_range.end;
4452 }
4453 acc
4454 },
4455 );
4456 if full_range.start > full_range.end {
4457 // We did not find a full spanning range of this match.
4458 return None;
4459 }
4460 let extra_captures: SmallVec<[_; 1]> =
4461 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4462 test_configs
4463 .extra_captures
4464 .get(capture.index as usize)
4465 .cloned()
4466 .and_then(|tag_name| match tag_name {
4467 RunnableCapture::Named(name) => {
4468 Some((capture.node.byte_range(), name))
4469 }
4470 RunnableCapture::Run => {
4471 let _ = run_range.insert(capture.node.byte_range());
4472 None
4473 }
4474 })
4475 }));
4476 let run_range = run_range?;
4477 let tags = test_configs
4478 .query
4479 .property_settings(mat.pattern_index)
4480 .iter()
4481 .filter_map(|property| {
4482 if *property.key == *"tag" {
4483 property
4484 .value
4485 .as_ref()
4486 .map(|value| RunnableTag(value.to_string().into()))
4487 } else {
4488 None
4489 }
4490 })
4491 .collect();
4492 let extra_captures = extra_captures
4493 .into_iter()
4494 .map(|(range, name)| {
4495 (
4496 name.to_string(),
4497 self.text_for_range(range).collect::<String>(),
4498 )
4499 })
4500 .collect();
4501 // All tags should have the same range.
4502 Some(RunnableRange {
4503 run_range,
4504 full_range,
4505 runnable: Runnable {
4506 tags,
4507 language: mat.language,
4508 buffer: self.remote_id(),
4509 },
4510 extra_captures,
4511 buffer_id: self.remote_id(),
4512 })
4513 });
4514
4515 syntax_matches.advance();
4516 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`, but a match that
                    // lacked a run marker shouldn't end this iterator, so in that case we simply
                    // loop around to the next match.
4519 return test_range;
4520 }
4521 }
4522 })
4523 }
4524
    /// Returns the selections of peers intersecting the given range, optionally including the
    /// local replica's selections.
4526 #[allow(clippy::type_complexity)]
4527 pub fn selections_in_range(
4528 &self,
4529 range: Range<Anchor>,
4530 include_local: bool,
4531 ) -> impl Iterator<
4532 Item = (
4533 ReplicaId,
4534 bool,
4535 CursorShape,
4536 impl Iterator<Item = &Selection<Anchor>> + '_,
4537 ),
4538 > + '_ {
4539 self.remote_selections
4540 .iter()
4541 .filter(move |(replica_id, set)| {
4542 (include_local || **replica_id != self.text.replica_id())
4543 && !set.selections.is_empty()
4544 })
4545 .map(move |(replica_id, set)| {
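                // Binary-search for the sub-slice of selections that touch the range: from the
                // first selection ending at or after `range.start` through the last selection
                // starting at or before `range.end`.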
4546 let start_ix = match set.selections.binary_search_by(|probe| {
4547 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4548 }) {
4549 Ok(ix) | Err(ix) => ix,
4550 };
4551 let end_ix = match set.selections.binary_search_by(|probe| {
4552 probe.start.cmp(&range.end, self).then(Ordering::Less)
4553 }) {
4554 Ok(ix) | Err(ix) => ix,
4555 };
4556
4557 (
4558 *replica_id,
4559 set.line_mode,
4560 set.cursor_shape,
4561 set.selections[start_ix..end_ix].iter(),
4562 )
4563 })
4564 }
4565
    /// Returns whether the buffer contains any diagnostics.
4567 pub fn has_diagnostics(&self) -> bool {
4568 !self.diagnostics.is_empty()
4569 }
4570
4571 /// Returns all the diagnostics intersecting the given range.
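    ///
    /// A rough sketch of intended usage (hypothetical snapshot; not compiled as a doc-test):
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```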
4572 pub fn diagnostics_in_range<'a, T, O>(
4573 &'a self,
4574 search_range: Range<T>,
4575 reversed: bool,
4576 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4577 where
4578 T: 'a + Clone + ToOffset,
4579 O: 'a + FromAnchor,
4580 {
4581 let mut iterators: Vec<_> = self
4582 .diagnostics
4583 .iter()
4584 .map(|(_, collection)| {
4585 collection
4586 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4587 .peekable()
4588 })
4589 .collect();
4590
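        // Merge the per-server diagnostic iterators by repeatedly yielding whichever iterator's
        // next entry sorts first: by range start, then severity, then group id.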
4591 std::iter::from_fn(move || {
4592 let (next_ix, _) = iterators
4593 .iter_mut()
4594 .enumerate()
4595 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4596 .min_by(|(_, a), (_, b)| {
4597 let cmp = a
4598 .range
4599 .start
4600 .cmp(&b.range.start, self)
4601 // when range is equal, sort by diagnostic severity
4602 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4603 // and stabilize order with group_id
4604 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4605 if reversed { cmp.reverse() } else { cmp }
4606 })?;
4607 iterators[next_ix]
4608 .next()
4609 .map(
4610 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4611 diagnostic,
4612 range: FromAnchor::from_anchor(&range.start, self)
4613 ..FromAnchor::from_anchor(&range.end, self),
4614 },
4615 )
4616 })
4617 }
4618
4619 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4620 /// should be used instead.
4621 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4622 &self.diagnostics
4623 }
4624
    /// Returns all the diagnostic groups associated with the given
    /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4628 pub fn diagnostic_groups(
4629 &self,
4630 language_server_id: Option<LanguageServerId>,
4631 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4632 let mut groups = Vec::new();
4633
4634 if let Some(language_server_id) = language_server_id {
4635 if let Ok(ix) = self
4636 .diagnostics
4637 .binary_search_by_key(&language_server_id, |e| e.0)
4638 {
4639 self.diagnostics[ix]
4640 .1
4641 .groups(language_server_id, &mut groups, self);
4642 }
4643 } else {
4644 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4645 diagnostics.groups(*language_server_id, &mut groups, self);
4646 }
4647 }
4648
4649 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4650 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4651 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4652 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4653 });
4654
4655 groups
4656 }
4657
4658 /// Returns an iterator over the diagnostics for the given group.
4659 pub fn diagnostic_group<O>(
4660 &self,
4661 group_id: usize,
4662 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4663 where
4664 O: FromAnchor + 'static,
4665 {
4666 self.diagnostics
4667 .iter()
4668 .flat_map(move |(_, set)| set.group(group_id, self))
4669 }
4670
4671 /// An integer version number that accounts for all updates besides
4672 /// the buffer's text itself (which is versioned via a version vector).
4673 pub fn non_text_state_update_count(&self) -> usize {
4674 self.non_text_state_update_count
4675 }
4676
4677 /// An integer version that changes when the buffer's syntax changes.
4678 pub fn syntax_update_count(&self) -> usize {
4679 self.syntax.update_count()
4680 }
4681
    /// Returns a snapshot of the underlying file.
4683 pub fn file(&self) -> Option<&Arc<dyn File>> {
4684 self.file.as_ref()
4685 }
4686
4687 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4688 if let Some(file) = self.file() {
4689 if file.path().file_name().is_none() || include_root {
4690 Some(file.full_path(cx).to_string_lossy().into_owned())
4691 } else {
4692 Some(file.path().display(file.path_style(cx)).to_string())
4693 }
4694 } else {
4695 None
4696 }
4697 }
4698
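    /// Collects the distinct words in the given offset range, mapping each word to an anchor
    /// range of one of its occurrences. The words can optionally be filtered by a fuzzy query
    /// string, and words starting with digits can be skipped.
    ///
    /// A minimal sketch of intended usage (hypothetical snapshot; not compiled as a doc-test):
    ///
    /// ```ignore
    /// // With buffer text "let foo_bar = baz;", only "foo_bar" matches the fuzzy query "fb".
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("fb"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// assert!(words.contains_key("foo_bar"));
    /// ```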
4699 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4700 let query_str = query.fuzzy_contents;
4701 if query_str.is_some_and(|query| query.is_empty()) {
4702 return BTreeMap::default();
4703 }
4704
4705 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4706 language,
4707 override_id: None,
4708 }));
4709
4710 let mut query_ix = 0;
4711 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4712 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4713
4714 let mut words = BTreeMap::default();
4715 let mut current_word_start_ix = None;
4716 let mut chunk_ix = query.range.start;
4717 for chunk in self.chunks(query.range, false) {
4718 for (i, c) in chunk.text.char_indices() {
4719 let ix = chunk_ix + i;
4720 if classifier.is_word(c) {
4721 if current_word_start_ix.is_none() {
4722 current_word_start_ix = Some(ix);
4723 }
4724
4725 if let Some(query_chars) = &query_chars
4726 && query_ix < query_len
4727 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4728 {
4729 query_ix += 1;
4730 }
4731 continue;
4732 } else if let Some(word_start) = current_word_start_ix.take()
4733 && query_ix == query_len
4734 {
4735 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4736 let mut word_text = self.text_for_range(word_start..ix).peekable();
4737 let first_char = word_text
4738 .peek()
4739 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words and "words"
                    // that start with a digit.
4741 if !query.skip_digits
4742 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4743 {
4744 words.insert(word_text.collect(), word_range);
4745 }
4746 }
4747 query_ix = 0;
4748 }
4749 chunk_ix += chunk.text.len();
4750 }
4751
4752 words
4753 }
4754}
4755
4756pub struct WordsQuery<'a> {
    /// Only returns words that contain every character of this string, in order,
    /// matched case-insensitively.
4758 pub fuzzy_contents: Option<&'a str>,
4759 /// Skips words that start with a digit.
4760 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4762 pub range: Range<usize>,
4763}
4764
4765fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4766 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4767}
4768
4769fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4770 let mut result = IndentSize::spaces(0);
4771 for c in text {
4772 let kind = match c {
4773 ' ' => IndentKind::Space,
4774 '\t' => IndentKind::Tab,
4775 _ => break,
4776 };
4777 if result.len == 0 {
4778 result.kind = kind;
4779 }
4780 result.len += 1;
4781 }
4782 result
4783}
4784
4785impl Clone for BufferSnapshot {
4786 fn clone(&self) -> Self {
4787 Self {
4788 text: self.text.clone(),
4789 syntax: self.syntax.clone(),
4790 file: self.file.clone(),
4791 remote_selections: self.remote_selections.clone(),
4792 diagnostics: self.diagnostics.clone(),
4793 language: self.language.clone(),
4794 non_text_state_update_count: self.non_text_state_update_count,
4795 }
4796 }
4797}
4798
4799impl Deref for BufferSnapshot {
4800 type Target = text::BufferSnapshot;
4801
4802 fn deref(&self) -> &Self::Target {
4803 &self.text
4804 }
4805}
4806
4807unsafe impl Send for BufferChunks<'_> {}
4808
4809impl<'a> BufferChunks<'a> {
4810 pub(crate) fn new(
4811 text: &'a Rope,
4812 range: Range<usize>,
4813 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4814 diagnostics: bool,
4815 buffer_snapshot: Option<&'a BufferSnapshot>,
4816 ) -> Self {
4817 let mut highlights = None;
4818 if let Some((captures, highlight_maps)) = syntax {
4819 highlights = Some(BufferChunkHighlights {
4820 captures,
4821 next_capture: None,
4822 stack: Default::default(),
4823 highlight_maps,
4824 })
4825 }
4826
4827 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4828 let chunks = text.chunks_in_range(range.clone());
4829
4830 let mut this = BufferChunks {
4831 range,
4832 buffer_snapshot,
4833 chunks,
4834 diagnostic_endpoints,
4835 error_depth: 0,
4836 warning_depth: 0,
4837 information_depth: 0,
4838 hint_depth: 0,
4839 unnecessary_depth: 0,
4840 underline: true,
4841 highlights,
4842 };
4843 this.initialize_diagnostic_endpoints();
4844 this
4845 }
4846
    /// Seeks to the given byte range in the buffer.
4848 pub fn seek(&mut self, range: Range<usize>) {
4849 let old_range = std::mem::replace(&mut self.range, range.clone());
4850 self.chunks.set_range(self.range.clone());
4851 if let Some(highlights) = self.highlights.as_mut() {
4852 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4853 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4854 highlights
4855 .stack
4856 .retain(|(end_offset, _)| *end_offset > range.start);
4857 if let Some(capture) = &highlights.next_capture
4858 && range.start >= capture.node.start_byte()
4859 {
4860 let next_capture_end = capture.node.end_byte();
4861 if range.start < next_capture_end {
4862 highlights.stack.push((
4863 next_capture_end,
4864 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4865 ));
4866 }
4867 highlights.next_capture.take();
4868 }
4869 } else if let Some(snapshot) = self.buffer_snapshot {
4870 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4871 *highlights = BufferChunkHighlights {
4872 captures,
4873 next_capture: None,
4874 stack: Default::default(),
4875 highlight_maps,
4876 };
4877 } else {
4878 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4879 // Seeking such BufferChunks is not supported.
4880 debug_assert!(
4881 false,
4882 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4883 );
4884 }
4885
4886 highlights.captures.set_byte_range(self.range.clone());
4887 self.initialize_diagnostic_endpoints();
4888 }
4889 }
4890
4891 fn initialize_diagnostic_endpoints(&mut self) {
4892 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4893 && let Some(buffer) = self.buffer_snapshot
4894 {
4895 let mut diagnostic_endpoints = Vec::new();
4896 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4897 diagnostic_endpoints.push(DiagnosticEndpoint {
4898 offset: entry.range.start,
4899 is_start: true,
4900 severity: entry.diagnostic.severity,
4901 is_unnecessary: entry.diagnostic.is_unnecessary,
4902 underline: entry.diagnostic.underline,
4903 });
4904 diagnostic_endpoints.push(DiagnosticEndpoint {
4905 offset: entry.range.end,
4906 is_start: false,
4907 severity: entry.diagnostic.severity,
4908 is_unnecessary: entry.diagnostic.is_unnecessary,
4909 underline: entry.diagnostic.underline,
4910 });
4911 }
4912 diagnostic_endpoints
4913 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4914 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4915 self.hint_depth = 0;
4916 self.error_depth = 0;
4917 self.warning_depth = 0;
4918 self.information_depth = 0;
4919 }
4920 }
4921
4922 /// The current byte offset in the buffer.
4923 pub fn offset(&self) -> usize {
4924 self.range.start
4925 }
4926
4927 pub fn range(&self) -> Range<usize> {
4928 self.range.clone()
4929 }
4930
4931 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4932 let depth = match endpoint.severity {
4933 DiagnosticSeverity::ERROR => &mut self.error_depth,
4934 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4935 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4936 DiagnosticSeverity::HINT => &mut self.hint_depth,
4937 _ => return,
4938 };
4939 if endpoint.is_start {
4940 *depth += 1;
4941 } else {
4942 *depth -= 1;
4943 }
4944
4945 if endpoint.is_unnecessary {
4946 if endpoint.is_start {
4947 self.unnecessary_depth += 1;
4948 } else {
4949 self.unnecessary_depth -= 1;
4950 }
4951 }
4952 }
4953
4954 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4955 if self.error_depth > 0 {
4956 Some(DiagnosticSeverity::ERROR)
4957 } else if self.warning_depth > 0 {
4958 Some(DiagnosticSeverity::WARNING)
4959 } else if self.information_depth > 0 {
4960 Some(DiagnosticSeverity::INFORMATION)
4961 } else if self.hint_depth > 0 {
4962 Some(DiagnosticSeverity::HINT)
4963 } else {
4964 None
4965 }
4966 }
4967
4968 fn current_code_is_unnecessary(&self) -> bool {
4969 self.unnecessary_depth > 0
4970 }
4971}
4972
4973impl<'a> Iterator for BufferChunks<'a> {
4974 type Item = Chunk<'a>;
4975
4976 fn next(&mut self) -> Option<Self::Item> {
4977 let mut next_capture_start = usize::MAX;
4978 let mut next_diagnostic_endpoint = usize::MAX;
4979
4980 if let Some(highlights) = self.highlights.as_mut() {
4981 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4982 if *parent_capture_end <= self.range.start {
4983 highlights.stack.pop();
4984 } else {
4985 break;
4986 }
4987 }
4988
4989 if highlights.next_capture.is_none() {
4990 highlights.next_capture = highlights.captures.next();
4991 }
4992
4993 while let Some(capture) = highlights.next_capture.as_ref() {
4994 if self.range.start < capture.node.start_byte() {
4995 next_capture_start = capture.node.start_byte();
4996 break;
4997 } else {
4998 let highlight_id =
4999 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5000 highlights
5001 .stack
5002 .push((capture.node.end_byte(), highlight_id));
5003 highlights.next_capture = highlights.captures.next();
5004 }
5005 }
5006 }
5007
5008 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5009 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5010 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5011 if endpoint.offset <= self.range.start {
5012 self.update_diagnostic_depths(endpoint);
5013 diagnostic_endpoints.next();
5014 self.underline = endpoint.underline;
5015 } else {
5016 next_diagnostic_endpoint = endpoint.offset;
5017 break;
5018 }
5019 }
5020 }
5021 self.diagnostic_endpoints = diagnostic_endpoints;
5022
5023 if let Some(ChunkBitmaps {
5024 text: chunk,
5025 chars: chars_map,
5026 tabs,
5027 }) = self.chunks.peek_with_bitmaps()
5028 {
5029 let chunk_start = self.range.start;
5030 let mut chunk_end = (self.chunks.offset() + chunk.len())
5031 .min(next_capture_start)
5032 .min(next_diagnostic_endpoint);
5033 let mut highlight_id = None;
5034 if let Some(highlights) = self.highlights.as_ref()
5035 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5036 {
5037 chunk_end = chunk_end.min(*parent_capture_end);
5038 highlight_id = Some(*parent_highlight_id);
5039 }
5040 let bit_start = chunk_start - self.chunks.offset();
5041 let bit_end = chunk_end - self.chunks.offset();
5042
5043 let slice = &chunk[bit_start..bit_end];
5044
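            // Align the per-byte bitmaps (tab positions and character boundaries) with the
            // sliced text before emitting the chunk.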
5045 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5046 let tabs = (tabs >> bit_start) & mask;
5047 let chars = (chars_map >> bit_start) & mask;
5048
5049 self.range.start = chunk_end;
5050 if self.range.start == self.chunks.offset() + chunk.len() {
5051 self.chunks.next().unwrap();
5052 }
5053
5054 Some(Chunk {
5055 text: slice,
5056 syntax_highlight_id: highlight_id,
5057 underline: self.underline,
5058 diagnostic_severity: self.current_diagnostic_severity(),
5059 is_unnecessary: self.current_code_is_unnecessary(),
5060 tabs,
5061 chars,
5062 ..Chunk::default()
5063 })
5064 } else {
5065 None
5066 }
5067 }
5068}
5069
5070impl operation_queue::Operation for Operation {
5071 fn lamport_timestamp(&self) -> clock::Lamport {
5072 match self {
5073 Operation::Buffer(_) => {
5074 unreachable!("buffer operations should never be deferred at this layer")
5075 }
5076 Operation::UpdateDiagnostics {
5077 lamport_timestamp, ..
5078 }
5079 | Operation::UpdateSelections {
5080 lamport_timestamp, ..
5081 }
5082 | Operation::UpdateCompletionTriggers {
5083 lamport_timestamp, ..
5084 }
5085 | Operation::UpdateLineEnding {
5086 lamport_timestamp, ..
5087 } => *lamport_timestamp,
5088 }
5089 }
5090}
5091
5092impl Default for Diagnostic {
5093 fn default() -> Self {
5094 Self {
5095 source: Default::default(),
5096 source_kind: DiagnosticSourceKind::Other,
5097 code: None,
5098 code_description: None,
5099 severity: DiagnosticSeverity::ERROR,
5100 message: Default::default(),
5101 markdown: None,
5102 group_id: 0,
5103 is_primary: false,
5104 is_disk_based: false,
5105 is_unnecessary: false,
5106 underline: true,
5107 data: None,
5108 }
5109 }
5110}
5111
5112impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5114 pub fn spaces(len: u32) -> Self {
5115 Self {
5116 len,
5117 kind: IndentKind::Space,
5118 }
5119 }
5120
5121 /// Returns an [`IndentSize`] representing a tab.
5122 pub fn tab() -> Self {
5123 Self {
5124 len: 1,
5125 kind: IndentKind::Tab,
5126 }
5127 }
5128
5129 /// An iterator over the characters represented by this [`IndentSize`].
5130 pub fn chars(&self) -> impl Iterator<Item = char> {
5131 iter::repeat(self.char()).take(self.len as usize)
5132 }
5133
5134 /// The character representation of this [`IndentSize`].
5135 pub fn char(&self) -> char {
5136 match self.kind {
5137 IndentKind::Space => ' ',
5138 IndentKind::Tab => '\t',
5139 }
5140 }
5141
5142 /// Consumes the current [`IndentSize`] and returns a new one that has
5143 /// been shrunk or enlarged by the given size along the given direction.
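    ///
    /// For example (illustrative only; not compiled as a doc-test):
    ///
    /// ```ignore
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 8);
    /// ```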
5144 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5145 match direction {
5146 Ordering::Less => {
5147 if self.kind == size.kind && self.len >= size.len {
5148 self.len -= size.len;
5149 }
5150 }
5151 Ordering::Equal => {}
5152 Ordering::Greater => {
5153 if self.len == 0 {
5154 self = size;
5155 } else if self.kind == size.kind {
5156 self.len += size.len;
5157 }
5158 }
5159 }
5160 self
5161 }
5162
5163 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5164 match self.kind {
5165 IndentKind::Space => self.len as usize,
5166 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5167 }
5168 }
5169}
5170
5171#[cfg(any(test, feature = "test-support"))]
5172pub struct TestFile {
5173 pub path: Arc<RelPath>,
5174 pub root_name: String,
5175 pub local_root: Option<PathBuf>,
5176}
5177
5178#[cfg(any(test, feature = "test-support"))]
5179impl File for TestFile {
5180 fn path(&self) -> &Arc<RelPath> {
5181 &self.path
5182 }
5183
5184 fn full_path(&self, _: &gpui::App) -> PathBuf {
5185 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5186 }
5187
5188 fn as_local(&self) -> Option<&dyn LocalFile> {
5189 if self.local_root.is_some() {
5190 Some(self)
5191 } else {
5192 None
5193 }
5194 }
5195
5196 fn disk_state(&self) -> DiskState {
5197 unimplemented!()
5198 }
5199
5200 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5201 self.path().file_name().unwrap_or(self.root_name.as_ref())
5202 }
5203
5204 fn worktree_id(&self, _: &App) -> WorktreeId {
5205 WorktreeId::from_usize(0)
5206 }
5207
5208 fn to_proto(&self, _: &App) -> rpc::proto::File {
5209 unimplemented!()
5210 }
5211
5212 fn is_private(&self) -> bool {
5213 false
5214 }
5215
5216 fn path_style(&self, _cx: &App) -> PathStyle {
5217 PathStyle::local()
5218 }
5219}
5220
5221#[cfg(any(test, feature = "test-support"))]
5222impl LocalFile for TestFile {
5223 fn abs_path(&self, _cx: &App) -> PathBuf {
5224 PathBuf::from(self.local_root.as_ref().unwrap())
5225 .join(&self.root_name)
5226 .join(self.path.as_std_path())
5227 }
5228
5229 fn load(&self, _cx: &App) -> Task<Result<String>> {
5230 unimplemented!()
5231 }
5232
5233 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5234 unimplemented!()
5235 }
5236}
5237
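/// Groups an ascending sequence of `u32` values into contiguous ranges, capping each range at
/// `max_len` values.
///
/// For example (illustrative only; this is a crate-private helper, so it is not doc-tested):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7]);
/// ```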
5238pub(crate) fn contiguous_ranges(
5239 values: impl Iterator<Item = u32>,
5240 max_len: usize,
5241) -> impl Iterator<Item = Range<u32>> {
5242 let mut values = values;
5243 let mut current_range: Option<Range<u32>> = None;
5244 std::iter::from_fn(move || {
5245 loop {
5246 if let Some(value) = values.next() {
5247 if let Some(range) = &mut current_range
5248 && value == range.end
5249 && range.len() < max_len
5250 {
5251 range.end += 1;
5252 continue;
5253 }
5254
5255 let prev_range = current_range.clone();
5256 current_range = Some(value..(value + 1));
5257 if prev_range.is_some() {
5258 return prev_range;
5259 }
5260 } else {
5261 return current_range.take();
5262 }
5263 }
5264 })
5265}
5266
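/// Classifies characters as word, whitespace, or punctuation characters, taking any word
/// characters configured for the current [`LanguageScope`] into account.
///
/// A minimal sketch of intended usage (illustrative only; not compiled as a doc-test):
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(false);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('.'), CharKind::Punctuation);
/// ```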
5267#[derive(Default, Debug)]
5268pub struct CharClassifier {
5269 scope: Option<LanguageScope>,
5270 scope_context: Option<CharScopeContext>,
5271 ignore_punctuation: bool,
5272}
5273
5274impl CharClassifier {
5275 pub fn new(scope: Option<LanguageScope>) -> Self {
5276 Self {
5277 scope,
5278 scope_context: None,
5279 ignore_punctuation: false,
5280 }
5281 }
5282
5283 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5284 Self {
5285 scope_context,
5286 ..self
5287 }
5288 }
5289
5290 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5291 Self {
5292 ignore_punctuation,
5293 ..self
5294 }
5295 }
5296
5297 pub fn is_whitespace(&self, c: char) -> bool {
5298 self.kind(c) == CharKind::Whitespace
5299 }
5300
5301 pub fn is_word(&self, c: char) -> bool {
5302 self.kind(c) == CharKind::Word
5303 }
5304
5305 pub fn is_punctuation(&self, c: char) -> bool {
5306 self.kind(c) == CharKind::Punctuation
5307 }
5308
5309 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5310 if c.is_alphanumeric() || c == '_' {
5311 return CharKind::Word;
5312 }
5313
5314 if let Some(scope) = &self.scope {
5315 let characters = match self.scope_context {
5316 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5317 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5318 None => scope.word_characters(),
5319 };
5320 if let Some(characters) = characters
5321 && characters.contains(&c)
5322 {
5323 return CharKind::Word;
5324 }
5325 }
5326
5327 if c.is_whitespace() {
5328 return CharKind::Whitespace;
5329 }
5330
5331 if ignore_punctuation {
5332 CharKind::Word
5333 } else {
5334 CharKind::Punctuation
5335 }
5336 }
5337
5338 pub fn kind(&self, c: char) -> CharKind {
5339 self.kind_with(c, self.ignore_punctuation)
5340 }
5341}
5342
5343/// Find all of the ranges of whitespace that occur at the ends of lines
5344/// in the given rope.
5345///
5346/// This could also be done with a regex search, but this implementation
5347/// avoids copying text.
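///
/// For example (illustrative only; not compiled as a doc-test):
///
/// ```ignore
/// let rope = Rope::from("foo  \nbar\t\nbaz");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![3..5, 9..10]);
/// ```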
5348pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5349 let mut ranges = Vec::new();
5350
5351 let mut offset = 0;
5352 let mut prev_chunk_trailing_whitespace_range = 0..0;
5353 for chunk in rope.chunks() {
5354 let mut prev_line_trailing_whitespace_range = 0..0;
5355 for (i, line) in chunk.split('\n').enumerate() {
5356 let line_end_offset = offset + line.len();
5357 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5358 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5359
5360 if i == 0 && trimmed_line_len == 0 {
5361 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5362 }
5363 if !prev_line_trailing_whitespace_range.is_empty() {
5364 ranges.push(prev_line_trailing_whitespace_range);
5365 }
5366
5367 offset = line_end_offset + 1;
5368 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5369 }
5370
5371 offset -= 1;
5372 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5373 }
5374
5375 if !prev_chunk_trailing_whitespace_range.is_empty() {
5376 ranges.push(prev_chunk_trailing_whitespace_range);
5377 }
5378
5379 ranges
5380}