use crate::{
    DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
    TextObject, TreeSitterOptions,
    diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
    language_settings::{LanguageSettings, language_settings},
    outline::OutlineItem,
    syntax_map::{
        SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
        SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
    },
    task_context::RunnableRange,
    text_diff::text_diff,
};
pub use crate::{
    Grammar, Language, LanguageRegistry,
    diagnostic_set::DiagnosticSet,
    highlight_map::{HighlightId, HighlightMap},
    proto,
};
use anyhow::{Context as _, Result};
use clock::Lamport;
pub use clock::ReplicaId;
use collections::HashMap;
use fs::MTime;
use futures::channel::oneshot;
use gpui::{
    App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
    Task, TaskLabel, TextStyle,
};

use lsp::{LanguageServerId, NumberOrString};
use parking_lot::Mutex;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use settings::WorktreeId;
use smallvec::SmallVec;
use smol::future::yield_now;
use std::{
    any::Any,
    borrow::Cow,
    cell::Cell,
    cmp::{self, Ordering, Reverse},
    collections::{BTreeMap, BTreeSet},
    future::Future,
    iter::{self, Iterator, Peekable},
    mem,
    num::NonZeroU32,
    ops::{Deref, Range},
    path::PathBuf,
    rc,
    sync::{Arc, LazyLock},
    time::{Duration, Instant},
    vec,
};
use sum_tree::TreeMap;
use text::operation_queue::OperationQueue;
use text::*;
pub use text::{
    Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
    LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
    SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
    ToPointUtf16, Transaction, TransactionId, Unclipped,
};
use theme::{ActiveTheme as _, SyntaxTheme};
#[cfg(any(test, feature = "test-support"))]
use util::RandomCharIter;
use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};

#[cfg(any(test, feature = "test-support"))]
pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};

pub use lsp::DiagnosticSeverity;

/// A label for the background task spawned by the buffer to compute
/// a diff against the contents of its file.
pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);

/// Indicate whether a [`Buffer`] has permissions to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {
    /// The buffer is a mutable replica.
    ReadWrite,
    /// The buffer is a read-only replica.
    ReadOnly,
}

pub type BufferRow = u32;

/// An in-memory representation of a source code file, including its text,
/// syntax trees, git status, and diagnostics.
pub struct Buffer {
    text: TextBuffer,
    branch_state: Option<BufferBranchState>,
    /// Filesystem state, `None` when there is no path.
    file: Option<Arc<dyn File>>,
    /// The mtime of the file when this buffer was last loaded from
    /// or saved to disk.
    saved_mtime: Option<MTime>,
    /// The version vector when this buffer was last loaded from
    /// or saved to disk.
    saved_version: clock::Global,
    preview_version: clock::Global,
    transaction_depth: usize,
    was_dirty_before_starting_transaction: Option<bool>,
    reload_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
    pending_autoindent: Option<Task<()>>,
    sync_parse_timeout: Duration,
    syntax_map: Mutex<SyntaxMap>,
    reparse: Option<Task<()>>,
    parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
    non_text_state_update_count: usize,
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    diagnostics_timestamp: clock::Lamport,
    completion_triggers: BTreeSet<String>,
    completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
    completion_triggers_timestamp: clock::Lamport,
    deferred_ops: OperationQueue<Operation>,
    capability: Capability,
    has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The contents of the cell are `(self.version, has_changes)` at the time of the last call.
    has_unsaved_edits: Cell<(clock::Global, bool)>,
    change_bits: Vec<rc::Weak<Cell<bool>>>,
    _subscriptions: Vec<gpui::Subscription>,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ParseStatus {
    Idle,
    Parsing,
}

struct BufferBranchState {
    base_buffer: Entity<Buffer>,
    merged_operations: Vec<Lamport>,
}

/// An immutable, cheaply cloneable representation of a fixed
/// state of a buffer.
pub struct BufferSnapshot {
    pub text: text::BufferSnapshot,
    pub syntax: SyntaxSnapshot,
    file: Option<Arc<dyn File>>,
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    language: Option<Arc<Language>>,
    non_text_state_update_count: usize,
}

/// The kind and amount of indentation in a particular line. For now,
/// assumes that indentation is all the same character.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub struct IndentSize {
    /// The number of bytes that comprise the indentation.
    pub len: u32,
    /// The kind of whitespace used for indentation.
    pub kind: IndentKind,
}

/// A whitespace character that's used for indentation.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub enum IndentKind {
    /// An ASCII space character.
    #[default]
    Space,
    /// An ASCII tab character.
    Tab,
}

/// The shape of a selection cursor.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum CursorShape {
    /// A vertical bar
    #[default]
    Bar,
    /// A block that surrounds the following character
    Block,
    /// An underline that runs along the following character
    Underline,
    /// A box drawn around the following character
    Hollow,
}

impl From<settings::CursorShape> for CursorShape {
    fn from(shape: settings::CursorShape) -> Self {
        match shape {
            settings::CursorShape::Bar => CursorShape::Bar,
            settings::CursorShape::Block => CursorShape::Block,
            settings::CursorShape::Underline => CursorShape::Underline,
            settings::CursorShape::Hollow => CursorShape::Hollow,
        }
    }
}

#[derive(Clone, Debug)]
struct SelectionSet {
    line_mode: bool,
    cursor_shape: CursorShape,
    selections: Arc<[Selection<Anchor>]>,
    lamport_timestamp: clock::Lamport,
}

/// A diagnostic associated with a certain range of a buffer.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Diagnostic {
    /// The name of the service that produced this diagnostic.
    pub source: Option<String>,
    /// A machine-readable code that identifies this diagnostic.
    pub code: Option<NumberOrString>,
    pub code_description: Option<lsp::Uri>,
    /// Whether this diagnostic is a hint, warning, or error.
    pub severity: DiagnosticSeverity,
    /// The human-readable message associated with this diagnostic.
    pub message: String,
    /// The human-readable message (in markdown format)
    pub markdown: Option<String>,
    /// An id that identifies the group to which this diagnostic belongs.
    ///
    /// When a language server produces a diagnostic with
    /// one or more associated diagnostics, those diagnostics are all
    /// assigned a single group ID.
    pub group_id: usize,
    /// Whether this diagnostic is the primary diagnostic for its group.
    ///
    /// In a given group, the primary diagnostic is the top-level diagnostic
    /// returned by the language server. The non-primary diagnostics are the
    /// associated diagnostics.
    pub is_primary: bool,
    /// Whether this diagnostic is considered to originate from an analysis of
    /// files on disk, as opposed to any unsaved buffer contents. This is a
    /// property of a given diagnostic source, and is configured for a given
    /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
    /// for the language server.
    pub is_disk_based: bool,
    /// Whether this diagnostic marks unnecessary code.
    pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups by their source.
    pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the server when we request code actions for this diagnostic.
    pub data: Option<Value>,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum DiagnosticSourceKind {
    Pulled,
    Pushed,
    Other,
}

/// An operation used to synchronize this buffer with its other replicas.
#[derive(Clone, Debug, PartialEq)]
pub enum Operation {
    /// A text operation.
    Buffer(text::Operation),

    /// An update to the buffer's diagnostics.
    UpdateDiagnostics {
        /// The id of the language server that produced the new diagnostics.
        server_id: LanguageServerId,
        /// The diagnostics.
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },

    /// An update to the most recent selections in this buffer.
    UpdateSelections {
        /// The selections.
        selections: Arc<[Selection<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// Whether the selections are in 'line mode'.
        line_mode: bool,
        /// The [`CursorShape`] associated with these selections.
        cursor_shape: CursorShape,
    },

    /// An update to the characters that should trigger autocompletion
    /// for this buffer.
    UpdateCompletionTriggers {
        /// The characters that trigger autocompletion.
        triggers: Vec<String>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// The language server ID.
        server_id: LanguageServerId,
    },

    /// An update to the line ending type of this buffer.
    UpdateLineEnding {
        /// The line ending type.
        line_ending: LineEnding,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },
}

/// An event that occurs in a buffer.
#[derive(Clone, Debug, PartialEq)]
pub enum BufferEvent {
    /// The buffer was changed in a way that must be
    /// propagated to its other replicas.
    Operation {
        operation: Operation,
        is_local: bool,
    },
    /// The buffer was edited.
    Edited,
    /// The buffer's `dirty` bit changed.
    DirtyChanged,
    /// The buffer was saved.
    Saved,
    /// The buffer's file was changed on disk.
    FileHandleChanged,
    /// The buffer was reloaded.
    Reloaded,
    /// The buffer is in need of a reload
    ReloadNeeded,
    /// The buffer's language was changed.
    LanguageChanged,
    /// The buffer's syntax trees were updated.
    Reparsed,
    /// The buffer's diagnostics were updated.
    DiagnosticsUpdated,
    /// The buffer gained or lost editing capabilities.
    CapabilityChanged,
}

/// The file associated with a buffer.
pub trait File: Send + Sync + Any {
    /// Returns the [`LocalFile`] associated with this file, if the
    /// file is local.
    fn as_local(&self) -> Option<&dyn LocalFile>;

    /// Returns whether this file is local.
    fn is_local(&self) -> bool {
        self.as_local().is_some()
    }

    /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
    /// only available in some states, such as modification time.
    fn disk_state(&self) -> DiskState;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<RelPath>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self, cx: &App) -> PathBuf;

    /// Returns the path style of this file.
    fn path_style(&self, cx: &App) -> PathStyle;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;

    /// Returns the id of the worktree to which this file belongs.
    ///
    /// This is needed for looking up project-specific settings.
    fn worktree_id(&self, cx: &App) -> WorktreeId;

    /// Converts this file into a protobuf message.
    fn to_proto(&self, cx: &App) -> rpc::proto::File;

    /// Return whether Zed considers this to be a private file.
    fn is_private(&self) -> bool;
}

/// The file's storage status - whether it's stored (`Present`), and if so when it was last
/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
/// indicator for new files.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum DiskState {
    /// File created in Zed that has not been saved.
    New,
    /// File present on the filesystem.
    Present { mtime: MTime },
    /// Deleted file that was previously present.
    Deleted,
}

impl DiskState {
    /// Returns the file's last known modification time on disk.
    pub fn mtime(self) -> Option<MTime> {
        match self {
            DiskState::New => None,
            DiskState::Present { mtime } => Some(mtime),
            DiskState::Deleted => None,
        }
    }

    pub fn exists(&self) -> bool {
        match self {
            DiskState::New => false,
            DiskState::Present { .. } => true,
            DiskState::Deleted => false,
        }
    }
}

/// The file associated with a buffer, in the case where the file is on the local disk.
pub trait LocalFile: File {
    /// Returns the absolute path of this file
    fn abs_path(&self, cx: &App) -> PathBuf;

    /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
    fn load(&self, cx: &App) -> Task<Result<String>>;

    /// Loads the file's contents from disk.
    fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
}

/// The auto-indent behavior associated with an editing operation.
/// For some editing operations, each affected line of text has its
/// indentation recomputed. For other operations, the entire block
/// of edited text is adjusted uniformly.
#[derive(Clone, Debug)]
pub enum AutoindentMode {
    /// Indent each line of inserted text.
    EachLine,
    /// Apply the same indentation adjustment to all of the lines
    /// in a given insertion.
    Block {
        /// The original indentation column of the first line of each
        /// insertion, if it has been copied.
        ///
        /// Knowing this makes it possible to preserve the relative indentation
        /// of every line in the insertion from when it was copied.
        ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion is adjusted by the same amount, `b - a`.
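        ///
        /// For example: if the copied block's first line originally sat at
        /// column `a = 4` and is auto-indented to column `b = 8`, a line that
        /// was at column 6 in the copied text ends up at column
        /// `6 + (b - a) = 10`, preserving the block's internal shape.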
        original_indent_columns: Vec<Option<u32>>,
    },
}

#[derive(Clone)]
struct AutoindentRequest {
    before_edit: BufferSnapshot,
    entries: Vec<AutoindentRequestEntry>,
    is_block_mode: bool,
    ignore_empty_lines: bool,
}

#[derive(Debug, Clone)]
struct AutoindentRequestEntry {
    /// A range of the buffer whose indentation should be adjusted.
    range: Range<Anchor>,
    /// Whether or not these lines should be considered brand new, for the
    /// purpose of auto-indent. When text is not new, its indentation will
    /// only be adjusted if the suggested indentation level has *changed*
    /// since the edit was made.
    first_line_is_new: bool,
    indent_size: IndentSize,
    original_indent_column: Option<u32>,
}

#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    delta: Ordering,
    within_error: bool,
}

struct BufferChunkHighlights<'a> {
    captures: SyntaxMapCaptures<'a>,
    next_capture: Option<SyntaxMapCapture<'a>>,
    stack: Vec<(usize, HighlightId)>,
    highlight_maps: Vec<HighlightMap>,
}

/// An iterator that yields chunks of a buffer's text, along with their
/// syntax highlights and diagnostic status.
pub struct BufferChunks<'a> {
    buffer_snapshot: Option<&'a BufferSnapshot>,
    range: Range<usize>,
    chunks: text::Chunks<'a>,
    diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    unnecessary_depth: usize,
    underline: bool,
    highlights: Option<BufferChunkHighlights<'a>>,
}

/// A chunk of a buffer's text, along with its syntax highlight and
/// diagnostic status.
#[derive(Clone, Debug, Default)]
pub struct Chunk<'a> {
    /// The text of the chunk.
    pub text: &'a str,
    /// The syntax highlighting style of the chunk.
    pub syntax_highlight_id: Option<HighlightId>,
    /// The highlight style that has been applied to this chunk in
    /// the editor.
    pub highlight_style: Option<HighlightStyle>,
    /// The severity of diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters are tabs in this string.
    pub tabs: u128,
    /// Bitmap of character indices in this chunk
    pub chars: u128,
    /// Whether this chunk of text is marked as unnecessary.
    pub is_unnecessary: bool,
    /// Whether this chunk of text was originally a tab character.
    pub is_tab: bool,
    /// Whether this chunk of text was originally an inlay.
    pub is_inlay: bool,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}

/// A set of edits to a given version of a buffer, computed asynchronously.
#[derive(Debug)]
pub struct Diff {
    pub base_version: clock::Global,
    pub line_ending: LineEnding,
    pub edits: Vec<(Range<usize>, Arc<str>)>,
}

#[derive(Debug, Clone, Copy)]
pub(crate) struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    underline: bool,
    severity: DiagnosticSeverity,
    is_unnecessary: bool,
}

/// A class of characters, used for characterizing a run of text.
#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
pub enum CharKind {
    /// Whitespace.
    Whitespace,
    /// Punctuation.
    Punctuation,
    /// Word.
    Word,
}

/// Context for character classification within a specific scope.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum CharScopeContext {
    /// Character classification for completion queries.
    ///
    /// This context treats certain characters as word constituents that would
    /// normally be considered punctuation, such as '-' in Tailwind classes
    /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
    Completion,
    /// Character classification for linked edits.
    ///
    /// This context handles characters that should be treated as part of
    /// identifiers during linked editing operations, such as '.' in JSX
    /// component names like `<Animated.View>`.
    LinkedEdit,
}

/// A runnable is a set of data about a region that could be resolved into a task
pub struct Runnable {
    pub tags: SmallVec<[RunnableTag; 1]>,
    pub language: Arc<Language>,
    pub buffer: BufferId,
}

#[derive(Default, Clone, Debug)]
pub struct HighlightedText {
    pub text: SharedString,
    pub highlights: Vec<(Range<usize>, HighlightStyle)>,
}

#[derive(Default, Debug)]
struct HighlightedTextBuilder {
    pub text: String,
    highlights: Vec<(Range<usize>, HighlightStyle)>,
}

impl HighlightedText {
    pub fn from_buffer_range<T: ToOffset>(
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) -> Self {
        let mut highlighted_text = HighlightedTextBuilder::default();
        highlighted_text.add_text_from_buffer_range(
            range,
            snapshot,
            syntax_snapshot,
            override_style,
            syntax_theme,
        );
        highlighted_text.build()
    }

    pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
        gpui::StyledText::new(self.text.clone())
            .with_default_highlights(default_style, self.highlights.iter().cloned())
    }

    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within that whitespace, and a boolean indicating whether more lines follow.
    pub fn first_line_preview(self) -> (Self, bool) {
        let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
        let first_line = &self.text[..newline_ix];

        // Trim leading whitespace, unless an edit starts prior to it.
        let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
        if let Some((first_highlight_range, _)) = self.highlights.first() {
            preview_start_ix = preview_start_ix.min(first_highlight_range.start);
        }

        let preview_text = &first_line[preview_start_ix..];
        let preview_highlights = self
            .highlights
            .into_iter()
            .skip_while(|(range, _)| range.end <= preview_start_ix)
            .take_while(|(range, _)| range.start < newline_ix)
            .filter_map(|(mut range, highlight)| {
                range.start = range.start.saturating_sub(preview_start_ix);
                range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
                if range.is_empty() {
                    None
                } else {
                    Some((range, highlight))
                }
            });

        let preview = Self {
            text: SharedString::new(preview_text),
            highlights: preview_highlights.collect(),
        };

        (preview, self.text.len() > newline_ix)
    }
}

impl HighlightedTextBuilder {
    pub fn build(self) -> HighlightedText {
        HighlightedText {
            text: self.text.into(),
            highlights: self.highlights,
        }
    }

    pub fn add_text_from_buffer_range<T: ToOffset>(
        &mut self,
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) {
        let range = range.to_offset(snapshot);
        for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
            let start = self.text.len();
            self.text.push_str(chunk.text);
            let end = self.text.len();

            if let Some(highlight_style) = chunk
                .syntax_highlight_id
                .and_then(|id| id.style(syntax_theme))
            {
                let highlight_style = override_style.map_or(highlight_style, |override_style| {
                    highlight_style.highlight(override_style)
                });
                self.highlights.push((start..end, highlight_style));
            } else if let Some(override_style) = override_style {
                self.highlights.push((start..end, override_style));
            }
        }
    }

    fn highlighted_chunks<'a>(
        range: Range<usize>,
        snapshot: &'a text::BufferSnapshot,
        syntax_snapshot: &'a SyntaxSnapshot,
    ) -> BufferChunks<'a> {
        let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
            grammar
                .highlights_config
                .as_ref()
                .map(|config| &config.query)
        });

        let highlight_maps = captures
            .grammars()
            .iter()
            .map(|grammar| grammar.highlight_map())
            .collect();

        BufferChunks::new(
            snapshot.as_rope(),
            range,
            Some((captures, highlight_maps)),
            false,
            None,
        )
    }
}

#[derive(Clone)]
pub struct EditPreview {
    old_snapshot: text::BufferSnapshot,
    applied_edits_snapshot: text::BufferSnapshot,
    syntax_snapshot: SyntaxSnapshot,
}

impl EditPreview {
    pub fn highlight_edits(
        &self,
        current_snapshot: &BufferSnapshot,
        edits: &[(Range<Anchor>, String)],
        include_deletions: bool,
        cx: &App,
    ) -> HighlightedText {
        let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
            return HighlightedText::default();
        };

        let mut highlighted_text = HighlightedTextBuilder::default();

        let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;

        let insertion_highlight_style = HighlightStyle {
            background_color: Some(cx.theme().status().created_background),
            ..Default::default()
        };
        let deletion_highlight_style = HighlightStyle {
            background_color: Some(cx.theme().status().deleted_background),
            ..Default::default()
        };
        let syntax_theme = cx.theme().syntax();

        for (range, edit_text) in edits {
            let edit_new_end_in_preview_snapshot = range
                .end
                .bias_right(&self.old_snapshot)
                .to_offset(&self.applied_edits_snapshot);
            let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();

            let unchanged_range_in_preview_snapshot =
                offset_in_preview_snapshot..edit_start_in_preview_snapshot;
            if !unchanged_range_in_preview_snapshot.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    unchanged_range_in_preview_snapshot,
                    &self.applied_edits_snapshot,
                    &self.syntax_snapshot,
                    None,
                    syntax_theme,
                );
            }

            let range_in_current_snapshot = range.to_offset(current_snapshot);
            if include_deletions && !range_in_current_snapshot.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
                    Some(deletion_highlight_style),
                    syntax_theme,
                );
            }

            if !edit_text.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
                    &self.applied_edits_snapshot,
                    &self.syntax_snapshot,
                    Some(insertion_highlight_style),
                    syntax_theme,
                );
            }

            offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
        }

        highlighted_text.add_text_from_buffer_range(
            offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
            &self.applied_edits_snapshot,
            &self.syntax_snapshot,
            None,
            syntax_theme,
        );

        highlighted_text.build()
    }

    fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
        let (first, _) = edits.first()?;
        let (last, _) = edits.last()?;

        let start = first
            .start
            .bias_left(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);
        let end = last
            .end
            .bias_right(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);

        // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
        let range = Point::new(start.row, 0)
            ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));

        Some(range.to_offset(&self.applied_edits_snapshot))
    }
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct BracketMatch {
    pub open_range: Range<usize>,
    pub close_range: Range<usize>,
    pub newline_only: bool,
}

impl Buffer {
    /// Create a new buffer with the given base text.
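    ///
    /// A minimal usage sketch (the surrounding GPUI test context `cx` is assumed):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```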
    pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
        Self::build(
            TextBuffer::new(
                ReplicaId::LOCAL,
                cx.entity_id().as_non_zero_u64().into(),
                base_text.into(),
            ),
            None,
            Capability::ReadWrite,
        )
    }

    /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
    pub fn local_normalized(
        base_text_normalized: Rope,
        line_ending: LineEnding,
        cx: &Context<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new_normalized(
                ReplicaId::LOCAL,
                cx.entity_id().as_non_zero_u64().into(),
                line_ending,
                base_text_normalized,
            ),
            None,
            Capability::ReadWrite,
        )
    }

    /// Create a new buffer that is a replica of a remote buffer.
    pub fn remote(
        remote_id: BufferId,
        replica_id: ReplicaId,
        capability: Capability,
        base_text: impl Into<String>,
    ) -> Self {
        Self::build(
            TextBuffer::new(replica_id, remote_id, base_text.into()),
            None,
            capability,
        )
    }

    /// Create a new buffer that is a replica of a remote buffer, populating its
    /// state from the given protobuf message.
    pub fn from_proto(
        replica_id: ReplicaId,
        capability: Capability,
        message: proto::BufferState,
        file: Option<Arc<dyn File>>,
    ) -> Result<Self> {
        let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
        let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
        let mut this = Self::build(buffer, file, capability);
        this.text.set_line_ending(proto::deserialize_line_ending(
            rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
        ));
        this.saved_version = proto::deserialize_version(&message.saved_version);
        this.saved_mtime = message.saved_mtime.map(|time| time.into());
        Ok(this)
    }

    /// Serialize the buffer's state to a protobuf message.
    pub fn to_proto(&self, cx: &App) -> proto::BufferState {
        proto::BufferState {
            id: self.remote_id().into(),
            file: self.file.as_ref().map(|f| f.to_proto(cx)),
            base_text: self.base_text().to_string(),
            line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
            saved_version: proto::serialize_version(&self.saved_version),
            saved_mtime: self.saved_mtime.map(|time| time.into()),
        }
    }

    /// Serialize as protobufs all of the changes to the buffer since the given version.
    pub fn serialize_ops(
        &self,
        since: Option<clock::Global>,
        cx: &App,
    ) -> Task<Vec<proto::Operation>> {
        let mut operations = Vec::new();
        operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));

        operations.extend(self.remote_selections.iter().map(|(_, set)| {
            proto::serialize_operation(&Operation::UpdateSelections {
                selections: set.selections.clone(),
                lamport_timestamp: set.lamport_timestamp,
                line_mode: set.line_mode,
                cursor_shape: set.cursor_shape,
            })
        }));

        for (server_id, diagnostics) in &self.diagnostics {
            operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
                lamport_timestamp: self.diagnostics_timestamp,
                server_id: *server_id,
                diagnostics: diagnostics.iter().cloned().collect(),
            }));
        }

        for (server_id, completions) in &self.completion_triggers_per_language_server {
            operations.push(proto::serialize_operation(
                &Operation::UpdateCompletionTriggers {
                    triggers: completions.iter().cloned().collect(),
                    lamport_timestamp: self.completion_triggers_timestamp,
                    server_id: *server_id,
                },
            ));
        }

        let text_operations = self.text.operations().clone();
        cx.background_spawn(async move {
            let since = since.unwrap_or_default();
            operations.extend(
                text_operations
                    .iter()
                    .filter(|(_, op)| !since.observed(op.timestamp()))
                    .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
            );
            operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
            operations
        })
    }

    /// Assign a language to the buffer, returning the buffer.
    pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
        self.set_language(Some(language), cx);
        self
    }

    /// Returns the [`Capability`] of this buffer.
    pub fn capability(&self) -> Capability {
        self.capability
    }

    /// Whether this buffer can only be read.
    pub fn read_only(&self) -> bool {
        self.capability == Capability::ReadOnly
    }

    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
    pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
        let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
        let snapshot = buffer.snapshot();
        let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
        Self {
            saved_mtime,
            saved_version: buffer.version(),
            preview_version: buffer.version(),
            reload_task: None,
            transaction_depth: 0,
            was_dirty_before_starting_transaction: None,
            has_unsaved_edits: Cell::new((buffer.version(), false)),
            text: buffer,
            branch_state: None,
            file,
            capability,
            syntax_map,
            reparse: None,
            non_text_state_update_count: 0,
            sync_parse_timeout: Duration::from_millis(1),
            parse_status: watch::channel(ParseStatus::Idle),
            autoindent_requests: Default::default(),
            wait_for_autoindent_txs: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            remote_selections: Default::default(),
            diagnostics: Default::default(),
            diagnostics_timestamp: Lamport::MIN,
            completion_triggers: Default::default(),
            completion_triggers_per_language_server: Default::default(),
            completion_triggers_timestamp: Lamport::MIN,
            deferred_ops: OperationQueue::new(),
            has_conflict: false,
            change_bits: Default::default(),
            _subscriptions: Vec::new(),
        }
    }

    pub fn build_snapshot(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        cx: &mut App,
    ) -> impl Future<Output = BufferSnapshot> + use<> {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        async move {
            let text =
                TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
                    .snapshot();
            let mut syntax = SyntaxMap::new(&text).snapshot();
            if let Some(language) = language.clone() {
                let language_registry = language_registry.clone();
                syntax.reparse(&text, language_registry, language);
            }
            BufferSnapshot {
                text,
                syntax,
                file: None,
                diagnostics: Default::default(),
                remote_selections: Default::default(),
                language,
                non_text_state_update_count: 0,
            }
        }
    }

    pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        let text = TextBuffer::new_normalized(
            ReplicaId::LOCAL,
            buffer_id,
            Default::default(),
            Rope::new(),
        )
        .snapshot();
        let syntax = SyntaxMap::new(&text).snapshot();
        BufferSnapshot {
            text,
            syntax,
            file: None,
            diagnostics: Default::default(),
            remote_selections: Default::default(),
            language: None,
            non_text_state_update_count: 0,
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn build_snapshot_sync(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        cx: &mut App,
    ) -> BufferSnapshot {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        let text =
            TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
                .snapshot();
        let mut syntax = SyntaxMap::new(&text).snapshot();
        if let Some(language) = language.clone() {
            syntax.reparse(&text, language_registry, language);
        }
        BufferSnapshot {
            text,
            syntax,
            file: None,
            diagnostics: Default::default(),
            remote_selections: Default::default(),
            language,
            non_text_state_update_count: 0,
        }
    }

    /// Retrieve a snapshot of the buffer's current state. This is computationally
    /// cheap, and allows reading from the buffer on a background thread.
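    ///
    /// A minimal sketch of the intended pattern (entity and executor setup are assumed):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is an immutable copy, so it can be read off the main thread.
    ///     let line_count = snapshot.max_point().row + 1;
    ///     assert!(line_count > 0);
    /// })
    /// .detach();
    /// ```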
    pub fn snapshot(&self) -> BufferSnapshot {
        let text = self.text.snapshot();
        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let syntax = syntax_map.snapshot();

        BufferSnapshot {
            text,
            syntax,
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
        }
    }

    pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
        let this = cx.entity();
        cx.new(|cx| {
            let mut branch = Self {
                branch_state: Some(BufferBranchState {
                    base_buffer: this.clone(),
                    merged_operations: Default::default(),
                }),
                language: self.language.clone(),
                has_conflict: self.has_conflict,
                has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
                _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
                ..Self::build(self.text.branch(), self.file.clone(), self.capability())
            };
            if let Some(language_registry) = self.language_registry() {
                branch.set_language_registry(language_registry);
            }

            // Reparse the branch buffer so that we get syntax highlighting immediately.
            branch.reparse(cx);

            branch
        })
    }

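    /// Computes an [`EditPreview`] for the given edits on a background thread,
    /// without modifying this buffer.
    ///
    /// A rough sketch of the flow (the anchor range and calling context are
    /// assumptions, not taken from a real call site):
    ///
    /// ```ignore
    /// let edits: Arc<[(Range<Anchor>, String)]> =
    ///     Arc::from(vec![(anchor_range, "new text".to_string())]);
    /// let preview_task = buffer.read(cx).preview_edits(edits.clone(), cx);
    /// // Once the task resolves, the preview can highlight the pending edits:
    /// // preview.highlight_edits(&buffer_snapshot, &edits, true, cx)
    /// ```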
    pub fn preview_edits(
        &self,
        edits: Arc<[(Range<Anchor>, String)]>,
        cx: &App,
    ) -> Task<EditPreview> {
        let registry = self.language_registry();
        let language = self.language().cloned();
        let old_snapshot = self.text.snapshot();
        let mut branch_buffer = self.text.branch();
        let mut syntax_snapshot = self.syntax_map.lock().snapshot();
        cx.background_spawn(async move {
            if !edits.is_empty() {
                if let Some(language) = language.clone() {
                    syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
                }

                branch_buffer.edit(edits.iter().cloned());
                let snapshot = branch_buffer.snapshot();
                syntax_snapshot.interpolate(&snapshot);

                if let Some(language) = language {
                    syntax_snapshot.reparse(&snapshot, registry, language);
                }
            }
            EditPreview {
                old_snapshot,
                applied_edits_snapshot: branch_buffer.snapshot(),
                syntax_snapshot,
            }
        })
    }

    /// Applies all of the changes in this buffer that intersect any of the
    /// given `ranges` to its base buffer.
    ///
    /// If `ranges` is empty, then all changes will be applied. This buffer must
    /// be a branch buffer to call this method.
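    ///
    /// A minimal sketch of the branch/merge flow (buffer setup is assumed):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// prepended\n")], None, cx);
    ///     // An empty range list merges every change back into the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```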
    pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
        let Some(base_buffer) = self.base_buffer() else {
            debug_panic!("not a branch buffer");
            return;
        };

        let mut ranges = if ranges.is_empty() {
            &[0..usize::MAX]
        } else {
            ranges.as_slice()
        }
        .iter()
        .peekable();

        let mut edits = Vec::new();
        for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
            let mut is_included = false;
            while let Some(range) = ranges.peek() {
                if range.end < edit.new.start {
                    ranges.next().unwrap();
                } else {
                    if range.start <= edit.new.end {
                        is_included = true;
                    }
                    break;
                }
            }

            if is_included {
                edits.push((
                    edit.old.clone(),
                    self.text_for_range(edit.new.clone()).collect::<String>(),
                ));
            }
        }

        let operation = base_buffer.update(cx, |base_buffer, cx| {
            // cx.emit(BufferEvent::DiffBaseChanged);
            base_buffer.edit(edits, None, cx)
        });

        if let Some(operation) = operation
            && let Some(BufferBranchState {
                merged_operations, ..
            }) = &mut self.branch_state
        {
            merged_operations.push(operation);
        }
    }

    fn on_base_buffer_event(
        &mut self,
        _: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        let BufferEvent::Operation { operation, .. } = event else {
            return;
        };
        let Some(BufferBranchState {
            merged_operations, ..
        }) = &mut self.branch_state
        else {
            return;
        };

        let mut operation_to_undo = None;
        if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
            && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
        {
            merged_operations.remove(ix);
            operation_to_undo = Some(operation.timestamp);
        }

        self.apply_ops([operation.clone()], cx);

        if let Some(timestamp) = operation_to_undo {
            let counts = [(timestamp, u32::MAX)].into_iter().collect();
            self.undo_operations(counts, cx);
        }
    }

    #[cfg(test)]
    pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
        &self.text
    }

    /// Retrieve a snapshot of the buffer's raw text, without any
    /// language-related state like the syntax tree or diagnostics.
    pub fn text_snapshot(&self) -> text::BufferSnapshot {
        self.text.snapshot()
    }

    /// The file associated with the buffer, if any.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }

    /// The version of the buffer that was last saved or reloaded from disk.
    pub fn saved_version(&self) -> &clock::Global {
        &self.saved_version
    }

    /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
    pub fn saved_mtime(&self) -> Option<MTime> {
        self.saved_mtime
    }

    /// Assign a language to the buffer.
    pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().clear(&self.text);
        self.language = language;
        self.was_changed();
        self.reparse(cx);
        cx.emit(BufferEvent::LanguageChanged);
    }

    /// Assign a language registry to the buffer. This allows the buffer to retrieve
    /// other languages if parts of the buffer are written in different languages.
    pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
        self.syntax_map
            .lock()
            .set_language_registry(language_registry);
    }

    pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
        self.syntax_map.lock().language_registry()
    }

    /// Assign the line ending type to the buffer.
    pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
        self.text.set_line_ending(line_ending);

        let lamport_timestamp = self.text.lamport_clock.tick();
        self.send_operation(
            Operation::UpdateLineEnding {
                line_ending,
                lamport_timestamp,
            },
            true,
            cx,
        );
    }

    /// Assign the buffer a new [`Capability`].
    pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
        if self.capability != capability {
            self.capability = capability;
            cx.emit(BufferEvent::CapabilityChanged)
        }
    }

    /// This method is called to signal that the buffer has been saved.
    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version.clone();
        self.has_unsaved_edits.set((version, false));
        self.has_conflict = false;
        self.saved_mtime = mtime;
        self.was_changed();
        cx.emit(BufferEvent::Saved);
        cx.notify();
    }

    /// Reloads the contents of the buffer from disk.
    pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
        let (tx, rx) = futures::channel::oneshot::channel();
        let prev_version = self.text.version();
        self.reload_task = Some(cx.spawn(async move |this, cx| {
            let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
                let file = this.file.as_ref()?.as_local()?;

                Some((file.disk_state().mtime(), file.load(cx)))
            })?
            else {
                return Ok(());
            };

            let new_text = new_text.await?;
            let diff = this
                .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
                .await;
            this.update(cx, |this, cx| {
                if this.version() == diff.base_version {
                    this.finalize_last_transaction();
                    this.apply_diff(diff, cx);
                    tx.send(this.finalize_last_transaction().cloned()).ok();
                    this.has_conflict = false;
                    this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
                } else {
                    if !diff.edits.is_empty()
                        || this
                            .edits_since::<usize>(&diff.base_version)
                            .next()
                            .is_some()
                    {
                        this.has_conflict = true;
                    }

                    this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
                }

                this.reload_task.take();
            })
        }));
        rx
    }

    /// This method is called to signal that the buffer has been reloaded.
    pub fn did_reload(
        &mut self,
        version: clock::Global,
        line_ending: LineEnding,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version;
        self.has_unsaved_edits
            .set((self.saved_version.clone(), false));
        self.text.set_line_ending(line_ending);
        self.saved_mtime = mtime;
        cx.emit(BufferEvent::Reloaded);
        cx.notify();
    }

    /// Updates the [`File`] backing this buffer. This should be called when
    /// the file has changed or has been deleted.
    pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
        let was_dirty = self.is_dirty();
        let mut file_changed = false;

        if let Some(old_file) = self.file.as_ref() {
            if new_file.path() != old_file.path() {
                file_changed = true;
            }

            let old_state = old_file.disk_state();
            let new_state = new_file.disk_state();
            if old_state != new_state {
                file_changed = true;
                if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
                    cx.emit(BufferEvent::ReloadNeeded)
                }
            }
        } else {
            file_changed = true;
        };

        self.file = Some(new_file);
        if file_changed {
            self.was_changed();
            self.non_text_state_update_count += 1;
            if was_dirty != self.is_dirty() {
                cx.emit(BufferEvent::DirtyChanged);
            }
            cx.emit(BufferEvent::FileHandleChanged);
            cx.notify();
        }
    }

    pub fn base_buffer(&self) -> Option<Entity<Self>> {
        Some(self.branch_state.as_ref()?.base_buffer.clone())
    }

    /// Returns the primary [`Language`] assigned to this [`Buffer`].
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    /// Returns the [`Language`] at the given location.
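    ///
    /// With injected languages (for example a Markdown buffer containing a Rust
    /// code fence), the deepest matching syntax layer wins, falling back to the
    /// buffer's primary language. A rough sketch (the offset is made up):
    ///
    /// ```ignore
    /// let language = buffer.language_at(offset_inside_code_fence);
    /// let name = language.map(|language| language.name());
    /// ```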
    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
        let offset = position.to_offset(self);
        let mut is_first = true;
        let start_anchor = self.anchor_before(offset);
        let end_anchor = self.anchor_after(offset);
        self.syntax_map
            .lock()
            .layers_for_range(offset..offset, &self.text, false)
            .filter(|layer| {
                if is_first {
                    is_first = false;
                    return true;
                }

                layer
                    .included_sub_ranges
                    .map(|sub_ranges| {
                        sub_ranges.iter().any(|sub_range| {
                            let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
                            let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
                            !is_before_start && !is_after_end
                        })
                    })
                    .unwrap_or(true)
            })
            .last()
            .map(|info| info.language.clone())
            .or_else(|| self.language.clone())
    }

    /// Returns each [`Language`] for the active syntax layers at the given location.
    pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
        let offset = position.to_offset(self);
        let mut languages: Vec<Arc<Language>> = self
            .syntax_map
            .lock()
            .layers_for_range(offset..offset, &self.text, false)
            .map(|info| info.language.clone())
            .collect();

        if languages.is_empty()
            && let Some(buffer_language) = self.language()
        {
            languages.push(buffer_language.clone());
        }

        languages
    }

    /// An integer version number that accounts for all updates besides
    /// the buffer's text itself (which is versioned via a version vector).
    pub fn non_text_state_update_count(&self) -> usize {
        self.non_text_state_update_count
    }

    /// Whether the buffer is being parsed in the background.
    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.reparse.is_some()
    }

    /// Indicates whether the buffer contains any regions that may be
    /// written in a language that hasn't been loaded yet.
    pub fn contains_unknown_injections(&self) -> bool {
        self.syntax_map.lock().contains_unknown_injections()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
        self.sync_parse_timeout = timeout;
    }

    /// Called after an edit to synchronize the buffer's main parse tree with
    /// the buffer's new underlying state.
    ///
    /// Locks the syntax map and interpolates the edits since the last reparse
    /// into the foreground syntax tree.
    ///
    /// Then takes a stable snapshot of the syntax map before unlocking it.
    /// The snapshot with the interpolated edits is sent to a background thread,
    /// where we ask Tree-sitter to perform an incremental parse.
    ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting on the parse to complete. As soon as it completes, we proceed
    /// synchronously, unless a 1ms timeout elapses.
    ///
    /// If we time out waiting on the parse, we spawn a second task waiting
    /// until the parse does complete and return with the interpolated tree still
    /// in the foreground. When the background parse completes, call back into
    /// the main thread and assign the foreground parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
    pub fn reparse(&mut self, cx: &mut Context<Self>) {
        if self.reparse.is_some() {
            return;
        }
        let language = if let Some(language) = self.language.clone() {
            language
        } else {
            return;
        };

        let text = self.text_snapshot();
        let parsed_version = self.version();

        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let language_registry = syntax_map.language_registry();
        let mut syntax_snapshot = syntax_map.snapshot();
        drop(syntax_map);

        let parse_task = cx.background_spawn({
            let language = language.clone();
            let language_registry = language_registry.clone();
            async move {
                syntax_snapshot.reparse(&text, language_registry, language);
                syntax_snapshot
            }
        });

        self.parse_status.0.send(ParseStatus::Parsing).unwrap();
        match cx
            .background_executor()
            .block_with_timeout(self.sync_parse_timeout, parse_task)
        {
            Ok(new_syntax_snapshot) => {
                self.did_finish_parsing(new_syntax_snapshot, cx);
                self.reparse = None;
            }
            Err(parse_task) => {
                self.reparse = Some(cx.spawn(async move |this, cx| {
                    let new_syntax_map = parse_task.await;
                    this.update(cx, move |this, cx| {
                        let grammar_changed =
                            this.language.as_ref().is_none_or(|current_language| {
                                !Arc::ptr_eq(&language, current_language)
                            });
                        let language_registry_changed = new_syntax_map
                            .contains_unknown_injections()
                            && language_registry.is_some_and(|registry| {
                                registry.version() != new_syntax_map.language_registry_version()
                            });
                        let parse_again = language_registry_changed
                            || grammar_changed
                            || this.version.changed_since(&parsed_version);
                        this.did_finish_parsing(new_syntax_map, cx);
                        this.reparse = None;
                        if parse_again {
                            this.reparse(cx);
                        }
                    })
                    .ok();
                }));
            }
        }
    }

    fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
        self.was_changed();
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().did_parse(syntax_snapshot);
        self.request_autoindent(cx);
        self.parse_status.0.send(ParseStatus::Idle).unwrap();
        cx.emit(BufferEvent::Reparsed);
        cx.notify();
    }

    pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
        self.parse_status.1.clone()
    }

    /// Assign to the buffer a set of diagnostics created by a given language server.
    pub fn update_diagnostics(
        &mut self,
        server_id: LanguageServerId,
        diagnostics: DiagnosticSet,
        cx: &mut Context<Self>,
    ) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        let op = Operation::UpdateDiagnostics {
            server_id,
            diagnostics: diagnostics.iter().cloned().collect(),
            lamport_timestamp,
        };

        self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
        self.send_operation(op, true, cx);
    }

    pub fn buffer_diagnostics(
        &self,
        for_server: Option<LanguageServerId>,
    ) -> Vec<&DiagnosticEntry<Anchor>> {
        match for_server {
            Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
                Ok(idx) => self.diagnostics[idx].1.iter().collect(),
                Err(_) => Vec::new(),
            },
            None => self
                .diagnostics
                .iter()
                .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
                .collect(),
        }
    }

    fn request_autoindent(&mut self, cx: &mut Context<Self>) {
        if let Some(indent_sizes) = self.compute_autoindents() {
            let indent_sizes = cx.background_spawn(indent_sizes);
            match cx
                .background_executor()
                .block_with_timeout(Duration::from_micros(500), indent_sizes)
            {
                Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
                Err(indent_sizes) => {
                    self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
                        let indent_sizes = indent_sizes.await;
                        this.update(cx, |this, cx| {
                            this.apply_autoindents(indent_sizes, cx);
                        })
                        .ok();
                    }));
                }
            }
        } else {
            self.autoindent_requests.clear();
            for tx in self.wait_for_autoindent_txs.drain(..) {
                tx.send(()).ok();
            }
        }
    }

    fn compute_autoindents(
        &self,
    ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
        let max_rows_between_yields = 100;
        let snapshot = self.snapshot();
        if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
            return None;
        }

        let autoindent_requests = self.autoindent_requests.clone();
        Some(async move {
            let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
            for request in autoindent_requests {
                // Resolve each edited range to its row in the current buffer and in the
                // buffer before this batch of edits.
                let mut row_ranges = Vec::new();
                let mut old_to_new_rows = BTreeMap::new();
                let mut language_indent_sizes_by_new_row = Vec::new();
                for entry in &request.entries {
                    let position = entry.range.start;
                    let new_row = position.to_point(&snapshot).row;
                    let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
                    language_indent_sizes_by_new_row.push((new_row, entry.indent_size));

                    if !entry.first_line_is_new {
                        let old_row = position.to_point(&request.before_edit).row;
                        old_to_new_rows.insert(old_row, new_row);
                    }
                    row_ranges.push((new_row..new_end_row, entry.original_indent_column));
                }

                // Build a map containing the suggested indentation for each of the edited lines
                // with respect to the state of the buffer before these edits. This map is keyed
                // by the rows for these lines in the current state of the buffer.
                let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
                let old_edited_ranges =
                    contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
                let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
                let mut language_indent_size = IndentSize::default();
                for old_edited_range in old_edited_ranges {
                    let suggestions = request
                        .before_edit
                        .suggest_autoindents(old_edited_range.clone())
                        .into_iter()
                        .flatten();
                    for (old_row, suggestion) in old_edited_range.zip(suggestions) {
                        if let Some(suggestion) = suggestion {
                            let new_row = *old_to_new_rows.get(&old_row).unwrap();

                            // Find the indent size based on the language for this row.
                            while let Some((row, size)) = language_indent_sizes.peek() {
                                if *row > new_row {
                                    break;
                                }
                                language_indent_size = *size;
                                language_indent_sizes.next();
                            }

                            let suggested_indent = old_to_new_rows
                                .get(&suggestion.basis_row)
                                .and_then(|from_row| {
                                    Some(old_suggestions.get(from_row).copied()?.0)
                                })
                                .unwrap_or_else(|| {
                                    request
                                        .before_edit
                                        .indent_size_for_line(suggestion.basis_row)
                                })
                                .with_delta(suggestion.delta, language_indent_size);
                            old_suggestions
                                .insert(new_row, (suggested_indent, suggestion.within_error));
                        }
                    }
                    yield_now().await;
                }

                // Compute new suggestions for each line, but only include them in the result
                // if they differ from the old suggestion for that line.
                let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
                let mut language_indent_size = IndentSize::default();
                for (row_range, original_indent_column) in row_ranges {
                    let new_edited_row_range = if request.is_block_mode {
                        row_range.start..row_range.start + 1
                    } else {
                        row_range.clone()
                    };

                    let suggestions = snapshot
                        .suggest_autoindents(new_edited_row_range.clone())
                        .into_iter()
                        .flatten();
                    for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
                        if let Some(suggestion) = suggestion {
                            // Find the indent size based on the language for this row.
                            while let Some((row, size)) = language_indent_sizes.peek() {
                                if *row > new_row {
                                    break;
                                }
                                language_indent_size = *size;
                                language_indent_sizes.next();
                            }

                            let suggested_indent = indent_sizes
                                .get(&suggestion.basis_row)
                                .copied()
                                .map(|e| e.0)
                                .unwrap_or_else(|| {
                                    snapshot.indent_size_for_line(suggestion.basis_row)
                                })
                                .with_delta(suggestion.delta, language_indent_size);

                            if old_suggestions.get(&new_row).is_none_or(
                                |(old_indentation, was_within_error)| {
                                    suggested_indent != *old_indentation
                                        && (!suggestion.within_error || *was_within_error)
                                },
                            ) {
                                indent_sizes.insert(
                                    new_row,
                                    (suggested_indent, request.ignore_empty_lines),
                                );
                            }
                        }
                    }

                    if let (true, Some(original_indent_column)) =
                        (request.is_block_mode, original_indent_column)
                    {
                        let new_indent =
                            if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
                                *indent
                            } else {
                                snapshot.indent_size_for_line(row_range.start)
                            };
                        let delta = new_indent.len as i64 - original_indent_column as i64;
                        if delta != 0 {
                            for row in row_range.skip(1) {
                                indent_sizes.entry(row).or_insert_with(|| {
                                    let mut size = snapshot.indent_size_for_line(row);
                                    if size.kind == new_indent.kind {
                                        match delta.cmp(&0) {
                                            Ordering::Greater => size.len += delta as u32,
                                            Ordering::Less => {
                                                size.len = size.len.saturating_sub(-delta as u32)
1822 }
1823 Ordering::Equal => {}
1824 }
1825 }
1826 (size, request.ignore_empty_lines)
1827 });
1828 }
1829 }
1830 }
1831
1832 yield_now().await;
1833 }
1834 }
1835
1836 indent_sizes
1837 .into_iter()
1838 .filter_map(|(row, (indent, ignore_empty_lines))| {
1839 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1840 None
1841 } else {
1842 Some((row, indent))
1843 }
1844 })
1845 .collect()
1846 })
1847 }
1848
1849 fn apply_autoindents(
1850 &mut self,
1851 indent_sizes: BTreeMap<u32, IndentSize>,
1852 cx: &mut Context<Self>,
1853 ) {
1854 self.autoindent_requests.clear();
1855 for tx in self.wait_for_autoindent_txs.drain(..) {
1856 tx.send(()).ok();
1857 }
1858
1859 let edits: Vec<_> = indent_sizes
1860 .into_iter()
1861 .filter_map(|(row, indent_size)| {
1862 let current_size = indent_size_for_line(self, row);
1863 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1864 })
1865 .collect();
1866
1867 let preserve_preview = self.preserve_preview();
1868 self.edit(edits, None, cx);
1869 if preserve_preview {
1870 self.refresh_preview();
1871 }
1872 }
1873
1874 /// Create a minimal edit that will cause the given row to be indented
1875 /// with the given size. After applying this edit, the length of the line
1876 /// will always be at least `new_size.len`.
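    ///
    /// A minimal sketch of the "increase indent" case (not compiled as a doctest):
    ///
    /// ```ignore
    /// // Re-indenting row 3 from two spaces to four inserts two more spaces at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```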
1877 pub fn edit_for_indent_size_adjustment(
1878 row: u32,
1879 current_size: IndentSize,
1880 new_size: IndentSize,
1881 ) -> Option<(Range<Point>, String)> {
1882 if new_size.kind == current_size.kind {
1883            match new_size.len.cmp(&current_size.len) {
1884 Ordering::Greater => {
1885 let point = Point::new(row, 0);
1886 Some((
1887 point..point,
1888 iter::repeat(new_size.char())
1889 .take((new_size.len - current_size.len) as usize)
1890 .collect::<String>(),
1891 ))
1892 }
1893
1894 Ordering::Less => Some((
1895 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1896 String::new(),
1897 )),
1898
1899 Ordering::Equal => None,
1900 }
1901 } else {
1902 Some((
1903 Point::new(row, 0)..Point::new(row, current_size.len),
1904 iter::repeat(new_size.char())
1905 .take(new_size.len as usize)
1906 .collect::<String>(),
1907 ))
1908 }
1909 }
1910
1911 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1912 /// and the given new text.
1913 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1914 let old_text = self.as_rope().clone();
1915 let base_version = self.version();
1916 cx.background_executor()
1917 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1918 let old_text = old_text.to_string();
1919 let line_ending = LineEnding::detect(&new_text);
1920 LineEnding::normalize(&mut new_text);
1921 let edits = text_diff(&old_text, &new_text);
1922 Diff {
1923 base_version,
1924 line_ending,
1925 edits,
1926 }
1927 })
1928 }
1929
1930 /// Spawns a background task that searches the buffer for any whitespace
1931    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1932 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1933 let old_text = self.as_rope().clone();
1934 let line_ending = self.line_ending();
1935 let base_version = self.version();
1936 cx.background_spawn(async move {
1937 let ranges = trailing_whitespace_ranges(&old_text);
1938 let empty = Arc::<str>::from("");
1939 Diff {
1940 base_version,
1941 line_ending,
1942 edits: ranges
1943 .into_iter()
1944 .map(|range| (range, empty.clone()))
1945 .collect(),
1946 }
1947 })
1948 }
1949
1950 /// Ensures that the buffer ends with a single newline character, and
1951 /// no other whitespace. Skips if the buffer is empty.
1952 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1953 let len = self.len();
1954 if len == 0 {
1955 return;
1956 }
1957 let mut offset = len;
1958 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1959 let non_whitespace_len = chunk
1960 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1961 .len();
1962 offset -= chunk.len();
1963 offset += non_whitespace_len;
1964 if non_whitespace_len != 0 {
1965 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1966 return;
1967 }
1968 break;
1969 }
1970 }
1971 self.edit([(offset..len, "\n")], None, cx);
1972 }
1973
1974 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1975 /// calculated, then adjust the diff to account for those changes, and discard any
1976 /// parts of the diff that conflict with those changes.
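    ///
    /// A sketch of the typical flow, mirroring how [`Buffer::diff`] tasks are awaited
    /// elsewhere in this module (hypothetical text and contexts; not compiled as a doctest):
    ///
    /// ```ignore
    /// let diff = self.diff(new_text, cx);
    /// cx.spawn(async move |this, cx| {
    ///     let diff = diff.await;
    ///     this.update(cx, |buffer, cx| {
    ///         // Hunks that conflict with edits made since the diff was computed are dropped.
    ///         buffer.apply_diff(diff, cx);
    ///     })
    ///     .ok();
    /// })
    /// .detach();
    /// ```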
1977 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1978 let snapshot = self.snapshot();
1979 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1980 let mut delta = 0;
1981 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1982 while let Some(edit_since) = edits_since.peek() {
1983 // If the edit occurs after a diff hunk, then it does not
1984 // affect that hunk.
1985 if edit_since.old.start > range.end {
1986 break;
1987 }
1988 // If the edit precedes the diff hunk, then adjust the hunk
1989 // to reflect the edit.
1990 else if edit_since.old.end < range.start {
1991 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1992 edits_since.next();
1993 }
1994 // If the edit intersects a diff hunk, then discard that hunk.
1995 else {
1996 return None;
1997 }
1998 }
1999
2000 let start = (range.start as i64 + delta) as usize;
2001 let end = (range.end as i64 + delta) as usize;
2002 Some((start..end, new_text))
2003 });
2004
2005 self.start_transaction();
2006 self.text.set_line_ending(diff.line_ending);
2007 self.edit(adjusted_edits, None, cx);
2008 self.end_transaction(cx)
2009 }
2010
2011 pub fn has_unsaved_edits(&self) -> bool {
2012 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2013
2014 if last_version == self.version {
2015 self.has_unsaved_edits
2016 .set((last_version, has_unsaved_edits));
2017 return has_unsaved_edits;
2018 }
2019
2020 let has_edits = self.has_edits_since(&self.saved_version);
2021 self.has_unsaved_edits
2022 .set((self.version.clone(), has_edits));
2023 has_edits
2024 }
2025
2026 /// Checks if the buffer has unsaved changes.
2027 pub fn is_dirty(&self) -> bool {
2028 if self.capability == Capability::ReadOnly {
2029 return false;
2030 }
2031 if self.has_conflict {
2032 return true;
2033 }
2034 match self.file.as_ref().map(|f| f.disk_state()) {
2035 Some(DiskState::New) | Some(DiskState::Deleted) => {
2036 !self.is_empty() && self.has_unsaved_edits()
2037 }
2038 _ => self.has_unsaved_edits(),
2039 }
2040 }
2041
2042 /// Checks if the buffer and its file have both changed since the buffer
2043 /// was last saved or reloaded.
2044 pub fn has_conflict(&self) -> bool {
2045 if self.has_conflict {
2046 return true;
2047 }
2048 let Some(file) = self.file.as_ref() else {
2049 return false;
2050 };
2051 match file.disk_state() {
2052 DiskState::New => false,
2053 DiskState::Present { mtime } => match self.saved_mtime {
2054 Some(saved_mtime) => {
2055 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2056 }
2057 None => true,
2058 },
2059 DiskState::Deleted => false,
2060 }
2061 }
2062
2063 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2064 pub fn subscribe(&mut self) -> Subscription {
2065 self.text.subscribe()
2066 }
2067
2068 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2069 ///
2070 /// This allows downstream code to check if the buffer's text has changed without
2071    /// waiting for an effect cycle, which would be required if using events.
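    ///
    /// A minimal sketch (hypothetical `buffer` and `cx`; not compiled as a doctest):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// buffer.edit([(0..0, "x")], None, cx);
    /// assert!(changed.get()); // set synchronously, with no effect cycle in between
    /// ```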
2072 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2073 if let Err(ix) = self
2074 .change_bits
2075 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2076 {
2077 self.change_bits.insert(ix, bit);
2078 }
2079 }
2080
2081 /// Set the change bit for all "listeners".
2082 fn was_changed(&mut self) {
2083 self.change_bits.retain(|change_bit| {
2084 change_bit
2085 .upgrade()
2086 .inspect(|bit| {
2087 _ = bit.replace(true);
2088 })
2089 .is_some()
2090 });
2091 }
2092
2093 /// Starts a transaction, if one is not already in-progress. When undoing or
2094 /// redoing edits, all of the edits performed within a transaction are undone
2095 /// or redone together.
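    ///
    /// A usage sketch (hypothetical `buffer` and `cx`; not compiled as a doctest):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "// header\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "\n")], None, cx);
    /// let transaction_id = buffer.end_transaction(cx);
    /// // A single undo now reverts both edits.
    /// ```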
2096 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2097 self.start_transaction_at(Instant::now())
2098 }
2099
2100 /// Starts a transaction, providing the current time. Subsequent transactions
2101 /// that occur within a short period of time will be grouped together. This
2102 /// is controlled by the buffer's undo grouping duration.
2103 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2104 self.transaction_depth += 1;
2105 if self.was_dirty_before_starting_transaction.is_none() {
2106 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2107 }
2108 self.text.start_transaction_at(now)
2109 }
2110
2111 /// Terminates the current transaction, if this is the outermost transaction.
2112 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2113 self.end_transaction_at(Instant::now(), cx)
2114 }
2115
2116 /// Terminates the current transaction, providing the current time. Subsequent transactions
2117 /// that occur within a short period of time will be grouped together. This
2118 /// is controlled by the buffer's undo grouping duration.
2119 pub fn end_transaction_at(
2120 &mut self,
2121 now: Instant,
2122 cx: &mut Context<Self>,
2123 ) -> Option<TransactionId> {
2124 assert!(self.transaction_depth > 0);
2125 self.transaction_depth -= 1;
2126 let was_dirty = if self.transaction_depth == 0 {
2127 self.was_dirty_before_starting_transaction.take().unwrap()
2128 } else {
2129 false
2130 };
2131 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2132 self.did_edit(&start_version, was_dirty, cx);
2133 Some(transaction_id)
2134 } else {
2135 None
2136 }
2137 }
2138
2139 /// Manually add a transaction to the buffer's undo history.
2140 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2141 self.text.push_transaction(transaction, now);
2142 }
2143
2144 /// Differs from `push_transaction` in that it does not clear the redo
2145 /// stack. Intended to be used to create a parent transaction to merge
2146 /// potential child transactions into.
2147 ///
2148 /// The caller is responsible for removing it from the undo history using
2149 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2150 /// are merged into this transaction, the caller is responsible for ensuring
2151 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2152 /// cleared is to create transactions with the usual `start_transaction` and
2153 /// `end_transaction` methods and merging the resulting transactions into
2154    /// the transaction created by this method.
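    ///
    /// A sketch of that protocol (hypothetical `buffer` and `cx`; not compiled as a doctest):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "x")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     // Nothing was merged in, so drop the placeholder transaction.
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```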
2155 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2156 self.text.push_empty_transaction(now)
2157 }
2158
2159 /// Prevent the last transaction from being grouped with any subsequent transactions,
2160    /// even if they occur within the buffer's undo grouping duration.
2161 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2162 self.text.finalize_last_transaction()
2163 }
2164
2165 /// Manually group all changes since a given transaction.
2166 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2167 self.text.group_until_transaction(transaction_id);
2168 }
2169
2170 /// Manually remove a transaction from the buffer's undo history
2171 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2172 self.text.forget_transaction(transaction_id)
2173 }
2174
2175 /// Retrieve a transaction from the buffer's undo history
2176 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2177 self.text.get_transaction(transaction_id)
2178 }
2179
2180 /// Manually merge two transactions in the buffer's undo history.
2181 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2182 self.text.merge_transactions(transaction, destination);
2183 }
2184
2185 /// Waits for the buffer to receive operations with the given timestamps.
2186 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2187 &mut self,
2188 edit_ids: It,
2189 ) -> impl Future<Output = Result<()>> + use<It> {
2190 self.text.wait_for_edits(edit_ids)
2191 }
2192
2193 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2194 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2195 &mut self,
2196 anchors: It,
2197 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2198 self.text.wait_for_anchors(anchors)
2199 }
2200
2201 /// Waits for the buffer to receive operations up to the given version.
2202 pub fn wait_for_version(
2203 &mut self,
2204 version: clock::Global,
2205 ) -> impl Future<Output = Result<()>> + use<> {
2206 self.text.wait_for_version(version)
2207 }
2208
2209    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2210    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2211 pub fn give_up_waiting(&mut self) {
2212 self.text.give_up_waiting();
2213 }
2214
2215 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2216 let mut rx = None;
2217 if !self.autoindent_requests.is_empty() {
2218 let channel = oneshot::channel();
2219 self.wait_for_autoindent_txs.push(channel.0);
2220 rx = Some(channel.1);
2221 }
2222 rx
2223 }
2224
2225    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2226 pub fn set_active_selections(
2227 &mut self,
2228 selections: Arc<[Selection<Anchor>]>,
2229 line_mode: bool,
2230 cursor_shape: CursorShape,
2231 cx: &mut Context<Self>,
2232 ) {
2233 let lamport_timestamp = self.text.lamport_clock.tick();
2234 self.remote_selections.insert(
2235 self.text.replica_id(),
2236 SelectionSet {
2237 selections: selections.clone(),
2238 lamport_timestamp,
2239 line_mode,
2240 cursor_shape,
2241 },
2242 );
2243 self.send_operation(
2244 Operation::UpdateSelections {
2245 selections,
2246 line_mode,
2247 lamport_timestamp,
2248 cursor_shape,
2249 },
2250 true,
2251 cx,
2252 );
2253 self.non_text_state_update_count += 1;
2254 cx.notify();
2255 }
2256
2257 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2258 /// this replica.
2259 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2260 if self
2261 .remote_selections
2262 .get(&self.text.replica_id())
2263 .is_none_or(|set| !set.selections.is_empty())
2264 {
2265 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2266 }
2267 }
2268
2269 pub fn set_agent_selections(
2270 &mut self,
2271 selections: Arc<[Selection<Anchor>]>,
2272 line_mode: bool,
2273 cursor_shape: CursorShape,
2274 cx: &mut Context<Self>,
2275 ) {
2276 let lamport_timestamp = self.text.lamport_clock.tick();
2277 self.remote_selections.insert(
2278 ReplicaId::AGENT,
2279 SelectionSet {
2280 selections,
2281 lamport_timestamp,
2282 line_mode,
2283 cursor_shape,
2284 },
2285 );
2286 self.non_text_state_update_count += 1;
2287 cx.notify();
2288 }
2289
2290 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2291 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2292 }
2293
2294 /// Replaces the buffer's entire text.
2295 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2296 where
2297 T: Into<Arc<str>>,
2298 {
2299 self.autoindent_requests.clear();
2300 self.edit([(0..self.len(), text)], None, cx)
2301 }
2302
2303 /// Appends the given text to the end of the buffer.
2304 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2305 where
2306 T: Into<Arc<str>>,
2307 {
2308 self.edit([(self.len()..self.len(), text)], None, cx)
2309 }
2310
2311 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2312 /// delete, and a string of text to insert at that location.
2313 ///
2314 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2315 /// request for the edited ranges, which will be processed when the buffer finishes
2316 /// parsing.
2317 ///
2318    /// Parsing takes place at the end of a transaction, and may be performed synchronously
2319    /// or asynchronously, depending on the changes.
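    ///
    /// A minimal sketch (hypothetical offsets, `buffer`, and `cx`; not compiled as a doctest):
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [(0..0, "if true {\n"), (10..10, "}\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```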
2320 pub fn edit<I, S, T>(
2321 &mut self,
2322 edits_iter: I,
2323 autoindent_mode: Option<AutoindentMode>,
2324 cx: &mut Context<Self>,
2325 ) -> Option<clock::Lamport>
2326 where
2327 I: IntoIterator<Item = (Range<S>, T)>,
2328 S: ToOffset,
2329 T: Into<Arc<str>>,
2330 {
2331 // Skip invalid edits and coalesce contiguous ones.
2332 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2333
2334 for (range, new_text) in edits_iter {
2335 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2336
2337 if range.start > range.end {
2338 mem::swap(&mut range.start, &mut range.end);
2339 }
2340 let new_text = new_text.into();
2341 if !new_text.is_empty() || !range.is_empty() {
2342 if let Some((prev_range, prev_text)) = edits.last_mut()
2343 && prev_range.end >= range.start
2344 {
2345 prev_range.end = cmp::max(prev_range.end, range.end);
2346 *prev_text = format!("{prev_text}{new_text}").into();
2347 } else {
2348 edits.push((range, new_text));
2349 }
2350 }
2351 }
2352 if edits.is_empty() {
2353 return None;
2354 }
2355
2356 self.start_transaction();
2357 self.pending_autoindent.take();
2358 let autoindent_request = autoindent_mode
2359 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2360
2361 let edit_operation = self.text.edit(edits.iter().cloned());
2362 let edit_id = edit_operation.timestamp();
2363
2364 if let Some((before_edit, mode)) = autoindent_request {
2365 let mut delta = 0isize;
2366 let mut previous_setting = None;
2367 let entries: Vec<_> = edits
2368 .into_iter()
2369 .enumerate()
2370 .zip(&edit_operation.as_edit().unwrap().new_text)
2371 .filter(|((_, (range, _)), _)| {
2372 let language = before_edit.language_at(range.start);
2373 let language_id = language.map(|l| l.id());
2374 if let Some((cached_language_id, auto_indent)) = previous_setting
2375 && cached_language_id == language_id
2376 {
2377 auto_indent
2378 } else {
2379 // The auto-indent setting is not present in editorconfigs, hence
2380 // we can avoid passing the file here.
2381 let auto_indent =
2382 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2383 previous_setting = Some((language_id, auto_indent));
2384 auto_indent
2385 }
2386 })
2387 .map(|((ix, (range, _)), new_text)| {
2388 let new_text_length = new_text.len();
2389 let old_start = range.start.to_point(&before_edit);
2390 let new_start = (delta + range.start as isize) as usize;
2391 let range_len = range.end - range.start;
2392 delta += new_text_length as isize - range_len as isize;
2393
2394 // Decide what range of the insertion to auto-indent, and whether
2395 // the first line of the insertion should be considered a newly-inserted line
2396 // or an edit to an existing line.
2397 let mut range_of_insertion_to_indent = 0..new_text_length;
2398 let mut first_line_is_new = true;
2399
2400 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2401 let old_line_end = before_edit.line_len(old_start.row);
2402
2403 if old_start.column > old_line_start {
2404 first_line_is_new = false;
2405 }
2406
2407 if !new_text.contains('\n')
2408 && (old_start.column + (range_len as u32) < old_line_end
2409 || old_line_end == old_line_start)
2410 {
2411 first_line_is_new = false;
2412 }
2413
2414 // When inserting text starting with a newline, avoid auto-indenting the
2415 // previous line.
2416 if new_text.starts_with('\n') {
2417 range_of_insertion_to_indent.start += 1;
2418 first_line_is_new = true;
2419 }
2420
2421 let mut original_indent_column = None;
2422 if let AutoindentMode::Block {
2423 original_indent_columns,
2424 } = &mode
2425 {
2426 original_indent_column = Some(if new_text.starts_with('\n') {
2427 indent_size_for_text(
2428 new_text[range_of_insertion_to_indent.clone()].chars(),
2429 )
2430 .len
2431 } else {
2432 original_indent_columns
2433 .get(ix)
2434 .copied()
2435 .flatten()
2436 .unwrap_or_else(|| {
2437 indent_size_for_text(
2438 new_text[range_of_insertion_to_indent.clone()].chars(),
2439 )
2440 .len
2441 })
2442 });
2443
2444 // Avoid auto-indenting the line after the edit.
2445 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2446 range_of_insertion_to_indent.end -= 1;
2447 }
2448 }
2449
2450 AutoindentRequestEntry {
2451 first_line_is_new,
2452 original_indent_column,
2453 indent_size: before_edit.language_indent_size_at(range.start, cx),
2454 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2455 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2456 }
2457 })
2458 .collect();
2459
2460 if !entries.is_empty() {
2461 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2462 before_edit,
2463 entries,
2464 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2465 ignore_empty_lines: false,
2466 }));
2467 }
2468 }
2469
2470 self.end_transaction(cx);
2471 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2472 Some(edit_id)
2473 }
2474
2475 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2476 self.was_changed();
2477
2478 if self.edits_since::<usize>(old_version).next().is_none() {
2479 return;
2480 }
2481
2482 self.reparse(cx);
2483 cx.emit(BufferEvent::Edited);
2484 if was_dirty != self.is_dirty() {
2485 cx.emit(BufferEvent::DirtyChanged);
2486 }
2487 cx.notify();
2488 }
2489
2490 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2491 where
2492 I: IntoIterator<Item = Range<T>>,
2493 T: ToOffset + Copy,
2494 {
2495 let before_edit = self.snapshot();
2496 let entries = ranges
2497 .into_iter()
2498 .map(|range| AutoindentRequestEntry {
2499 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2500 first_line_is_new: true,
2501 indent_size: before_edit.language_indent_size_at(range.start, cx),
2502 original_indent_column: None,
2503 })
2504 .collect();
2505 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2506 before_edit,
2507 entries,
2508 is_block_mode: false,
2509 ignore_empty_lines: true,
2510 }));
2511 self.request_autoindent(cx);
2512 }
2513
2514    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2515    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
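    ///
    /// A usage sketch (hypothetical position, `buffer`, and `cx`; not compiled as a doctest):
    ///
    /// ```ignore
    /// // Insert an empty line at row 4, column 2, padded with blank lines above and below.
    /// let start = buffer.insert_empty_line(Point::new(4, 2), true, true, cx);
    /// ```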
2516 pub fn insert_empty_line(
2517 &mut self,
2518 position: impl ToPoint,
2519 space_above: bool,
2520 space_below: bool,
2521 cx: &mut Context<Self>,
2522 ) -> Point {
2523 let mut position = position.to_point(self);
2524
2525 self.start_transaction();
2526
2527 self.edit(
2528 [(position..position, "\n")],
2529 Some(AutoindentMode::EachLine),
2530 cx,
2531 );
2532
2533 if position.column > 0 {
2534 position += Point::new(1, 0);
2535 }
2536
2537 if !self.is_line_blank(position.row) {
2538 self.edit(
2539 [(position..position, "\n")],
2540 Some(AutoindentMode::EachLine),
2541 cx,
2542 );
2543 }
2544
2545 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2546 self.edit(
2547 [(position..position, "\n")],
2548 Some(AutoindentMode::EachLine),
2549 cx,
2550 );
2551 position.row += 1;
2552 }
2553
2554 if space_below
2555 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2556 {
2557 self.edit(
2558 [(position..position, "\n")],
2559 Some(AutoindentMode::EachLine),
2560 cx,
2561 );
2562 }
2563
2564 self.end_transaction(cx);
2565
2566 position
2567 }
2568
2569 /// Applies the given remote operations to the buffer.
2570 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2571 self.pending_autoindent.take();
2572 let was_dirty = self.is_dirty();
2573 let old_version = self.version.clone();
2574 let mut deferred_ops = Vec::new();
2575 let buffer_ops = ops
2576 .into_iter()
2577 .filter_map(|op| match op {
2578 Operation::Buffer(op) => Some(op),
2579 _ => {
2580 if self.can_apply_op(&op) {
2581 self.apply_op(op, cx);
2582 } else {
2583 deferred_ops.push(op);
2584 }
2585 None
2586 }
2587 })
2588 .collect::<Vec<_>>();
2589 for operation in buffer_ops.iter() {
2590 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2591 }
2592 self.text.apply_ops(buffer_ops);
2593 self.deferred_ops.insert(deferred_ops);
2594 self.flush_deferred_ops(cx);
2595 self.did_edit(&old_version, was_dirty, cx);
2596 // Notify independently of whether the buffer was edited as the operations could include a
2597 // selection update.
2598 cx.notify();
2599 }
2600
2601 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2602 let mut deferred_ops = Vec::new();
2603 for op in self.deferred_ops.drain().iter().cloned() {
2604 if self.can_apply_op(&op) {
2605 self.apply_op(op, cx);
2606 } else {
2607 deferred_ops.push(op);
2608 }
2609 }
2610 self.deferred_ops.insert(deferred_ops);
2611 }
2612
2613 pub fn has_deferred_ops(&self) -> bool {
2614 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2615 }
2616
2617 fn can_apply_op(&self, operation: &Operation) -> bool {
2618 match operation {
2619 Operation::Buffer(_) => {
2620 unreachable!("buffer operations should never be applied at this layer")
2621 }
2622 Operation::UpdateDiagnostics {
2623 diagnostics: diagnostic_set,
2624 ..
2625 } => diagnostic_set.iter().all(|diagnostic| {
2626 self.text.can_resolve(&diagnostic.range.start)
2627 && self.text.can_resolve(&diagnostic.range.end)
2628 }),
2629 Operation::UpdateSelections { selections, .. } => selections
2630 .iter()
2631 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2632 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2633 }
2634 }
2635
2636 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2637 match operation {
2638 Operation::Buffer(_) => {
2639 unreachable!("buffer operations should never be applied at this layer")
2640 }
2641 Operation::UpdateDiagnostics {
2642 server_id,
2643 diagnostics: diagnostic_set,
2644 lamport_timestamp,
2645 } => {
2646 let snapshot = self.snapshot();
2647 self.apply_diagnostic_update(
2648 server_id,
2649 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2650 lamport_timestamp,
2651 cx,
2652 );
2653 }
2654 Operation::UpdateSelections {
2655 selections,
2656 lamport_timestamp,
2657 line_mode,
2658 cursor_shape,
2659 } => {
2660 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2661 && set.lamport_timestamp > lamport_timestamp
2662 {
2663 return;
2664 }
2665
2666 self.remote_selections.insert(
2667 lamport_timestamp.replica_id,
2668 SelectionSet {
2669 selections,
2670 lamport_timestamp,
2671 line_mode,
2672 cursor_shape,
2673 },
2674 );
2675 self.text.lamport_clock.observe(lamport_timestamp);
2676 self.non_text_state_update_count += 1;
2677 }
2678 Operation::UpdateCompletionTriggers {
2679 triggers,
2680 lamport_timestamp,
2681 server_id,
2682 } => {
2683 if triggers.is_empty() {
2684 self.completion_triggers_per_language_server
2685 .remove(&server_id);
2686 self.completion_triggers = self
2687 .completion_triggers_per_language_server
2688 .values()
2689 .flat_map(|triggers| triggers.iter().cloned())
2690 .collect();
2691 } else {
2692 self.completion_triggers_per_language_server
2693 .insert(server_id, triggers.iter().cloned().collect());
2694 self.completion_triggers.extend(triggers);
2695 }
2696 self.text.lamport_clock.observe(lamport_timestamp);
2697 }
2698 Operation::UpdateLineEnding {
2699 line_ending,
2700 lamport_timestamp,
2701 } => {
2702 self.text.set_line_ending(line_ending);
2703 self.text.lamport_clock.observe(lamport_timestamp);
2704 }
2705 }
2706 }
2707
2708 fn apply_diagnostic_update(
2709 &mut self,
2710 server_id: LanguageServerId,
2711 diagnostics: DiagnosticSet,
2712 lamport_timestamp: clock::Lamport,
2713 cx: &mut Context<Self>,
2714 ) {
2715 if lamport_timestamp > self.diagnostics_timestamp {
2716 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2717 if diagnostics.is_empty() {
2718 if let Ok(ix) = ix {
2719 self.diagnostics.remove(ix);
2720 }
2721 } else {
2722 match ix {
2723 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2724 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2725 };
2726 }
2727 self.diagnostics_timestamp = lamport_timestamp;
2728 self.non_text_state_update_count += 1;
2729 self.text.lamport_clock.observe(lamport_timestamp);
2730 cx.notify();
2731 cx.emit(BufferEvent::DiagnosticsUpdated);
2732 }
2733 }
2734
2735 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2736 self.was_changed();
2737 cx.emit(BufferEvent::Operation {
2738 operation,
2739 is_local,
2740 });
2741 }
2742
2743 /// Removes the selections for a given peer.
2744 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2745 self.remote_selections.remove(&replica_id);
2746 cx.notify();
2747 }
2748
2749 /// Undoes the most recent transaction.
2750 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2751 let was_dirty = self.is_dirty();
2752 let old_version = self.version.clone();
2753
2754 if let Some((transaction_id, operation)) = self.text.undo() {
2755 self.send_operation(Operation::Buffer(operation), true, cx);
2756 self.did_edit(&old_version, was_dirty, cx);
2757 Some(transaction_id)
2758 } else {
2759 None
2760 }
2761 }
2762
2763 /// Manually undoes a specific transaction in the buffer's undo history.
2764 pub fn undo_transaction(
2765 &mut self,
2766 transaction_id: TransactionId,
2767 cx: &mut Context<Self>,
2768 ) -> bool {
2769 let was_dirty = self.is_dirty();
2770 let old_version = self.version.clone();
2771 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2772 self.send_operation(Operation::Buffer(operation), true, cx);
2773 self.did_edit(&old_version, was_dirty, cx);
2774 true
2775 } else {
2776 false
2777 }
2778 }
2779
2780 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2781 pub fn undo_to_transaction(
2782 &mut self,
2783 transaction_id: TransactionId,
2784 cx: &mut Context<Self>,
2785 ) -> bool {
2786 let was_dirty = self.is_dirty();
2787 let old_version = self.version.clone();
2788
2789 let operations = self.text.undo_to_transaction(transaction_id);
2790 let undone = !operations.is_empty();
2791 for operation in operations {
2792 self.send_operation(Operation::Buffer(operation), true, cx);
2793 }
2794 if undone {
2795 self.did_edit(&old_version, was_dirty, cx)
2796 }
2797 undone
2798 }
2799
2800 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2801 let was_dirty = self.is_dirty();
2802 let operation = self.text.undo_operations(counts);
2803 let old_version = self.version.clone();
2804 self.send_operation(Operation::Buffer(operation), true, cx);
2805 self.did_edit(&old_version, was_dirty, cx);
2806 }
2807
2808    /// Redoes the most recently undone transaction.
2809 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2810 let was_dirty = self.is_dirty();
2811 let old_version = self.version.clone();
2812
2813 if let Some((transaction_id, operation)) = self.text.redo() {
2814 self.send_operation(Operation::Buffer(operation), true, cx);
2815 self.did_edit(&old_version, was_dirty, cx);
2816 Some(transaction_id)
2817 } else {
2818 None
2819 }
2820 }
2821
2822    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2823 pub fn redo_to_transaction(
2824 &mut self,
2825 transaction_id: TransactionId,
2826 cx: &mut Context<Self>,
2827 ) -> bool {
2828 let was_dirty = self.is_dirty();
2829 let old_version = self.version.clone();
2830
2831 let operations = self.text.redo_to_transaction(transaction_id);
2832 let redone = !operations.is_empty();
2833 for operation in operations {
2834 self.send_operation(Operation::Buffer(operation), true, cx);
2835 }
2836 if redone {
2837 self.did_edit(&old_version, was_dirty, cx)
2838 }
2839 redone
2840 }
2841
2842 /// Override current completion triggers with the user-provided completion triggers.
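    ///
    /// A usage sketch (hypothetical `server_id`, `buffer`, and `cx`; not compiled as a doctest):
    ///
    /// ```ignore
    /// buffer.set_completion_triggers(
    ///     server_id,
    ///     BTreeSet::from_iter([".".to_string(), "::".to_string()]),
    ///     cx,
    /// );
    /// ```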
2843 pub fn set_completion_triggers(
2844 &mut self,
2845 server_id: LanguageServerId,
2846 triggers: BTreeSet<String>,
2847 cx: &mut Context<Self>,
2848 ) {
2849 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2850 if triggers.is_empty() {
2851 self.completion_triggers_per_language_server
2852 .remove(&server_id);
2853 self.completion_triggers = self
2854 .completion_triggers_per_language_server
2855 .values()
2856 .flat_map(|triggers| triggers.iter().cloned())
2857 .collect();
2858 } else {
2859 self.completion_triggers_per_language_server
2860 .insert(server_id, triggers.clone());
2861 self.completion_triggers.extend(triggers.iter().cloned());
2862 }
2863 self.send_operation(
2864 Operation::UpdateCompletionTriggers {
2865 triggers: triggers.into_iter().collect(),
2866 lamport_timestamp: self.completion_triggers_timestamp,
2867 server_id,
2868 },
2869 true,
2870 cx,
2871 );
2872 cx.notify();
2873 }
2874
2875    /// Returns the set of strings that trigger a completion menu for this language.
2876    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2877 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2878 &self.completion_triggers
2879 }
2880
2881 /// Call this directly after performing edits to prevent the preview tab
2882 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2883 /// to return false until there are additional edits.
2884 pub fn refresh_preview(&mut self) {
2885 self.preview_version = self.version.clone();
2886 }
2887
2888 /// Whether we should preserve the preview status of a tab containing this buffer.
2889 pub fn preserve_preview(&self) -> bool {
2890 !self.has_edits_since(&self.preview_version)
2891 }
2892}
2893
2894#[doc(hidden)]
2895#[cfg(any(test, feature = "test-support"))]
2896impl Buffer {
2897 pub fn edit_via_marked_text(
2898 &mut self,
2899 marked_string: &str,
2900 autoindent_mode: Option<AutoindentMode>,
2901 cx: &mut Context<Self>,
2902 ) {
2903 let edits = self.edits_for_marked_text(marked_string);
2904 self.edit(edits, autoindent_mode, cx);
2905 }
2906
2907 pub fn set_group_interval(&mut self, group_interval: Duration) {
2908 self.text.set_group_interval(group_interval);
2909 }
2910
2911 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2912 where
2913 T: rand::Rng,
2914 {
2915 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2916 let mut last_end = None;
2917 for _ in 0..old_range_count {
2918 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2919 break;
2920 }
2921
2922 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2923 let mut range = self.random_byte_range(new_start, rng);
2924 if rng.random_bool(0.2) {
2925 mem::swap(&mut range.start, &mut range.end);
2926 }
2927 last_end = Some(range.end);
2928
2929 let new_text_len = rng.random_range(0..10);
2930 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2931 new_text = new_text.to_uppercase();
2932
2933 edits.push((range, new_text));
2934 }
2935 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2936 self.edit(edits, None, cx);
2937 }
2938
2939 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2940 let was_dirty = self.is_dirty();
2941 let old_version = self.version.clone();
2942
2943 let ops = self.text.randomly_undo_redo(rng);
2944 if !ops.is_empty() {
2945 for op in ops {
2946 self.send_operation(Operation::Buffer(op), true, cx);
2947 self.did_edit(&old_version, was_dirty, cx);
2948 }
2949 }
2950 }
2951}
2952
2953impl EventEmitter<BufferEvent> for Buffer {}
2954
2955impl Deref for Buffer {
2956 type Target = TextBuffer;
2957
2958 fn deref(&self) -> &Self::Target {
2959 &self.text
2960 }
2961}
2962
2963impl BufferSnapshot {
2964 /// Returns [`IndentSize`] for a given line that respects user settings and
2965 /// language preferences.
2966 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2967 indent_size_for_line(self, row)
2968 }
2969
2970 /// Returns [`IndentSize`] for a given position that respects user settings
2971 /// and language preferences.
2972 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2973 let settings = language_settings(
2974 self.language_at(position).map(|l| l.name()),
2975 self.file(),
2976 cx,
2977 );
2978 if settings.hard_tabs {
2979 IndentSize::tab()
2980 } else {
2981 IndentSize::spaces(settings.tab_size.get())
2982 }
2983 }
2984
2985 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2986 /// is passed in as `single_indent_size`.
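    ///
    /// A minimal sketch (hypothetical `snapshot`; not compiled as a doctest):
    ///
    /// ```ignore
    /// // Suggest indents for rows 2, 3, and 4, using a four-space indent unit.
    /// let suggestions = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: {} column(s) of indentation", indent.len);
    /// }
    /// ```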
2987 pub fn suggested_indents(
2988 &self,
2989 rows: impl Iterator<Item = u32>,
2990 single_indent_size: IndentSize,
2991 ) -> BTreeMap<u32, IndentSize> {
2992 let mut result = BTreeMap::new();
2993
2994 for row_range in contiguous_ranges(rows, 10) {
2995 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2996 Some(suggestions) => suggestions,
2997 _ => break,
2998 };
2999
3000 for (row, suggestion) in row_range.zip(suggestions) {
3001 let indent_size = if let Some(suggestion) = suggestion {
3002 result
3003 .get(&suggestion.basis_row)
3004 .copied()
3005 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3006 .with_delta(suggestion.delta, single_indent_size)
3007 } else {
3008 self.indent_size_for_line(row)
3009 };
3010
3011 result.insert(row, indent_size);
3012 }
3013 }
3014
3015 result
3016 }
3017
3018 fn suggest_autoindents(
3019 &self,
3020 row_range: Range<u32>,
3021 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3022 let config = &self.language.as_ref()?.config;
3023 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3024
3025 #[derive(Debug, Clone)]
3026 struct StartPosition {
3027 start: Point,
3028 suffix: SharedString,
3029 }
3030
3031 // Find the suggested indentation ranges based on the syntax tree.
3032 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3033 let end = Point::new(row_range.end, 0);
3034 let range = (start..end).to_offset(&self.text);
3035 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3036 Some(&grammar.indents_config.as_ref()?.query)
3037 });
3038 let indent_configs = matches
3039 .grammars()
3040 .iter()
3041 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3042 .collect::<Vec<_>>();
3043
3044 let mut indent_ranges = Vec::<Range<Point>>::new();
3045 let mut start_positions = Vec::<StartPosition>::new();
3046 let mut outdent_positions = Vec::<Point>::new();
3047 while let Some(mat) = matches.peek() {
3048 let mut start: Option<Point> = None;
3049 let mut end: Option<Point> = None;
3050
3051 let config = indent_configs[mat.grammar_index];
3052 for capture in mat.captures {
3053 if capture.index == config.indent_capture_ix {
3054 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3055 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3056 } else if Some(capture.index) == config.start_capture_ix {
3057 start = Some(Point::from_ts_point(capture.node.end_position()));
3058 } else if Some(capture.index) == config.end_capture_ix {
3059 end = Some(Point::from_ts_point(capture.node.start_position()));
3060 } else if Some(capture.index) == config.outdent_capture_ix {
3061 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3062 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3063 start_positions.push(StartPosition {
3064 start: Point::from_ts_point(capture.node.start_position()),
3065 suffix: suffix.clone(),
3066 });
3067 }
3068 }
3069
3070 matches.advance();
3071 if let Some((start, end)) = start.zip(end) {
3072 if start.row == end.row {
3073 continue;
3074 }
3075 let range = start..end;
3076 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3077 Err(ix) => indent_ranges.insert(ix, range),
3078 Ok(ix) => {
3079 let prev_range = &mut indent_ranges[ix];
3080 prev_range.end = prev_range.end.max(range.end);
3081 }
3082 }
3083 }
3084 }
3085
3086 let mut error_ranges = Vec::<Range<Point>>::new();
3087 let mut matches = self
3088 .syntax
3089 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3090 while let Some(mat) = matches.peek() {
3091 let node = mat.captures[0].node;
3092 let start = Point::from_ts_point(node.start_position());
3093 let end = Point::from_ts_point(node.end_position());
3094 let range = start..end;
3095 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3096 Ok(ix) | Err(ix) => ix,
3097 };
3098 let mut end_ix = ix;
3099 while let Some(existing_range) = error_ranges.get(end_ix) {
3100 if existing_range.end < end {
3101 end_ix += 1;
3102 } else {
3103 break;
3104 }
3105 }
3106 error_ranges.splice(ix..end_ix, [range]);
3107 matches.advance();
3108 }
3109
3110 outdent_positions.sort();
3111 for outdent_position in outdent_positions {
3112            // Find the innermost indent range containing this outdent position,
3113            // and set its end to the outdent position.
3114 if let Some(range_to_truncate) = indent_ranges
3115 .iter_mut()
3116 .filter(|indent_range| indent_range.contains(&outdent_position))
3117 .next_back()
3118 {
3119 range_to_truncate.end = outdent_position;
3120 }
3121 }
3122
3123 start_positions.sort_by_key(|b| b.start);
3124
3125        // Find the suggested indentation increases and decreases based on regexes.
3126 let mut regex_outdent_map = HashMap::default();
3127 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3128 let mut start_positions_iter = start_positions.iter().peekable();
3129
3130 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3131 self.for_each_line(
3132 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3133 ..Point::new(row_range.end, 0),
3134 |row, line| {
3135 if config
3136 .decrease_indent_pattern
3137 .as_ref()
3138 .is_some_and(|regex| regex.is_match(line))
3139 {
3140 indent_change_rows.push((row, Ordering::Less));
3141 }
3142 if config
3143 .increase_indent_pattern
3144 .as_ref()
3145 .is_some_and(|regex| regex.is_match(line))
3146 {
3147 indent_change_rows.push((row + 1, Ordering::Greater));
3148 }
3149 while let Some(pos) = start_positions_iter.peek() {
3150 if pos.start.row < row {
3151 let pos = start_positions_iter.next().unwrap();
3152 last_seen_suffix
3153 .entry(pos.suffix.to_string())
3154 .or_default()
3155 .push(pos.start);
3156 } else {
3157 break;
3158 }
3159 }
3160 for rule in &config.decrease_indent_patterns {
3161 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3162 let row_start_column = self.indent_size_for_line(row).len;
3163 let basis_row = rule
3164 .valid_after
3165 .iter()
3166 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3167 .flatten()
3168 .filter(|start_point| start_point.column <= row_start_column)
3169 .max_by_key(|start_point| start_point.row);
3170 if let Some(outdent_to_row) = basis_row {
3171 regex_outdent_map.insert(row, outdent_to_row.row);
3172 }
3173 break;
3174 }
3175 }
3176 },
3177 );
3178
3179 let mut indent_changes = indent_change_rows.into_iter().peekable();
3180 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3181 prev_non_blank_row.unwrap_or(0)
3182 } else {
3183 row_range.start.saturating_sub(1)
3184 };
3185
3186 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3187 Some(row_range.map(move |row| {
3188 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3189
3190 let mut indent_from_prev_row = false;
3191 let mut outdent_from_prev_row = false;
3192 let mut outdent_to_row = u32::MAX;
3193 let mut from_regex = false;
3194
3195 while let Some((indent_row, delta)) = indent_changes.peek() {
3196 match indent_row.cmp(&row) {
3197 Ordering::Equal => match delta {
3198 Ordering::Less => {
3199 from_regex = true;
3200 outdent_from_prev_row = true
3201 }
3202 Ordering::Greater => {
3203 indent_from_prev_row = true;
3204 from_regex = true
3205 }
3206 _ => {}
3207 },
3208
3209 Ordering::Greater => break,
3210 Ordering::Less => {}
3211 }
3212
3213 indent_changes.next();
3214 }
3215
3216 for range in &indent_ranges {
3217 if range.start.row >= row {
3218 break;
3219 }
3220 if range.start.row == prev_row && range.end > row_start {
3221 indent_from_prev_row = true;
3222 }
3223 if range.end > prev_row_start && range.end <= row_start {
3224 outdent_to_row = outdent_to_row.min(range.start.row);
3225 }
3226 }
3227
3228 if let Some(basis_row) = regex_outdent_map.get(&row) {
3229 indent_from_prev_row = false;
3230 outdent_to_row = *basis_row;
3231 from_regex = true;
3232 }
3233
3234 let within_error = error_ranges
3235 .iter()
3236 .any(|e| e.start.row < row && e.end > row_start);
3237
3238 let suggestion = if outdent_to_row == prev_row
3239 || (outdent_from_prev_row && indent_from_prev_row)
3240 {
3241 Some(IndentSuggestion {
3242 basis_row: prev_row,
3243 delta: Ordering::Equal,
3244 within_error: within_error && !from_regex,
3245 })
3246 } else if indent_from_prev_row {
3247 Some(IndentSuggestion {
3248 basis_row: prev_row,
3249 delta: Ordering::Greater,
3250 within_error: within_error && !from_regex,
3251 })
3252 } else if outdent_to_row < prev_row {
3253 Some(IndentSuggestion {
3254 basis_row: outdent_to_row,
3255 delta: Ordering::Equal,
3256 within_error: within_error && !from_regex,
3257 })
3258 } else if outdent_from_prev_row {
3259 Some(IndentSuggestion {
3260 basis_row: prev_row,
3261 delta: Ordering::Less,
3262 within_error: within_error && !from_regex,
3263 })
3264 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3265 {
3266 Some(IndentSuggestion {
3267 basis_row: prev_row,
3268 delta: Ordering::Equal,
3269 within_error: within_error && !from_regex,
3270 })
3271 } else {
3272 None
3273 };
3274
3275 prev_row = row;
3276 prev_row_start = row_start;
3277 suggestion
3278 }))
3279 }
3280
3281 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3282 while row > 0 {
3283 row -= 1;
3284 if !self.is_line_blank(row) {
3285 return Some(row);
3286 }
3287 }
3288 None
3289 }
3290
3291 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3292 let captures = self.syntax.captures(range, &self.text, |grammar| {
3293 grammar
3294 .highlights_config
3295 .as_ref()
3296 .map(|config| &config.query)
3297 });
3298 let highlight_maps = captures
3299 .grammars()
3300 .iter()
3301 .map(|grammar| grammar.highlight_map())
3302 .collect();
3303 (captures, highlight_maps)
3304 }
3305
3306 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3307 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3308 /// returned in chunks where each chunk has a single syntax highlighting style and
3309 /// diagnostic status.
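    ///
    /// A sketch of reassembling the text from its chunks (hypothetical `snapshot`; not
    /// compiled as a doctest):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries a single highlight style and diagnostic status.
    ///     text.push_str(chunk.text);
    /// }
    /// assert_eq!(text, snapshot.text());
    /// ```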
3310 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3311 let range = range.start.to_offset(self)..range.end.to_offset(self);
3312
3313 let mut syntax = None;
3314 if language_aware {
3315 syntax = Some(self.get_highlights(range.clone()));
3316 }
3317 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3318 let diagnostics = language_aware;
3319 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3320 }
3321
3322 pub fn highlighted_text_for_range<T: ToOffset>(
3323 &self,
3324 range: Range<T>,
3325 override_style: Option<HighlightStyle>,
3326 syntax_theme: &SyntaxTheme,
3327 ) -> HighlightedText {
3328 HighlightedText::from_buffer_range(
3329 range,
3330 &self.text,
3331 &self.syntax,
3332 override_style,
3333 syntax_theme,
3334 )
3335 }
3336
3337 /// Invokes the given callback for each line of text in the given range of the buffer.
3338    /// Uses a callback to avoid allocating a new string for each line.
3339 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3340 let mut line = String::new();
3341 let mut row = range.start.row;
3342 for chunk in self
3343 .as_rope()
3344 .chunks_in_range(range.to_offset(self))
3345 .chain(["\n"])
3346 {
3347 for (newline_ix, text) in chunk.split('\n').enumerate() {
3348 if newline_ix > 0 {
3349 callback(row, &line);
3350 row += 1;
3351 line.clear();
3352 }
3353 line.push_str(text);
3354 }
3355 }
3356 }
3357
3358 /// Iterates over every [`SyntaxLayer`] in the buffer.
3359 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3360 self.syntax_layers_for_range(0..self.len(), true)
3361 }
3362
3363 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3364 let offset = position.to_offset(self);
3365 self.syntax_layers_for_range(offset..offset, false)
3366 .filter(|l| l.node().end_byte() > offset)
3367 .last()
3368 }
3369
3370 pub fn syntax_layers_for_range<D: ToOffset>(
3371 &self,
3372 range: Range<D>,
3373 include_hidden: bool,
3374 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3375 self.syntax
3376 .layers_for_range(range, &self.text, include_hidden)
3377 }
3378
3379 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3380 &self,
3381 range: Range<D>,
3382 ) -> Option<SyntaxLayer<'_>> {
3383 let range = range.to_offset(self);
3384 self.syntax
3385 .layers_for_range(range, &self.text, false)
3386 .max_by(|a, b| {
3387 if a.depth != b.depth {
3388 a.depth.cmp(&b.depth)
3389 } else if a.offset.0 != b.offset.0 {
3390 a.offset.0.cmp(&b.offset.0)
3391 } else {
3392 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3393 }
3394 })
3395 }
3396
3397 /// Returns the main [`Language`].
3398 pub fn language(&self) -> Option<&Arc<Language>> {
3399 self.language.as_ref()
3400 }
3401
3402 /// Returns the [`Language`] at the given location.
3403 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3404 self.syntax_layer_at(position)
3405 .map(|info| info.language)
3406 .or(self.language.as_ref())
3407 }
3408
3409 /// Returns the settings for the language at the given location.
3410 pub fn settings_at<'a, D: ToOffset>(
3411 &'a self,
3412 position: D,
3413 cx: &'a App,
3414 ) -> Cow<'a, LanguageSettings> {
3415 language_settings(
3416 self.language_at(position).map(|l| l.name()),
3417 self.file.as_ref(),
3418 cx,
3419 )
3420 }
3421
3422 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3423 CharClassifier::new(self.language_scope_at(point))
3424 }
3425
3426 /// Returns the [`LanguageScope`] at the given location.
3427 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3428 let offset = position.to_offset(self);
3429 let mut scope = None;
3430 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3431
3432 // Use the layer that has the smallest node intersecting the given point.
3433 for layer in self
3434 .syntax
3435 .layers_for_range(offset..offset, &self.text, false)
3436 {
3437 let mut cursor = layer.node().walk();
3438
3439 let mut range = None;
3440 loop {
3441 let child_range = cursor.node().byte_range();
3442 if !child_range.contains(&offset) {
3443 break;
3444 }
3445
3446 range = Some(child_range);
3447 if cursor.goto_first_child_for_byte(offset).is_none() {
3448 break;
3449 }
3450 }
3451
3452 if let Some(range) = range
3453 && smallest_range_and_depth.as_ref().is_none_or(
3454 |(smallest_range, smallest_range_depth)| {
3455 if layer.depth > *smallest_range_depth {
3456 true
3457 } else if layer.depth == *smallest_range_depth {
3458 range.len() < smallest_range.len()
3459 } else {
3460 false
3461 }
3462 },
3463 )
3464 {
3465 smallest_range_and_depth = Some((range, layer.depth));
3466 scope = Some(LanguageScope {
3467 language: layer.language.clone(),
3468 override_id: layer.override_id(offset, &self.text),
3469 });
3470 }
3471 }
3472
3473 scope.or_else(|| {
3474 self.language.clone().map(|language| LanguageScope {
3475 language,
3476 override_id: None,
3477 })
3478 })
3479 }
3480
3481 /// Returns a tuple of the range and character kind of the word
3482 /// surrounding the given position.
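    ///
    /// A minimal sketch (hypothetical `snapshot`; not compiled as a doctest):
    ///
    /// ```ignore
    /// // With buffer text "let value = 42;", an offset inside "value"
    /// // expands to the whole word.
    /// let (range, kind) = snapshot.surrounding_word(6, None);
    /// assert_eq!(range, 4..9);
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```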
3483 pub fn surrounding_word<T: ToOffset>(
3484 &self,
3485 start: T,
3486 scope_context: Option<CharScopeContext>,
3487 ) -> (Range<usize>, Option<CharKind>) {
3488 let mut start = start.to_offset(self);
3489 let mut end = start;
3490 let mut next_chars = self.chars_at(start).take(128).peekable();
3491 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3492
3493 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3494 let word_kind = cmp::max(
3495 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3496 next_chars.peek().copied().map(|c| classifier.kind(c)),
3497 );
3498
3499 for ch in prev_chars {
3500 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3501 start -= ch.len_utf8();
3502 } else {
3503 break;
3504 }
3505 }
3506
3507 for ch in next_chars {
3508 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3509 end += ch.len_utf8();
3510 } else {
3511 break;
3512 }
3513 }
3514
3515 (start..end, word_kind)
3516 }
3517
    /// Moves the `tree_sitter::TreeCursor` to the smallest descendant or ancestor syntax node
    /// enclosing the given range. When `require_larger` is true, the node found must be larger
    /// than the query range.
3520 ///
3521 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3522 /// be moved to the root of the tree.
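    ///
    /// Illustrative usage only, mirroring how [`Self::syntax_ancestor`] calls this
    /// helper (`layer` and `range` are assumed to be in scope):
    ///
    /// ```ignore
    /// let mut cursor = layer.node().walk();
    /// if Self::goto_node_enclosing_range(&mut cursor, &range, true) {
    ///     // `cursor.node()` now encloses `range` and is strictly larger than it.
    /// }
    /// ```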
3523 fn goto_node_enclosing_range(
3524 cursor: &mut tree_sitter::TreeCursor,
3525 query_range: &Range<usize>,
3526 require_larger: bool,
3527 ) -> bool {
3528 let mut ascending = false;
3529 loop {
3530 let mut range = cursor.node().byte_range();
3531 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3534 if range.start > query_range.start {
3535 cursor.goto_previous_sibling();
3536 range = cursor.node().byte_range();
3537 }
3538 } else {
3539 // When the query range is non-empty and the current node ends exactly at the start,
3540 // move to the next sibling to find a node that extends beyond the start.
3541 if range.end == query_range.start {
3542 cursor.goto_next_sibling();
3543 range = cursor.node().byte_range();
3544 }
3545 }
3546
3547 let encloses = range.contains_inclusive(query_range)
3548 && (!require_larger || range.len() > query_range.len());
3549 if !encloses {
3550 ascending = true;
3551 if !cursor.goto_parent() {
3552 return false;
3553 }
3554 continue;
3555 } else if ascending {
3556 return true;
3557 }
3558
3559 // Descend into the current node.
3560 if cursor
3561 .goto_first_child_for_byte(query_range.start)
3562 .is_none()
3563 {
3564 return true;
3565 }
3566 }
3567 }
3568
3569 pub fn syntax_ancestor<'a, T: ToOffset>(
3570 &'a self,
3571 range: Range<T>,
3572 ) -> Option<tree_sitter::Node<'a>> {
3573 let range = range.start.to_offset(self)..range.end.to_offset(self);
3574 let mut result: Option<tree_sitter::Node<'a>> = None;
3575 for layer in self
3576 .syntax
3577 .layers_for_range(range.clone(), &self.text, true)
3578 {
3579 let mut cursor = layer.node().walk();
3580
3581 // Find the node that both contains the range and is larger than it.
3582 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3583 continue;
3584 }
3585
3586 let left_node = cursor.node();
3587 let mut layer_result = left_node;
3588
3589 // For an empty range, try to find another node immediately to the right of the range.
3590 if left_node.end_byte() == range.start {
3591 let mut right_node = None;
3592 while !cursor.goto_next_sibling() {
3593 if !cursor.goto_parent() {
3594 break;
3595 }
3596 }
3597
3598 while cursor.node().start_byte() == range.start {
3599 right_node = Some(cursor.node());
3600 if !cursor.goto_first_child() {
3601 break;
3602 }
3603 }
3604
3605 // If there is a candidate node on both sides of the (empty) range, then
3606 // decide between the two by favoring a named node over an anonymous token.
3607 // If both nodes are the same in that regard, favor the right one.
3608 if let Some(right_node) = right_node
3609 && (right_node.is_named() || !left_node.is_named())
3610 {
3611 layer_result = right_node;
3612 }
3613 }
3614
3615 if let Some(previous_result) = &result
3616 && previous_result.byte_range().len() < layer_result.byte_range().len()
3617 {
3618 continue;
3619 }
3620 result = Some(layer_result);
3621 }
3622
3623 result
3624 }
3625
3626 /// Find the previous sibling syntax node at the given range.
3627 ///
3628 /// This function locates the syntax node that precedes the node containing
3629 /// the given range. It searches hierarchically by:
3630 /// 1. Finding the node that contains the given range
3631 /// 2. Looking for the previous sibling at the same tree level
3632 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3633 ///
3634 /// Returns `None` if there is no previous sibling at any ancestor level.
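    ///
    /// # Example
    ///
    /// An illustrative sketch (not compiled as a doc test), assuming `snapshot` is a
    /// parsed Rust `BufferSnapshot` containing `"fn a() {}\nfn b() {}"`:
    ///
    /// ```ignore
    /// // The range of the second function yields the first one as its previous sibling.
    /// let node = snapshot.syntax_prev_sibling(10..19usize).unwrap();
    /// assert_eq!(node.kind(), "function_item");
    /// assert_eq!(node.byte_range(), 0..9);
    /// ```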
3635 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3636 &'a self,
3637 range: Range<T>,
3638 ) -> Option<tree_sitter::Node<'a>> {
3639 let range = range.start.to_offset(self)..range.end.to_offset(self);
3640 let mut result: Option<tree_sitter::Node<'a>> = None;
3641
3642 for layer in self
3643 .syntax
3644 .layers_for_range(range.clone(), &self.text, true)
3645 {
3646 let mut cursor = layer.node().walk();
3647
3648 // Find the node that contains the range
3649 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3650 continue;
3651 }
3652
3653 // Look for the previous sibling, moving up ancestor levels if needed
3654 loop {
3655 if cursor.goto_previous_sibling() {
3656 let layer_result = cursor.node();
3657
3658 if let Some(previous_result) = &result {
3659 if previous_result.byte_range().end < layer_result.byte_range().end {
3660 continue;
3661 }
3662 }
3663 result = Some(layer_result);
3664 break;
3665 }
3666
3667 // No sibling found at this level, try moving up to parent
3668 if !cursor.goto_parent() {
3669 break;
3670 }
3671 }
3672 }
3673
3674 result
3675 }
3676
3677 /// Find the next sibling syntax node at the given range.
3678 ///
3679 /// This function locates the syntax node that follows the node containing
3680 /// the given range. It searches hierarchically by:
3681 /// 1. Finding the node that contains the given range
3682 /// 2. Looking for the next sibling at the same tree level
3683 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3684 ///
3685 /// Returns `None` if there is no next sibling at any ancestor level.
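    ///
    /// # Example
    ///
    /// The counterpart of [`Self::syntax_prev_sibling`]; an illustrative sketch under
    /// the same assumptions (a parsed Rust `BufferSnapshot` containing
    /// `"fn a() {}\nfn b() {}"`):
    ///
    /// ```ignore
    /// // The range of the first function yields the second one as its next sibling.
    /// let node = snapshot.syntax_next_sibling(0..9usize).unwrap();
    /// assert_eq!(node.kind(), "function_item");
    /// assert_eq!(node.byte_range(), 10..19);
    /// ```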
3686 pub fn syntax_next_sibling<'a, T: ToOffset>(
3687 &'a self,
3688 range: Range<T>,
3689 ) -> Option<tree_sitter::Node<'a>> {
3690 let range = range.start.to_offset(self)..range.end.to_offset(self);
3691 let mut result: Option<tree_sitter::Node<'a>> = None;
3692
3693 for layer in self
3694 .syntax
3695 .layers_for_range(range.clone(), &self.text, true)
3696 {
3697 let mut cursor = layer.node().walk();
3698
3699 // Find the node that contains the range
3700 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3701 continue;
3702 }
3703
3704 // Look for the next sibling, moving up ancestor levels if needed
3705 loop {
3706 if cursor.goto_next_sibling() {
3707 let layer_result = cursor.node();
3708
3709 if let Some(previous_result) = &result {
3710 if previous_result.byte_range().start > layer_result.byte_range().start {
3711 continue;
3712 }
3713 }
3714 result = Some(layer_result);
3715 break;
3716 }
3717
3718 // No sibling found at this level, try moving up to parent
3719 if !cursor.goto_parent() {
3720 break;
3721 }
3722 }
3723 }
3724
3725 result
3726 }
3727
3728 /// Returns the root syntax node within the given row
3729 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3730 let start_offset = position.to_offset(self);
3731
3732 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3733
3734 let layer = self
3735 .syntax
3736 .layers_for_range(start_offset..start_offset, &self.text, true)
3737 .next()?;
3738
3739 let mut cursor = layer.node().walk();
3740
3741 // Descend to the first leaf that touches the start of the range.
3742 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3743 if cursor.node().end_byte() == start_offset {
3744 cursor.goto_next_sibling();
3745 }
3746 }
3747
3748 // Ascend to the root node within the same row.
3749 while cursor.goto_parent() {
3750 if cursor.node().start_position().row != row {
3751 break;
3752 }
3753 }
3754
3755 Some(cursor.node())
3756 }
3757
3758 /// Returns the outline for the buffer.
3759 ///
3760 /// This method allows passing an optional [`SyntaxTheme`] to
3761 /// syntax-highlight the returned symbols.
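    ///
    /// # Example
    ///
    /// An illustrative sketch (not compiled as a doc test), assuming `snapshot` is a
    /// parsed `BufferSnapshot` whose language defines an outline query. The items
    /// backing the returned [`Outline`] can also be fetched directly via
    /// [`Self::outline_items_containing`]:
    ///
    /// ```ignore
    /// let items = snapshot.outline_items_containing(0..snapshot.len(), true, None);
    /// for item in &items {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```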
3762 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3763 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3764 }
3765
3766 /// Returns all the symbols that contain the given position.
3767 ///
3768 /// This method allows passing an optional [`SyntaxTheme`] to
3769 /// syntax-highlight the returned symbols.
3770 pub fn symbols_containing<T: ToOffset>(
3771 &self,
3772 position: T,
3773 theme: Option<&SyntaxTheme>,
3774 ) -> Vec<OutlineItem<Anchor>> {
3775 let position = position.to_offset(self);
3776 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3777 let end = self.clip_offset(position + 1, Bias::Right);
3778 let mut items = self.outline_items_containing(start..end, false, theme);
3779 let mut prev_depth = None;
3780 items.retain(|item| {
3781 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3782 prev_depth = Some(item.depth);
3783 result
3784 });
3785 items
3786 }
3787
3788 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3789 let range = range.to_offset(self);
3790 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3791 grammar.outline_config.as_ref().map(|c| &c.query)
3792 });
3793 let configs = matches
3794 .grammars()
3795 .iter()
3796 .map(|g| g.outline_config.as_ref().unwrap())
3797 .collect::<Vec<_>>();
3798
3799 while let Some(mat) = matches.peek() {
3800 let config = &configs[mat.grammar_index];
3801 let containing_item_node = maybe!({
3802 let item_node = mat.captures.iter().find_map(|cap| {
3803 if cap.index == config.item_capture_ix {
3804 Some(cap.node)
3805 } else {
3806 None
3807 }
3808 })?;
3809
3810 let item_byte_range = item_node.byte_range();
3811 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3812 None
3813 } else {
3814 Some(item_node)
3815 }
3816 });
3817
3818 if let Some(item_node) = containing_item_node {
3819 return Some(
3820 Point::from_ts_point(item_node.start_position())
3821 ..Point::from_ts_point(item_node.end_position()),
3822 );
3823 }
3824
3825 matches.advance();
3826 }
3827 None
3828 }
3829
3830 pub fn outline_items_containing<T: ToOffset>(
3831 &self,
3832 range: Range<T>,
3833 include_extra_context: bool,
3834 theme: Option<&SyntaxTheme>,
3835 ) -> Vec<OutlineItem<Anchor>> {
3836 self.outline_items_containing_internal(
3837 range,
3838 include_extra_context,
3839 theme,
3840 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3841 )
3842 }
3843
3844 pub fn outline_items_as_points_containing<T: ToOffset>(
3845 &self,
3846 range: Range<T>,
3847 include_extra_context: bool,
3848 theme: Option<&SyntaxTheme>,
3849 ) -> Vec<OutlineItem<Point>> {
3850 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3851 range
3852 })
3853 }
3854
3855 fn outline_items_containing_internal<T: ToOffset, U>(
3856 &self,
3857 range: Range<T>,
3858 include_extra_context: bool,
3859 theme: Option<&SyntaxTheme>,
3860 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3861 ) -> Vec<OutlineItem<U>> {
3862 let range = range.to_offset(self);
3863 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3864 grammar.outline_config.as_ref().map(|c| &c.query)
3865 });
3866
3867 let mut items = Vec::new();
3868 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3869 while let Some(mat) = matches.peek() {
3870 let config = matches.grammars()[mat.grammar_index]
3871 .outline_config
3872 .as_ref()
3873 .unwrap();
3874 if let Some(item) =
3875 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3876 {
3877 items.push(item);
3878 } else if let Some(capture) = mat
3879 .captures
3880 .iter()
3881 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3882 {
3883 let capture_range = capture.node.start_position()..capture.node.end_position();
3884 let mut capture_row_range =
3885 capture_range.start.row as u32..capture_range.end.row as u32;
3886 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3887 {
3888 capture_row_range.end -= 1;
3889 }
3890 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3891 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3892 last_row_range.end = capture_row_range.end;
3893 } else {
3894 annotation_row_ranges.push(capture_row_range);
3895 }
3896 } else {
3897 annotation_row_ranges.push(capture_row_range);
3898 }
3899 }
3900 matches.advance();
3901 }
3902
3903 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3904
3905 // Assign depths based on containment relationships and convert to anchors.
3906 let mut item_ends_stack = Vec::<Point>::new();
3907 let mut anchor_items = Vec::new();
3908 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3909 for item in items {
3910 while let Some(last_end) = item_ends_stack.last().copied() {
3911 if last_end < item.range.end {
3912 item_ends_stack.pop();
3913 } else {
3914 break;
3915 }
3916 }
3917
3918 let mut annotation_row_range = None;
3919 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3920 let row_preceding_item = item.range.start.row.saturating_sub(1);
3921 if next_annotation_row_range.end < row_preceding_item {
3922 annotation_row_ranges.next();
3923 } else {
3924 if next_annotation_row_range.end == row_preceding_item {
3925 annotation_row_range = Some(next_annotation_row_range.clone());
3926 annotation_row_ranges.next();
3927 }
3928 break;
3929 }
3930 }
3931
3932 anchor_items.push(OutlineItem {
3933 depth: item_ends_stack.len(),
3934 range: range_callback(self, item.range.clone()),
3935 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3936 text: item.text,
3937 highlight_ranges: item.highlight_ranges,
3938 name_ranges: item.name_ranges,
3939 body_range: item.body_range.map(|r| range_callback(self, r)),
3940 annotation_range: annotation_row_range.map(|annotation_range| {
3941 let point_range = Point::new(annotation_range.start, 0)
3942 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3943 range_callback(self, point_range)
3944 }),
3945 });
3946 item_ends_stack.push(item.range.end);
3947 }
3948
3949 anchor_items
3950 }
3951
3952 fn next_outline_item(
3953 &self,
3954 config: &OutlineConfig,
3955 mat: &SyntaxMapMatch,
3956 range: &Range<usize>,
3957 include_extra_context: bool,
3958 theme: Option<&SyntaxTheme>,
3959 ) -> Option<OutlineItem<Point>> {
3960 let item_node = mat.captures.iter().find_map(|cap| {
3961 if cap.index == config.item_capture_ix {
3962 Some(cap.node)
3963 } else {
3964 None
3965 }
3966 })?;
3967
3968 let item_byte_range = item_node.byte_range();
3969 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3970 return None;
3971 }
3972 let item_point_range = Point::from_ts_point(item_node.start_position())
3973 ..Point::from_ts_point(item_node.end_position());
3974
3975 let mut open_point = None;
3976 let mut close_point = None;
3977
3978 let mut buffer_ranges = Vec::new();
3979 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
3980 let mut range = node.start_byte()..node.end_byte();
3981 let start = node.start_position();
3982 if node.end_position().row > start.row {
3983 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3984 }
3985
3986 if !range.is_empty() {
3987 buffer_ranges.push((range, node_is_name));
3988 }
3989 };
3990
3991 for capture in mat.captures {
3992 if capture.index == config.name_capture_ix {
3993 add_to_buffer_ranges(capture.node, true);
3994 } else if Some(capture.index) == config.context_capture_ix
3995 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3996 {
3997 add_to_buffer_ranges(capture.node, false);
3998 } else {
3999 if Some(capture.index) == config.open_capture_ix {
4000 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4001 } else if Some(capture.index) == config.close_capture_ix {
4002 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4003 }
4004 }
4005 }
4006
4007 if buffer_ranges.is_empty() {
4008 return None;
4009 }
4010 let source_range_for_text =
4011 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4012
4013 let mut text = String::new();
4014 let mut highlight_ranges = Vec::new();
4015 let mut name_ranges = Vec::new();
4016 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4017 let mut last_buffer_range_end = 0;
4018 for (buffer_range, is_name) in buffer_ranges {
4019 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4020 if space_added {
4021 text.push(' ');
4022 }
4023 let before_append_len = text.len();
4024 let mut offset = buffer_range.start;
4025 chunks.seek(buffer_range.clone());
4026 for mut chunk in chunks.by_ref() {
4027 if chunk.text.len() > buffer_range.end - offset {
4028 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4029 offset = buffer_range.end;
4030 } else {
4031 offset += chunk.text.len();
4032 }
4033 let style = chunk
4034 .syntax_highlight_id
4035 .zip(theme)
4036 .and_then(|(highlight, theme)| highlight.style(theme));
4037 if let Some(style) = style {
4038 let start = text.len();
4039 let end = start + chunk.text.len();
4040 highlight_ranges.push((start..end, style));
4041 }
4042 text.push_str(chunk.text);
4043 if offset >= buffer_range.end {
4044 break;
4045 }
4046 }
4047 if is_name {
4048 let after_append_len = text.len();
4049 let start = if space_added && !name_ranges.is_empty() {
4050 before_append_len - 1
4051 } else {
4052 before_append_len
4053 };
4054 name_ranges.push(start..after_append_len);
4055 }
4056 last_buffer_range_end = buffer_range.end;
4057 }
4058
4059 Some(OutlineItem {
4060 depth: 0, // We'll calculate the depth later
4061 range: item_point_range,
4062 source_range_for_text: source_range_for_text.to_point(self),
4063 text,
4064 highlight_ranges,
4065 name_ranges,
4066 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4067 annotation_range: None,
4068 })
4069 }
4070
4071 pub fn function_body_fold_ranges<T: ToOffset>(
4072 &self,
4073 within: Range<T>,
4074 ) -> impl Iterator<Item = Range<usize>> + '_ {
4075 self.text_object_ranges(within, TreeSitterOptions::default())
4076 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4077 }
4078
4079 /// For each grammar in the language, runs the provided
4080 /// [`tree_sitter::Query`] against the given range.
4081 pub fn matches(
4082 &self,
4083 range: Range<usize>,
4084 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4085 ) -> SyntaxMapMatches<'_> {
4086 self.syntax.matches(range, self, query)
4087 }
4088
4089 pub fn all_bracket_ranges(
4090 &self,
4091 range: Range<usize>,
4092 ) -> impl Iterator<Item = BracketMatch> + '_ {
4093 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4094 grammar.brackets_config.as_ref().map(|c| &c.query)
4095 });
4096 let configs = matches
4097 .grammars()
4098 .iter()
4099 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4100 .collect::<Vec<_>>();
4101
4102 iter::from_fn(move || {
4103 while let Some(mat) = matches.peek() {
4104 let mut open = None;
4105 let mut close = None;
4106 let config = &configs[mat.grammar_index];
4107 let pattern = &config.patterns[mat.pattern_index];
4108 for capture in mat.captures {
4109 if capture.index == config.open_capture_ix {
4110 open = Some(capture.node.byte_range());
4111 } else if capture.index == config.close_capture_ix {
4112 close = Some(capture.node.byte_range());
4113 }
4114 }
4115
4116 matches.advance();
4117
4118 let Some((open_range, close_range)) = open.zip(close) else {
4119 continue;
4120 };
4121
4122 let bracket_range = open_range.start..=close_range.end;
4123 if !bracket_range.overlaps(&range) {
4124 continue;
4125 }
4126
4127 return Some(BracketMatch {
4128 open_range,
4129 close_range,
4130 newline_only: pattern.newline_only,
4131 });
4132 }
4133 None
4134 })
4135 }
4136
4137 /// Returns bracket range pairs overlapping or adjacent to `range`
4138 pub fn bracket_ranges<T: ToOffset>(
4139 &self,
4140 range: Range<T>,
4141 ) -> impl Iterator<Item = BracketMatch> + '_ {
4142 // Find bracket pairs that *inclusively* contain the given range.
4143 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4144 self.all_bracket_ranges(range)
4145 .filter(|pair| !pair.newline_only)
4146 }
4147
4148 pub fn debug_variables_query<T: ToOffset>(
4149 &self,
4150 range: Range<T>,
4151 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4152 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4153
4154 let mut matches = self.syntax.matches_with_options(
4155 range.clone(),
4156 &self.text,
4157 TreeSitterOptions::default(),
4158 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4159 );
4160
4161 let configs = matches
4162 .grammars()
4163 .iter()
4164 .map(|grammar| grammar.debug_variables_config.as_ref())
4165 .collect::<Vec<_>>();
4166
4167 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4168
4169 iter::from_fn(move || {
4170 loop {
4171 while let Some(capture) = captures.pop() {
4172 if capture.0.overlaps(&range) {
4173 return Some(capture);
4174 }
4175 }
4176
4177 let mat = matches.peek()?;
4178
4179 let Some(config) = configs[mat.grammar_index].as_ref() else {
4180 matches.advance();
4181 continue;
4182 };
4183
4184 for capture in mat.captures {
4185 let Some(ix) = config
4186 .objects_by_capture_ix
4187 .binary_search_by_key(&capture.index, |e| e.0)
4188 .ok()
4189 else {
4190 continue;
4191 };
4192 let text_object = config.objects_by_capture_ix[ix].1;
4193 let byte_range = capture.node.byte_range();
4194
4195 let mut found = false;
4196 for (range, existing) in captures.iter_mut() {
4197 if existing == &text_object {
4198 range.start = range.start.min(byte_range.start);
4199 range.end = range.end.max(byte_range.end);
4200 found = true;
4201 break;
4202 }
4203 }
4204
4205 if !found {
4206 captures.push((byte_range, text_object));
4207 }
4208 }
4209
4210 matches.advance();
4211 }
4212 })
4213 }
4214
4215 pub fn text_object_ranges<T: ToOffset>(
4216 &self,
4217 range: Range<T>,
4218 options: TreeSitterOptions,
4219 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4220 let range =
4221 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4222
4223 let mut matches =
4224 self.syntax
4225 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4226 grammar.text_object_config.as_ref().map(|c| &c.query)
4227 });
4228
4229 let configs = matches
4230 .grammars()
4231 .iter()
4232 .map(|grammar| grammar.text_object_config.as_ref())
4233 .collect::<Vec<_>>();
4234
4235 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4236
4237 iter::from_fn(move || {
4238 loop {
4239 while let Some(capture) = captures.pop() {
4240 if capture.0.overlaps(&range) {
4241 return Some(capture);
4242 }
4243 }
4244
4245 let mat = matches.peek()?;
4246
4247 let Some(config) = configs[mat.grammar_index].as_ref() else {
4248 matches.advance();
4249 continue;
4250 };
4251
4252 for capture in mat.captures {
4253 let Some(ix) = config
4254 .text_objects_by_capture_ix
4255 .binary_search_by_key(&capture.index, |e| e.0)
4256 .ok()
4257 else {
4258 continue;
4259 };
4260 let text_object = config.text_objects_by_capture_ix[ix].1;
4261 let byte_range = capture.node.byte_range();
4262
4263 let mut found = false;
4264 for (range, existing) in captures.iter_mut() {
4265 if existing == &text_object {
4266 range.start = range.start.min(byte_range.start);
4267 range.end = range.end.max(byte_range.end);
4268 found = true;
4269 break;
4270 }
4271 }
4272
4273 if !found {
4274 captures.push((byte_range, text_object));
4275 }
4276 }
4277
4278 matches.advance();
4279 }
4280 })
4281 }
4282
4283 /// Returns enclosing bracket ranges containing the given range
4284 pub fn enclosing_bracket_ranges<T: ToOffset>(
4285 &self,
4286 range: Range<T>,
4287 ) -> impl Iterator<Item = BracketMatch> + '_ {
4288 let range = range.start.to_offset(self)..range.end.to_offset(self);
4289
4290 self.bracket_ranges(range.clone()).filter(move |pair| {
4291 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4292 })
4293 }
4294
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
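    ///
    /// An illustrative sketch (not compiled as a doc test); `snapshot` and the
    /// offsets are hypothetical:
    ///
    /// ```ignore
    /// // Only consider pairs whose open bracket starts before offset 10.
    /// let filter = |open: Range<usize>, _close: Range<usize>| open.start < 10;
    /// if let Some((open_range, close_range)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(12..14usize, Some(&filter))
    /// {
    ///     println!("open: {open_range:?}, close: {close_range:?}");
    /// }
    /// ```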
4298 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4299 &self,
4300 range: Range<T>,
4301 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4302 ) -> Option<(Range<usize>, Range<usize>)> {
4303 let range = range.start.to_offset(self)..range.end.to_offset(self);
4304
4305 // Get the ranges of the innermost pair of brackets.
4306 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4307
4308 for pair in self.enclosing_bracket_ranges(range) {
4309 if let Some(range_filter) = range_filter
4310 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4311 {
4312 continue;
4313 }
4314
4315 let len = pair.close_range.end - pair.open_range.start;
4316
4317 if let Some((existing_open, existing_close)) = &result {
4318 let existing_len = existing_close.end - existing_open.start;
4319 if len > existing_len {
4320 continue;
4321 }
4322 }
4323
4324 result = Some((pair.open_range, pair.close_range));
4325 }
4326
4327 result
4328 }
4329
    /// Returns offset ranges for any matches of the redaction query.
4331 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4332 /// will be run on the relevant section of the buffer.
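    ///
    /// An illustrative sketch (not compiled as a doc test), assuming `snapshot` is a
    /// parsed `BufferSnapshot` whose language defines a redaction query:
    ///
    /// ```ignore
    /// for range in snapshot.redacted_ranges(0..snapshot.len()) {
    ///     // `range` covers a span (e.g. a secret value) that the language's
    ///     // redaction query marks for redaction.
    /// }
    /// ```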
4333 pub fn redacted_ranges<T: ToOffset>(
4334 &self,
4335 range: Range<T>,
4336 ) -> impl Iterator<Item = Range<usize>> + '_ {
4337 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4338 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4339 grammar
4340 .redactions_config
4341 .as_ref()
4342 .map(|config| &config.query)
4343 });
4344
4345 let configs = syntax_matches
4346 .grammars()
4347 .iter()
4348 .map(|grammar| grammar.redactions_config.as_ref())
4349 .collect::<Vec<_>>();
4350
4351 iter::from_fn(move || {
4352 let redacted_range = syntax_matches
4353 .peek()
4354 .and_then(|mat| {
4355 configs[mat.grammar_index].and_then(|config| {
4356 mat.captures
4357 .iter()
4358 .find(|capture| capture.index == config.redaction_capture_ix)
4359 })
4360 })
4361 .map(|mat| mat.node.byte_range());
4362 syntax_matches.advance();
4363 redacted_range
4364 })
4365 }
4366
4367 pub fn injections_intersecting_range<T: ToOffset>(
4368 &self,
4369 range: Range<T>,
4370 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4371 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4372
4373 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4374 grammar
4375 .injection_config
4376 .as_ref()
4377 .map(|config| &config.query)
4378 });
4379
4380 let configs = syntax_matches
4381 .grammars()
4382 .iter()
4383 .map(|grammar| grammar.injection_config.as_ref())
4384 .collect::<Vec<_>>();
4385
4386 iter::from_fn(move || {
4387 let ranges = syntax_matches.peek().and_then(|mat| {
4388 let config = &configs[mat.grammar_index]?;
4389 let content_capture_range = mat.captures.iter().find_map(|capture| {
4390 if capture.index == config.content_capture_ix {
4391 Some(capture.node.byte_range())
4392 } else {
4393 None
4394 }
4395 })?;
4396 let language = self.language_at(content_capture_range.start)?;
4397 Some((content_capture_range, language))
4398 });
4399 syntax_matches.advance();
4400 ranges
4401 })
4402 }
4403
4404 pub fn runnable_ranges(
4405 &self,
4406 offset_range: Range<usize>,
4407 ) -> impl Iterator<Item = RunnableRange> + '_ {
4408 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4409 grammar.runnable_config.as_ref().map(|config| &config.query)
4410 });
4411
4412 let test_configs = syntax_matches
4413 .grammars()
4414 .iter()
4415 .map(|grammar| grammar.runnable_config.as_ref())
4416 .collect::<Vec<_>>();
4417
4418 iter::from_fn(move || {
4419 loop {
4420 let mat = syntax_matches.peek()?;
4421
4422 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4423 let mut run_range = None;
4424 let full_range = mat.captures.iter().fold(
4425 Range {
4426 start: usize::MAX,
4427 end: 0,
4428 },
4429 |mut acc, next| {
4430 let byte_range = next.node.byte_range();
4431 if acc.start > byte_range.start {
4432 acc.start = byte_range.start;
4433 }
4434 if acc.end < byte_range.end {
4435 acc.end = byte_range.end;
4436 }
4437 acc
4438 },
4439 );
4440 if full_range.start > full_range.end {
4441 // We did not find a full spanning range of this match.
4442 return None;
4443 }
4444 let extra_captures: SmallVec<[_; 1]> =
4445 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4446 test_configs
4447 .extra_captures
4448 .get(capture.index as usize)
4449 .cloned()
4450 .and_then(|tag_name| match tag_name {
4451 RunnableCapture::Named(name) => {
4452 Some((capture.node.byte_range(), name))
4453 }
4454 RunnableCapture::Run => {
4455 let _ = run_range.insert(capture.node.byte_range());
4456 None
4457 }
4458 })
4459 }));
4460 let run_range = run_range?;
4461 let tags = test_configs
4462 .query
4463 .property_settings(mat.pattern_index)
4464 .iter()
4465 .filter_map(|property| {
4466 if *property.key == *"tag" {
4467 property
4468 .value
4469 .as_ref()
4470 .map(|value| RunnableTag(value.to_string().into()))
4471 } else {
4472 None
4473 }
4474 })
4475 .collect();
4476 let extra_captures = extra_captures
4477 .into_iter()
4478 .map(|(range, name)| {
4479 (
4480 name.to_string(),
4481 self.text_for_range(range).collect::<String>(),
4482 )
4483 })
4484 .collect();
4485 // All tags should have the same range.
4486 Some(RunnableRange {
4487 run_range,
4488 full_range,
4489 runnable: Runnable {
4490 tags,
4491 language: mat.language,
4492 buffer: self.remote_id(),
4493 },
4494 extra_captures,
4495 buffer_id: self.remote_id(),
4496 })
4497 });
4498
4499 syntax_matches.advance();
4500 if test_range.is_some() {
                    // It's fine for us to short-circuit when `.peek()?` returns `None`. We don't want this
                    // iterator to return `None` just because a capture did not contain a run marker; in that
                    // case we loop around to the next capture.
4503 return test_range;
4504 }
4505 }
4506 })
4507 }
4508
    /// Returns the selections of peers intersecting the given range, optionally
    /// including the local replica's selections.
4510 #[allow(clippy::type_complexity)]
4511 pub fn selections_in_range(
4512 &self,
4513 range: Range<Anchor>,
4514 include_local: bool,
4515 ) -> impl Iterator<
4516 Item = (
4517 ReplicaId,
4518 bool,
4519 CursorShape,
4520 impl Iterator<Item = &Selection<Anchor>> + '_,
4521 ),
4522 > + '_ {
4523 self.remote_selections
4524 .iter()
4525 .filter(move |(replica_id, set)| {
4526 (include_local || **replica_id != self.text.replica_id())
4527 && !set.selections.is_empty()
4528 })
4529 .map(move |(replica_id, set)| {
4530 let start_ix = match set.selections.binary_search_by(|probe| {
4531 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4532 }) {
4533 Ok(ix) | Err(ix) => ix,
4534 };
4535 let end_ix = match set.selections.binary_search_by(|probe| {
4536 probe.start.cmp(&range.end, self).then(Ordering::Less)
4537 }) {
4538 Ok(ix) | Err(ix) => ix,
4539 };
4540
4541 (
4542 *replica_id,
4543 set.line_mode,
4544 set.cursor_shape,
4545 set.selections[start_ix..end_ix].iter(),
4546 )
4547 })
4548 }
4549
    /// Returns whether the buffer contains any diagnostics.
4551 pub fn has_diagnostics(&self) -> bool {
4552 !self.diagnostics.is_empty()
4553 }
4554
4555 /// Returns all the diagnostics intersecting the given range.
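    ///
    /// An illustrative sketch (not compiled as a doc test); `snapshot` is an assumed
    /// in-scope `BufferSnapshot` that already has diagnostics:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```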
4556 pub fn diagnostics_in_range<'a, T, O>(
4557 &'a self,
4558 search_range: Range<T>,
4559 reversed: bool,
4560 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4561 where
4562 T: 'a + Clone + ToOffset,
4563 O: 'a + FromAnchor,
4564 {
4565 let mut iterators: Vec<_> = self
4566 .diagnostics
4567 .iter()
4568 .map(|(_, collection)| {
4569 collection
4570 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4571 .peekable()
4572 })
4573 .collect();
4574
4575 std::iter::from_fn(move || {
4576 let (next_ix, _) = iterators
4577 .iter_mut()
4578 .enumerate()
4579 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4580 .min_by(|(_, a), (_, b)| {
4581 let cmp = a
4582 .range
4583 .start
4584 .cmp(&b.range.start, self)
4585 // when range is equal, sort by diagnostic severity
4586 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4587 // and stabilize order with group_id
4588 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4589 if reversed { cmp.reverse() } else { cmp }
4590 })?;
4591 iterators[next_ix]
4592 .next()
4593 .map(
4594 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4595 diagnostic,
4596 range: FromAnchor::from_anchor(&range.start, self)
4597 ..FromAnchor::from_anchor(&range.end, self),
4598 },
4599 )
4600 })
4601 }
4602
4603 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4604 /// should be used instead.
4605 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4606 &self.diagnostics
4607 }
4608
4609 /// Returns all the diagnostic groups associated with the given
4610 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4612 pub fn diagnostic_groups(
4613 &self,
4614 language_server_id: Option<LanguageServerId>,
4615 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4616 let mut groups = Vec::new();
4617
4618 if let Some(language_server_id) = language_server_id {
4619 if let Ok(ix) = self
4620 .diagnostics
4621 .binary_search_by_key(&language_server_id, |e| e.0)
4622 {
4623 self.diagnostics[ix]
4624 .1
4625 .groups(language_server_id, &mut groups, self);
4626 }
4627 } else {
4628 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4629 diagnostics.groups(*language_server_id, &mut groups, self);
4630 }
4631 }
4632
4633 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4634 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4635 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4636 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4637 });
4638
4639 groups
4640 }
4641
4642 /// Returns an iterator over the diagnostics for the given group.
4643 pub fn diagnostic_group<O>(
4644 &self,
4645 group_id: usize,
4646 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4647 where
4648 O: FromAnchor + 'static,
4649 {
4650 self.diagnostics
4651 .iter()
4652 .flat_map(move |(_, set)| set.group(group_id, self))
4653 }
4654
4655 /// An integer version number that accounts for all updates besides
4656 /// the buffer's text itself (which is versioned via a version vector).
4657 pub fn non_text_state_update_count(&self) -> usize {
4658 self.non_text_state_update_count
4659 }
4660
4661 /// An integer version that changes when the buffer's syntax changes.
4662 pub fn syntax_update_count(&self) -> usize {
4663 self.syntax.update_count()
4664 }
4665
    /// Returns a snapshot of the underlying file.
4667 pub fn file(&self) -> Option<&Arc<dyn File>> {
4668 self.file.as_ref()
4669 }
4670
4671 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4672 if let Some(file) = self.file() {
4673 if file.path().file_name().is_none() || include_root {
4674 Some(file.full_path(cx).to_string_lossy().into_owned())
4675 } else {
4676 Some(file.path().display(file.path_style(cx)).to_string())
4677 }
4678 } else {
4679 None
4680 }
4681 }
4682
4683 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4684 let query_str = query.fuzzy_contents;
4685 if query_str.is_some_and(|query| query.is_empty()) {
4686 return BTreeMap::default();
4687 }
4688
4689 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4690 language,
4691 override_id: None,
4692 }));
4693
4694 let mut query_ix = 0;
4695 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4696 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4697
4698 let mut words = BTreeMap::default();
4699 let mut current_word_start_ix = None;
4700 let mut chunk_ix = query.range.start;
4701 for chunk in self.chunks(query.range, false) {
4702 for (i, c) in chunk.text.char_indices() {
4703 let ix = chunk_ix + i;
4704 if classifier.is_word(c) {
4705 if current_word_start_ix.is_none() {
4706 current_word_start_ix = Some(ix);
4707 }
4708
4709 if let Some(query_chars) = &query_chars
4710 && query_ix < query_len
4711 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4712 {
4713 query_ix += 1;
4714 }
4715 continue;
4716 } else if let Some(word_start) = current_word_start_ix.take()
4717 && query_ix == query_len
4718 {
4719 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4720 let mut word_text = self.text_for_range(word_start..ix).peekable();
4721 let first_char = word_text
4722 .peek()
4723 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words and "words" that start with a digit.
4725 if !query.skip_digits
4726 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4727 {
4728 words.insert(word_text.collect(), word_range);
4729 }
4730 }
4731 query_ix = 0;
4732 }
4733 chunk_ix += chunk.text.len();
4734 }
4735
4736 words
4737 }
4738}
4739
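/// A query passed to [`BufferSnapshot::words_in_range`].
///
/// An illustrative sketch of usage (not compiled as a doc test); `snapshot` is an
/// assumed in-scope [`BufferSnapshot`]:
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// for (word, range) in &words {
///     println!("{word}: {:?}", range.to_offset(&snapshot));
/// }
/// ```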
4740pub struct WordsQuery<'a> {
    /// Only returns words that contain every character of the fuzzy string, in order, compared case-insensitively.
4742 pub fuzzy_contents: Option<&'a str>,
4743 /// Skips words that start with a digit.
4744 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4746 pub range: Range<usize>,
4747}
4748
4749fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4750 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4751}
4752
4753fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4754 let mut result = IndentSize::spaces(0);
4755 for c in text {
4756 let kind = match c {
4757 ' ' => IndentKind::Space,
4758 '\t' => IndentKind::Tab,
4759 _ => break,
4760 };
4761 if result.len == 0 {
4762 result.kind = kind;
4763 }
4764 result.len += 1;
4765 }
4766 result
4767}
4768
4769impl Clone for BufferSnapshot {
4770 fn clone(&self) -> Self {
4771 Self {
4772 text: self.text.clone(),
4773 syntax: self.syntax.clone(),
4774 file: self.file.clone(),
4775 remote_selections: self.remote_selections.clone(),
4776 diagnostics: self.diagnostics.clone(),
4777 language: self.language.clone(),
4778 non_text_state_update_count: self.non_text_state_update_count,
4779 }
4780 }
4781}
4782
4783impl Deref for BufferSnapshot {
4784 type Target = text::BufferSnapshot;
4785
4786 fn deref(&self) -> &Self::Target {
4787 &self.text
4788 }
4789}
4790
4791unsafe impl Send for BufferChunks<'_> {}
4792
4793impl<'a> BufferChunks<'a> {
4794 pub(crate) fn new(
4795 text: &'a Rope,
4796 range: Range<usize>,
4797 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4798 diagnostics: bool,
4799 buffer_snapshot: Option<&'a BufferSnapshot>,
4800 ) -> Self {
4801 let mut highlights = None;
4802 if let Some((captures, highlight_maps)) = syntax {
4803 highlights = Some(BufferChunkHighlights {
4804 captures,
4805 next_capture: None,
4806 stack: Default::default(),
4807 highlight_maps,
4808 })
4809 }
4810
4811 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4812 let chunks = text.chunks_in_range(range.clone());
4813
4814 let mut this = BufferChunks {
4815 range,
4816 buffer_snapshot,
4817 chunks,
4818 diagnostic_endpoints,
4819 error_depth: 0,
4820 warning_depth: 0,
4821 information_depth: 0,
4822 hint_depth: 0,
4823 unnecessary_depth: 0,
4824 underline: true,
4825 highlights,
4826 };
4827 this.initialize_diagnostic_endpoints();
4828 this
4829 }
4830
    /// Seeks to the given byte range in the buffer.
4832 pub fn seek(&mut self, range: Range<usize>) {
4833 let old_range = std::mem::replace(&mut self.range, range.clone());
4834 self.chunks.set_range(self.range.clone());
4835 if let Some(highlights) = self.highlights.as_mut() {
4836 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4837 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4838 highlights
4839 .stack
4840 .retain(|(end_offset, _)| *end_offset > range.start);
4841 if let Some(capture) = &highlights.next_capture
4842 && range.start >= capture.node.start_byte()
4843 {
4844 let next_capture_end = capture.node.end_byte();
4845 if range.start < next_capture_end {
4846 highlights.stack.push((
4847 next_capture_end,
4848 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4849 ));
4850 }
4851 highlights.next_capture.take();
4852 }
4853 } else if let Some(snapshot) = self.buffer_snapshot {
4854 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4855 *highlights = BufferChunkHighlights {
4856 captures,
4857 next_capture: None,
4858 stack: Default::default(),
4859 highlight_maps,
4860 };
4861 } else {
4862 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4863 // Seeking such BufferChunks is not supported.
4864 debug_assert!(
4865 false,
4866 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4867 );
4868 }
4869
4870 highlights.captures.set_byte_range(self.range.clone());
4871 self.initialize_diagnostic_endpoints();
4872 }
4873 }
4874
4875 fn initialize_diagnostic_endpoints(&mut self) {
4876 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4877 && let Some(buffer) = self.buffer_snapshot
4878 {
4879 let mut diagnostic_endpoints = Vec::new();
4880 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4881 diagnostic_endpoints.push(DiagnosticEndpoint {
4882 offset: entry.range.start,
4883 is_start: true,
4884 severity: entry.diagnostic.severity,
4885 is_unnecessary: entry.diagnostic.is_unnecessary,
4886 underline: entry.diagnostic.underline,
4887 });
4888 diagnostic_endpoints.push(DiagnosticEndpoint {
4889 offset: entry.range.end,
4890 is_start: false,
4891 severity: entry.diagnostic.severity,
4892 is_unnecessary: entry.diagnostic.is_unnecessary,
4893 underline: entry.diagnostic.underline,
4894 });
4895 }
4896 diagnostic_endpoints
4897 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4898 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4899 self.hint_depth = 0;
4900 self.error_depth = 0;
4901 self.warning_depth = 0;
4902 self.information_depth = 0;
4903 }
4904 }
4905
4906 /// The current byte offset in the buffer.
4907 pub fn offset(&self) -> usize {
4908 self.range.start
4909 }
4910
4911 pub fn range(&self) -> Range<usize> {
4912 self.range.clone()
4913 }
4914
4915 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4916 let depth = match endpoint.severity {
4917 DiagnosticSeverity::ERROR => &mut self.error_depth,
4918 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4919 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4920 DiagnosticSeverity::HINT => &mut self.hint_depth,
4921 _ => return,
4922 };
4923 if endpoint.is_start {
4924 *depth += 1;
4925 } else {
4926 *depth -= 1;
4927 }
4928
4929 if endpoint.is_unnecessary {
4930 if endpoint.is_start {
4931 self.unnecessary_depth += 1;
4932 } else {
4933 self.unnecessary_depth -= 1;
4934 }
4935 }
4936 }
4937
4938 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4939 if self.error_depth > 0 {
4940 Some(DiagnosticSeverity::ERROR)
4941 } else if self.warning_depth > 0 {
4942 Some(DiagnosticSeverity::WARNING)
4943 } else if self.information_depth > 0 {
4944 Some(DiagnosticSeverity::INFORMATION)
4945 } else if self.hint_depth > 0 {
4946 Some(DiagnosticSeverity::HINT)
4947 } else {
4948 None
4949 }
4950 }
4951
4952 fn current_code_is_unnecessary(&self) -> bool {
4953 self.unnecessary_depth > 0
4954 }
4955}
4956
4957impl<'a> Iterator for BufferChunks<'a> {
4958 type Item = Chunk<'a>;
4959
4960 fn next(&mut self) -> Option<Self::Item> {
4961 let mut next_capture_start = usize::MAX;
4962 let mut next_diagnostic_endpoint = usize::MAX;
4963
4964 if let Some(highlights) = self.highlights.as_mut() {
4965 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4966 if *parent_capture_end <= self.range.start {
4967 highlights.stack.pop();
4968 } else {
4969 break;
4970 }
4971 }
4972
4973 if highlights.next_capture.is_none() {
4974 highlights.next_capture = highlights.captures.next();
4975 }
4976
4977 while let Some(capture) = highlights.next_capture.as_ref() {
4978 if self.range.start < capture.node.start_byte() {
4979 next_capture_start = capture.node.start_byte();
4980 break;
4981 } else {
4982 let highlight_id =
4983 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4984 highlights
4985 .stack
4986 .push((capture.node.end_byte(), highlight_id));
4987 highlights.next_capture = highlights.captures.next();
4988 }
4989 }
4990 }
4991
4992 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4993 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4994 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4995 if endpoint.offset <= self.range.start {
4996 self.update_diagnostic_depths(endpoint);
4997 diagnostic_endpoints.next();
4998 self.underline = endpoint.underline;
4999 } else {
5000 next_diagnostic_endpoint = endpoint.offset;
5001 break;
5002 }
5003 }
5004 }
5005 self.diagnostic_endpoints = diagnostic_endpoints;
5006
5007 if let Some(ChunkBitmaps {
5008 text: chunk,
5009 chars: chars_map,
5010 tabs,
5011 }) = self.chunks.peek_with_bitmaps()
5012 {
5013 let chunk_start = self.range.start;
5014 let mut chunk_end = (self.chunks.offset() + chunk.len())
5015 .min(next_capture_start)
5016 .min(next_diagnostic_endpoint);
5017 let mut highlight_id = None;
5018 if let Some(highlights) = self.highlights.as_ref()
5019 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5020 {
5021 chunk_end = chunk_end.min(*parent_capture_end);
5022 highlight_id = Some(*parent_highlight_id);
5023 }
5024 let bit_start = chunk_start - self.chunks.offset();
5025 let bit_end = chunk_end - self.chunks.offset();
5026
5027 let slice = &chunk[bit_start..bit_end];
5028
5029 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5030 let tabs = (tabs >> bit_start) & mask;
5031 let chars = (chars_map >> bit_start) & mask;
5032
5033 self.range.start = chunk_end;
5034 if self.range.start == self.chunks.offset() + chunk.len() {
5035 self.chunks.next().unwrap();
5036 }
5037
5038 Some(Chunk {
5039 text: slice,
5040 syntax_highlight_id: highlight_id,
5041 underline: self.underline,
5042 diagnostic_severity: self.current_diagnostic_severity(),
5043 is_unnecessary: self.current_code_is_unnecessary(),
5044 tabs,
5045 chars,
5046 ..Chunk::default()
5047 })
5048 } else {
5049 None
5050 }
5051 }
5052}
5053
5054impl operation_queue::Operation for Operation {
5055 fn lamport_timestamp(&self) -> clock::Lamport {
5056 match self {
5057 Operation::Buffer(_) => {
5058 unreachable!("buffer operations should never be deferred at this layer")
5059 }
5060 Operation::UpdateDiagnostics {
5061 lamport_timestamp, ..
5062 }
5063 | Operation::UpdateSelections {
5064 lamport_timestamp, ..
5065 }
5066 | Operation::UpdateCompletionTriggers {
5067 lamport_timestamp, ..
5068 }
5069 | Operation::UpdateLineEnding {
5070 lamport_timestamp, ..
5071 } => *lamport_timestamp,
5072 }
5073 }
5074}
5075
5076impl Default for Diagnostic {
5077 fn default() -> Self {
5078 Self {
5079 source: Default::default(),
5080 source_kind: DiagnosticSourceKind::Other,
5081 code: None,
5082 code_description: None,
5083 severity: DiagnosticSeverity::ERROR,
5084 message: Default::default(),
5085 markdown: None,
5086 group_id: 0,
5087 is_primary: false,
5088 is_disk_based: false,
5089 is_unnecessary: false,
5090 underline: true,
5091 data: None,
5092 }
5093 }
5094}
5095
5096impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5098 pub fn spaces(len: u32) -> Self {
5099 Self {
5100 len,
5101 kind: IndentKind::Space,
5102 }
5103 }
5104
5105 /// Returns an [`IndentSize`] representing a tab.
5106 pub fn tab() -> Self {
5107 Self {
5108 len: 1,
5109 kind: IndentKind::Tab,
5110 }
5111 }
5112
5113 /// An iterator over the characters represented by this [`IndentSize`].
5114 pub fn chars(&self) -> impl Iterator<Item = char> {
5115 iter::repeat(self.char()).take(self.len as usize)
5116 }
5117
5118 /// The character representation of this [`IndentSize`].
5119 pub fn char(&self) -> char {
5120 match self.kind {
5121 IndentKind::Space => ' ',
5122 IndentKind::Tab => '\t',
5123 }
5124 }
5125
5126 /// Consumes the current [`IndentSize`] and returns a new one that has
5127 /// been shrunk or enlarged by the given size along the given direction.
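    ///
    /// A brief sketch (not compiled as a doc test):
    ///
    /// ```ignore
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 8);
    /// let indent = indent.with_delta(Ordering::Less, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 4);
    /// ```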
5128 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5129 match direction {
5130 Ordering::Less => {
5131 if self.kind == size.kind && self.len >= size.len {
5132 self.len -= size.len;
5133 }
5134 }
5135 Ordering::Equal => {}
5136 Ordering::Greater => {
5137 if self.len == 0 {
5138 self = size;
5139 } else if self.kind == size.kind {
5140 self.len += size.len;
5141 }
5142 }
5143 }
5144 self
5145 }
5146
5147 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5148 match self.kind {
5149 IndentKind::Space => self.len as usize,
5150 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5151 }
5152 }
5153}
5154
5155#[cfg(any(test, feature = "test-support"))]
5156pub struct TestFile {
5157 pub path: Arc<RelPath>,
5158 pub root_name: String,
5159 pub local_root: Option<PathBuf>,
5160}
5161
5162#[cfg(any(test, feature = "test-support"))]
5163impl File for TestFile {
5164 fn path(&self) -> &Arc<RelPath> {
5165 &self.path
5166 }
5167
5168 fn full_path(&self, _: &gpui::App) -> PathBuf {
5169 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5170 }
5171
5172 fn as_local(&self) -> Option<&dyn LocalFile> {
5173 if self.local_root.is_some() {
5174 Some(self)
5175 } else {
5176 None
5177 }
5178 }
5179
5180 fn disk_state(&self) -> DiskState {
5181 unimplemented!()
5182 }
5183
5184 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5185 self.path().file_name().unwrap_or(self.root_name.as_ref())
5186 }
5187
5188 fn worktree_id(&self, _: &App) -> WorktreeId {
5189 WorktreeId::from_usize(0)
5190 }
5191
5192 fn to_proto(&self, _: &App) -> rpc::proto::File {
5193 unimplemented!()
5194 }
5195
5196 fn is_private(&self) -> bool {
5197 false
5198 }
5199
5200 fn path_style(&self, _cx: &App) -> PathStyle {
5201 PathStyle::local()
5202 }
5203}
5204
5205#[cfg(any(test, feature = "test-support"))]
5206impl LocalFile for TestFile {
5207 fn abs_path(&self, _cx: &App) -> PathBuf {
5208 PathBuf::from(self.local_root.as_ref().unwrap())
5209 .join(&self.root_name)
5210 .join(self.path.as_std_path())
5211 }
5212
5213 fn load(&self, _cx: &App) -> Task<Result<String>> {
5214 unimplemented!()
5215 }
5216
5217 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5218 unimplemented!()
5219 }
5220}
5221
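/// Groups an iterator of `u32` values into contiguous ranges: each value that equals
/// the end of the current range extends it, up to `max_len` values per range; any
/// other value starts a new range.
///
/// An illustrative sketch (this helper is crate-private, so the example is not
/// compiled as a doc test):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```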
5222pub(crate) fn contiguous_ranges(
5223 values: impl Iterator<Item = u32>,
5224 max_len: usize,
5225) -> impl Iterator<Item = Range<u32>> {
5226 let mut values = values;
5227 let mut current_range: Option<Range<u32>> = None;
5228 std::iter::from_fn(move || {
5229 loop {
5230 if let Some(value) = values.next() {
5231 if let Some(range) = &mut current_range
5232 && value == range.end
5233 && range.len() < max_len
5234 {
5235 range.end += 1;
5236 continue;
5237 }
5238
5239 let prev_range = current_range.clone();
5240 current_range = Some(value..(value + 1));
5241 if prev_range.is_some() {
5242 return prev_range;
5243 }
5244 } else {
5245 return current_range.take();
5246 }
5247 }
5248 })
5249}
5250
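/// Classifies characters as word, whitespace, or punctuation characters, taking any
/// language-specific word characters from the given [`LanguageScope`] into account.
///
/// A quick illustration with no language scope (not compiled as a doc test):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('.'), CharKind::Punctuation);
/// ```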
5251#[derive(Default, Debug)]
5252pub struct CharClassifier {
5253 scope: Option<LanguageScope>,
5254 scope_context: Option<CharScopeContext>,
5255 ignore_punctuation: bool,
5256}
5257
5258impl CharClassifier {
5259 pub fn new(scope: Option<LanguageScope>) -> Self {
5260 Self {
5261 scope,
5262 scope_context: None,
5263 ignore_punctuation: false,
5264 }
5265 }
5266
5267 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5268 Self {
5269 scope_context,
5270 ..self
5271 }
5272 }
5273
5274 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5275 Self {
5276 ignore_punctuation,
5277 ..self
5278 }
5279 }
5280
5281 pub fn is_whitespace(&self, c: char) -> bool {
5282 self.kind(c) == CharKind::Whitespace
5283 }
5284
5285 pub fn is_word(&self, c: char) -> bool {
5286 self.kind(c) == CharKind::Word
5287 }
5288
5289 pub fn is_punctuation(&self, c: char) -> bool {
5290 self.kind(c) == CharKind::Punctuation
5291 }
5292
5293 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5294 if c.is_alphanumeric() || c == '_' {
5295 return CharKind::Word;
5296 }
5297
5298 if let Some(scope) = &self.scope {
5299 let characters = match self.scope_context {
5300 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5301 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5302 None => scope.word_characters(),
5303 };
5304 if let Some(characters) = characters
5305 && characters.contains(&c)
5306 {
5307 return CharKind::Word;
5308 }
5309 }
5310
5311 if c.is_whitespace() {
5312 return CharKind::Whitespace;
5313 }
5314
5315 if ignore_punctuation {
5316 CharKind::Word
5317 } else {
5318 CharKind::Punctuation
5319 }
5320 }
5321
5322 pub fn kind(&self, c: char) -> CharKind {
5323 self.kind_with(c, self.ignore_punctuation)
5324 }
5325}
5326
5327/// Find all of the ranges of whitespace that occur at the ends of lines
5328/// in the given rope.
5329///
5330/// This could also be done with a regex search, but this implementation
5331/// avoids copying text.
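///
/// # Example
///
/// An illustrative sketch (not compiled as a doc test):
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n    let x = 1;\t\n}\n");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..13, 28..29]);
/// ```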
5332pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5333 let mut ranges = Vec::new();
5334
5335 let mut offset = 0;
5336 let mut prev_chunk_trailing_whitespace_range = 0..0;
5337 for chunk in rope.chunks() {
5338 let mut prev_line_trailing_whitespace_range = 0..0;
5339 for (i, line) in chunk.split('\n').enumerate() {
5340 let line_end_offset = offset + line.len();
5341 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5342 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5343
5344 if i == 0 && trimmed_line_len == 0 {
5345 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5346 }
5347 if !prev_line_trailing_whitespace_range.is_empty() {
5348 ranges.push(prev_line_trailing_whitespace_range);
5349 }
5350
5351 offset = line_end_offset + 1;
5352 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5353 }
5354
5355 offset -= 1;
5356 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5357 }
5358
5359 if !prev_chunk_trailing_whitespace_range.is_empty() {
5360 ranges.push(prev_chunk_trailing_whitespace_range);
5361 }
5362
5363 ranges
5364}