1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
79/// Indicate whether a [`Buffer`] has permissions to edit.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
125 /// Memoize calls to has_changes_since(saved_version).
126 /// The contents of a cell are (self.version, has_changes) at the time of a last call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
177pub enum CursorShape {
178 /// A vertical bar
179 #[default]
180 Bar,
181 /// A block that surrounds the following character
182 Block,
183 /// An underline that runs along the following character
184 Underline,
185 /// A box drawn around the following character
186 Hollow,
187}
188
189impl From<settings::CursorShape> for CursorShape {
190 fn from(shape: settings::CursorShape) -> Self {
191 match shape {
192 settings::CursorShape::Bar => CursorShape::Bar,
193 settings::CursorShape::Block => CursorShape::Block,
194 settings::CursorShape::Underline => CursorShape::Underline,
195 settings::CursorShape::Hollow => CursorShape::Hollow,
196 }
197 }
198}
199
200#[derive(Clone, Debug)]
201struct SelectionSet {
202 line_mode: bool,
203 cursor_shape: CursorShape,
204 selections: Arc<[Selection<Anchor>]>,
205 lamport_timestamp: clock::Lamport,
206}
207
208/// A diagnostic associated with a certain range of a buffer.
209#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
210pub struct Diagnostic {
211 /// The name of the service that produced this diagnostic.
212 pub source: Option<String>,
213 /// A machine-readable code that identifies this diagnostic.
214 pub code: Option<NumberOrString>,
215 pub code_description: Option<lsp::Uri>,
216 /// Whether this diagnostic is a hint, warning, or error.
217 pub severity: DiagnosticSeverity,
218 /// The human-readable message associated with this diagnostic.
219 pub message: String,
220 /// The human-readable message (in markdown format)
221 pub markdown: Option<String>,
222 /// An id that identifies the group to which this diagnostic belongs.
223 ///
224 /// When a language server produces a diagnostic with
225 /// one or more associated diagnostics, those diagnostics are all
226 /// assigned a single group ID.
227 pub group_id: usize,
228 /// Whether this diagnostic is the primary diagnostic for its group.
229 ///
230 /// In a given group, the primary diagnostic is the top-level diagnostic
231 /// returned by the language server. The non-primary diagnostics are the
232 /// associated diagnostics.
233 pub is_primary: bool,
234 /// Whether this diagnostic is considered to originate from an analysis of
235 /// files on disk, as opposed to any unsaved buffer contents. This is a
236 /// property of a given diagnostic source, and is configured for a given
237 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
238 /// for the language server.
239 pub is_disk_based: bool,
240 /// Whether this diagnostic marks unnecessary code.
241 pub is_unnecessary: bool,
242 /// Quick separation of diagnostics groups based by their source.
243 pub source_kind: DiagnosticSourceKind,
244 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
245 pub data: Option<Value>,
246 /// Whether to underline the corresponding text range in the editor.
247 pub underline: bool,
248}
249
250#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
251pub enum DiagnosticSourceKind {
252 Pulled,
253 Pushed,
254 Other,
255}
256
257/// An operation used to synchronize this buffer with its other replicas.
258#[derive(Clone, Debug, PartialEq)]
259pub enum Operation {
260 /// A text operation.
261 Buffer(text::Operation),
262
263 /// An update to the buffer's diagnostics.
264 UpdateDiagnostics {
265 /// The id of the language server that produced the new diagnostics.
266 server_id: LanguageServerId,
267 /// The diagnostics.
268 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
269 /// The buffer's lamport timestamp.
270 lamport_timestamp: clock::Lamport,
271 },
272
273 /// An update to the most recent selections in this buffer.
274 UpdateSelections {
275 /// The selections.
276 selections: Arc<[Selection<Anchor>]>,
277 /// The buffer's lamport timestamp.
278 lamport_timestamp: clock::Lamport,
279 /// Whether the selections are in 'line mode'.
280 line_mode: bool,
281 /// The [`CursorShape`] associated with these selections.
282 cursor_shape: CursorShape,
283 },
284
285 /// An update to the characters that should trigger autocompletion
286 /// for this buffer.
287 UpdateCompletionTriggers {
288 /// The characters that trigger autocompletion.
289 triggers: Vec<String>,
290 /// The buffer's lamport timestamp.
291 lamport_timestamp: clock::Lamport,
292 /// The language server ID.
293 server_id: LanguageServerId,
294 },
295
296 /// An update to the line ending type of this buffer.
297 UpdateLineEnding {
298 /// The line ending type.
299 line_ending: LineEnding,
300 /// The buffer's lamport timestamp.
301 lamport_timestamp: clock::Lamport,
302 },
303}
304
305/// An event that occurs in a buffer.
306#[derive(Clone, Debug, PartialEq)]
307pub enum BufferEvent {
308 /// The buffer was changed in a way that must be
309 /// propagated to its other replicas.
310 Operation {
311 operation: Operation,
312 is_local: bool,
313 },
314 /// The buffer was edited.
315 Edited,
316 /// The buffer's `dirty` bit changed.
317 DirtyChanged,
318 /// The buffer was saved.
319 Saved,
320 /// The buffer's file was changed on disk.
321 FileHandleChanged,
322 /// The buffer was reloaded.
323 Reloaded,
324 /// The buffer is in need of a reload
325 ReloadNeeded,
326 /// The buffer's language was changed.
327 LanguageChanged,
328 /// The buffer's syntax trees were updated.
329 Reparsed,
330 /// The buffer's diagnostics were updated.
331 DiagnosticsUpdated,
332 /// The buffer gained or lost editing capabilities.
333 CapabilityChanged,
334}
335
336/// The file associated with a buffer.
337pub trait File: Send + Sync + Any {
338 /// Returns the [`LocalFile`] associated with this file, if the
339 /// file is local.
340 fn as_local(&self) -> Option<&dyn LocalFile>;
341
342 /// Returns whether this file is local.
343 fn is_local(&self) -> bool {
344 self.as_local().is_some()
345 }
346
347 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
348 /// only available in some states, such as modification time.
349 fn disk_state(&self) -> DiskState;
350
351 /// Returns the path of this file relative to the worktree's root directory.
352 fn path(&self) -> &Arc<Path>;
353
354 /// Returns the path of this file relative to the worktree's parent directory (this means it
355 /// includes the name of the worktree's root folder).
356 fn full_path(&self, cx: &App) -> PathBuf;
357
358 /// Returns the last component of this handle's absolute path. If this handle refers to the root
359 /// of its worktree, then this method will return the name of the worktree itself.
360 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
361
362 /// Returns the id of the worktree to which this file belongs.
363 ///
364 /// This is needed for looking up project-specific settings.
365 fn worktree_id(&self, cx: &App) -> WorktreeId;
366
367 /// Converts this file into a protobuf message.
368 fn to_proto(&self, cx: &App) -> rpc::proto::File;
369
370 /// Return whether Zed considers this to be a private file.
371 fn is_private(&self) -> bool;
372}
373
374/// The file's storage status - whether it's stored (`Present`), and if so when it was last
375/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
376/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
377/// indicator for new files.
378#[derive(Copy, Clone, Debug, PartialEq)]
379pub enum DiskState {
380 /// File created in Zed that has not been saved.
381 New,
382 /// File present on the filesystem.
383 Present { mtime: MTime },
384 /// Deleted file that was previously present.
385 Deleted,
386}
387
388impl DiskState {
389 /// Returns the file's last known modification time on disk.
390 pub fn mtime(self) -> Option<MTime> {
391 match self {
392 DiskState::New => None,
393 DiskState::Present { mtime } => Some(mtime),
394 DiskState::Deleted => None,
395 }
396 }
397
398 pub fn exists(&self) -> bool {
399 match self {
400 DiskState::New => false,
401 DiskState::Present { .. } => true,
402 DiskState::Deleted => false,
403 }
404 }
405}
406
407/// The file associated with a buffer, in the case where the file is on the local disk.
408pub trait LocalFile: File {
409 /// Returns the absolute path of this file
410 fn abs_path(&self, cx: &App) -> PathBuf;
411
412 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
413 fn load(&self, cx: &App) -> Task<Result<String>>;
414
415 /// Loads the file's contents from disk.
416 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
417}
418
419/// The auto-indent behavior associated with an editing operation.
420/// For some editing operations, each affected line of text has its
421/// indentation recomputed. For other operations, the entire block
422/// of edited text is adjusted uniformly.
423#[derive(Clone, Debug)]
424pub enum AutoindentMode {
425 /// Indent each line of inserted text.
426 EachLine,
427 /// Apply the same indentation adjustment to all of the lines
428 /// in a given insertion.
429 Block {
430 /// The original indentation column of the first line of each
431 /// insertion, if it has been copied.
432 ///
433 /// Knowing this makes it possible to preserve the relative indentation
434 /// of every line in the insertion from when it was copied.
435 ///
436 /// If the original indent column is `a`, and the first line of insertion
437 /// is then auto-indented to column `b`, then every other line of
438 /// the insertion will be auto-indented to column `b - a`
439 original_indent_columns: Vec<Option<u32>>,
440 },
441}
442
443#[derive(Clone)]
444struct AutoindentRequest {
445 before_edit: BufferSnapshot,
446 entries: Vec<AutoindentRequestEntry>,
447 is_block_mode: bool,
448 ignore_empty_lines: bool,
449}
450
451#[derive(Debug, Clone)]
452struct AutoindentRequestEntry {
453 /// A range of the buffer whose indentation should be adjusted.
454 range: Range<Anchor>,
455 /// Whether or not these lines should be considered brand new, for the
456 /// purpose of auto-indent. When text is not new, its indentation will
457 /// only be adjusted if the suggested indentation level has *changed*
458 /// since the edit was made.
459 first_line_is_new: bool,
460 indent_size: IndentSize,
461 original_indent_column: Option<u32>,
462}
463
464#[derive(Debug)]
465struct IndentSuggestion {
466 basis_row: u32,
467 delta: Ordering,
468 within_error: bool,
469}
470
471struct BufferChunkHighlights<'a> {
472 captures: SyntaxMapCaptures<'a>,
473 next_capture: Option<SyntaxMapCapture<'a>>,
474 stack: Vec<(usize, HighlightId)>,
475 highlight_maps: Vec<HighlightMap>,
476}
477
478/// An iterator that yields chunks of a buffer's text, along with their
479/// syntax highlights and diagnostic status.
480pub struct BufferChunks<'a> {
481 buffer_snapshot: Option<&'a BufferSnapshot>,
482 range: Range<usize>,
483 chunks: text::Chunks<'a>,
484 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
485 error_depth: usize,
486 warning_depth: usize,
487 information_depth: usize,
488 hint_depth: usize,
489 unnecessary_depth: usize,
490 underline: bool,
491 highlights: Option<BufferChunkHighlights<'a>>,
492}
493
494/// A chunk of a buffer's text, along with its syntax highlight and
495/// diagnostic status.
496#[derive(Clone, Debug, Default)]
497pub struct Chunk<'a> {
498 /// The text of the chunk.
499 pub text: &'a str,
500 /// The syntax highlighting style of the chunk.
501 pub syntax_highlight_id: Option<HighlightId>,
502 /// The highlight style that has been applied to this chunk in
503 /// the editor.
504 pub highlight_style: Option<HighlightStyle>,
505 /// The severity of diagnostic associated with this chunk, if any.
506 pub diagnostic_severity: Option<DiagnosticSeverity>,
507 /// Whether this chunk of text is marked as unnecessary.
508 pub is_unnecessary: bool,
509 /// Whether this chunk of text was originally a tab character.
510 pub is_tab: bool,
511 /// A bitset of which characters are tabs in this string.
512 pub tabs: u128,
513 /// Bitmap of character indices in this chunk
514 pub chars: u128,
515 /// Whether this chunk of text was originally a tab character.
516 pub is_inlay: bool,
517 /// Whether to underline the corresponding text range in the editor.
518 pub underline: bool,
519}
520
521/// A set of edits to a given version of a buffer, computed asynchronously.
522#[derive(Debug)]
523pub struct Diff {
524 pub base_version: clock::Global,
525 pub line_ending: LineEnding,
526 pub edits: Vec<(Range<usize>, Arc<str>)>,
527}
528
529#[derive(Debug, Clone, Copy)]
530pub(crate) struct DiagnosticEndpoint {
531 offset: usize,
532 is_start: bool,
533 underline: bool,
534 severity: DiagnosticSeverity,
535 is_unnecessary: bool,
536}
537
538/// A class of characters, used for characterizing a run of text.
539#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
540pub enum CharKind {
541 /// Whitespace.
542 Whitespace,
543 /// Punctuation.
544 Punctuation,
545 /// Word.
546 Word,
547}
548
549/// Context for character classification within a specific scope.
550#[derive(Copy, Clone, Eq, PartialEq, Debug)]
551pub enum CharScopeContext {
552 /// Character classification for completion queries.
553 ///
554 /// This context treats certain characters as word constituents that would
555 /// normally be considered punctuation, such as '-' in Tailwind classes
556 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
557 Completion,
558 /// Character classification for linked edits.
559 ///
560 /// This context handles characters that should be treated as part of
561 /// identifiers during linked editing operations, such as '.' in JSX
562 /// component names like `<Animated.View>`.
563 LinkedEdit,
564}
565
566/// A runnable is a set of data about a region that could be resolved into a task
567pub struct Runnable {
568 pub tags: SmallVec<[RunnableTag; 1]>,
569 pub language: Arc<Language>,
570 pub buffer: BufferId,
571}
572
573#[derive(Default, Clone, Debug)]
574pub struct HighlightedText {
575 pub text: SharedString,
576 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
577}
578
579#[derive(Default, Debug)]
580struct HighlightedTextBuilder {
581 pub text: String,
582 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
583}
584
585impl HighlightedText {
586 pub fn from_buffer_range<T: ToOffset>(
587 range: Range<T>,
588 snapshot: &text::BufferSnapshot,
589 syntax_snapshot: &SyntaxSnapshot,
590 override_style: Option<HighlightStyle>,
591 syntax_theme: &SyntaxTheme,
592 ) -> Self {
593 let mut highlighted_text = HighlightedTextBuilder::default();
594 highlighted_text.add_text_from_buffer_range(
595 range,
596 snapshot,
597 syntax_snapshot,
598 override_style,
599 syntax_theme,
600 );
601 highlighted_text.build()
602 }
603
604 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
605 gpui::StyledText::new(self.text.clone())
606 .with_default_highlights(default_style, self.highlights.iter().cloned())
607 }
608
609 /// Returns the first line without leading whitespace unless highlighted
610 /// and a boolean indicating if there are more lines after
611 pub fn first_line_preview(self) -> (Self, bool) {
612 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
613 let first_line = &self.text[..newline_ix];
614
615 // Trim leading whitespace, unless an edit starts prior to it.
616 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
617 if let Some((first_highlight_range, _)) = self.highlights.first() {
618 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
619 }
620
621 let preview_text = &first_line[preview_start_ix..];
622 let preview_highlights = self
623 .highlights
624 .into_iter()
625 .take_while(|(range, _)| range.start < newline_ix)
626 .filter_map(|(mut range, highlight)| {
627 range.start = range.start.saturating_sub(preview_start_ix);
628 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
629 if range.is_empty() {
630 None
631 } else {
632 Some((range, highlight))
633 }
634 });
635
636 let preview = Self {
637 text: SharedString::new(preview_text),
638 highlights: preview_highlights.collect(),
639 };
640
641 (preview, self.text.len() > newline_ix)
642 }
643}
644
645impl HighlightedTextBuilder {
646 pub fn build(self) -> HighlightedText {
647 HighlightedText {
648 text: self.text.into(),
649 highlights: self.highlights,
650 }
651 }
652
653 pub fn add_text_from_buffer_range<T: ToOffset>(
654 &mut self,
655 range: Range<T>,
656 snapshot: &text::BufferSnapshot,
657 syntax_snapshot: &SyntaxSnapshot,
658 override_style: Option<HighlightStyle>,
659 syntax_theme: &SyntaxTheme,
660 ) {
661 let range = range.to_offset(snapshot);
662 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
663 let start = self.text.len();
664 self.text.push_str(chunk.text);
665 let end = self.text.len();
666
667 if let Some(highlight_style) = chunk
668 .syntax_highlight_id
669 .and_then(|id| id.style(syntax_theme))
670 {
671 let highlight_style = override_style.map_or(highlight_style, |override_style| {
672 highlight_style.highlight(override_style)
673 });
674 self.highlights.push((start..end, highlight_style));
675 } else if let Some(override_style) = override_style {
676 self.highlights.push((start..end, override_style));
677 }
678 }
679 }
680
681 fn highlighted_chunks<'a>(
682 range: Range<usize>,
683 snapshot: &'a text::BufferSnapshot,
684 syntax_snapshot: &'a SyntaxSnapshot,
685 ) -> BufferChunks<'a> {
686 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
687 grammar
688 .highlights_config
689 .as_ref()
690 .map(|config| &config.query)
691 });
692
693 let highlight_maps = captures
694 .grammars()
695 .iter()
696 .map(|grammar| grammar.highlight_map())
697 .collect();
698
699 BufferChunks::new(
700 snapshot.as_rope(),
701 range,
702 Some((captures, highlight_maps)),
703 false,
704 None,
705 )
706 }
707}
708
709#[derive(Clone)]
710pub struct EditPreview {
711 old_snapshot: text::BufferSnapshot,
712 applied_edits_snapshot: text::BufferSnapshot,
713 syntax_snapshot: SyntaxSnapshot,
714}
715
716impl EditPreview {
717 pub fn highlight_edits(
718 &self,
719 current_snapshot: &BufferSnapshot,
720 edits: &[(Range<Anchor>, String)],
721 include_deletions: bool,
722 cx: &App,
723 ) -> HighlightedText {
724 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
725 return HighlightedText::default();
726 };
727
728 let mut highlighted_text = HighlightedTextBuilder::default();
729
730 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
731
732 let insertion_highlight_style = HighlightStyle {
733 background_color: Some(cx.theme().status().created_background),
734 ..Default::default()
735 };
736 let deletion_highlight_style = HighlightStyle {
737 background_color: Some(cx.theme().status().deleted_background),
738 ..Default::default()
739 };
740 let syntax_theme = cx.theme().syntax();
741
742 for (range, edit_text) in edits {
743 let edit_new_end_in_preview_snapshot = range
744 .end
745 .bias_right(&self.old_snapshot)
746 .to_offset(&self.applied_edits_snapshot);
747 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
748
749 let unchanged_range_in_preview_snapshot =
750 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
751 if !unchanged_range_in_preview_snapshot.is_empty() {
752 highlighted_text.add_text_from_buffer_range(
753 unchanged_range_in_preview_snapshot,
754 &self.applied_edits_snapshot,
755 &self.syntax_snapshot,
756 None,
757 syntax_theme,
758 );
759 }
760
761 let range_in_current_snapshot = range.to_offset(current_snapshot);
762 if include_deletions && !range_in_current_snapshot.is_empty() {
763 highlighted_text.add_text_from_buffer_range(
764 range_in_current_snapshot,
765 ¤t_snapshot.text,
766 ¤t_snapshot.syntax,
767 Some(deletion_highlight_style),
768 syntax_theme,
769 );
770 }
771
772 if !edit_text.is_empty() {
773 highlighted_text.add_text_from_buffer_range(
774 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
775 &self.applied_edits_snapshot,
776 &self.syntax_snapshot,
777 Some(insertion_highlight_style),
778 syntax_theme,
779 );
780 }
781
782 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
783 }
784
785 highlighted_text.add_text_from_buffer_range(
786 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
787 &self.applied_edits_snapshot,
788 &self.syntax_snapshot,
789 None,
790 syntax_theme,
791 );
792
793 highlighted_text.build()
794 }
795
796 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
797 let (first, _) = edits.first()?;
798 let (last, _) = edits.last()?;
799
800 let start = first
801 .start
802 .bias_left(&self.old_snapshot)
803 .to_point(&self.applied_edits_snapshot);
804 let end = last
805 .end
806 .bias_right(&self.old_snapshot)
807 .to_point(&self.applied_edits_snapshot);
808
809 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
810 let range = Point::new(start.row, 0)
811 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
812
813 Some(range.to_offset(&self.applied_edits_snapshot))
814 }
815}
816
817#[derive(Clone, Debug, PartialEq, Eq)]
818pub struct BracketMatch {
819 pub open_range: Range<usize>,
820 pub close_range: Range<usize>,
821 pub newline_only: bool,
822}
823
824impl Buffer {
825 /// Create a new buffer with the given base text.
826 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
827 Self::build(
828 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
829 None,
830 Capability::ReadWrite,
831 )
832 }
833
834 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
835 pub fn local_normalized(
836 base_text_normalized: Rope,
837 line_ending: LineEnding,
838 cx: &Context<Self>,
839 ) -> Self {
840 Self::build(
841 TextBuffer::new_normalized(
842 0,
843 cx.entity_id().as_non_zero_u64().into(),
844 line_ending,
845 base_text_normalized,
846 ),
847 None,
848 Capability::ReadWrite,
849 )
850 }
851
852 /// Create a new buffer that is a replica of a remote buffer.
853 pub fn remote(
854 remote_id: BufferId,
855 replica_id: ReplicaId,
856 capability: Capability,
857 base_text: impl Into<String>,
858 ) -> Self {
859 Self::build(
860 TextBuffer::new(replica_id, remote_id, base_text.into()),
861 None,
862 capability,
863 )
864 }
865
866 /// Create a new buffer that is a replica of a remote buffer, populating its
867 /// state from the given protobuf message.
868 pub fn from_proto(
869 replica_id: ReplicaId,
870 capability: Capability,
871 message: proto::BufferState,
872 file: Option<Arc<dyn File>>,
873 ) -> Result<Self> {
874 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
875 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
876 let mut this = Self::build(buffer, file, capability);
877 this.text.set_line_ending(proto::deserialize_line_ending(
878 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
879 ));
880 this.saved_version = proto::deserialize_version(&message.saved_version);
881 this.saved_mtime = message.saved_mtime.map(|time| time.into());
882 Ok(this)
883 }
884
885 /// Serialize the buffer's state to a protobuf message.
886 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
887 proto::BufferState {
888 id: self.remote_id().into(),
889 file: self.file.as_ref().map(|f| f.to_proto(cx)),
890 base_text: self.base_text().to_string(),
891 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
892 saved_version: proto::serialize_version(&self.saved_version),
893 saved_mtime: self.saved_mtime.map(|time| time.into()),
894 }
895 }
896
897 /// Serialize as protobufs all of the changes to the buffer since the given version.
898 pub fn serialize_ops(
899 &self,
900 since: Option<clock::Global>,
901 cx: &App,
902 ) -> Task<Vec<proto::Operation>> {
903 let mut operations = Vec::new();
904 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
905
906 operations.extend(self.remote_selections.iter().map(|(_, set)| {
907 proto::serialize_operation(&Operation::UpdateSelections {
908 selections: set.selections.clone(),
909 lamport_timestamp: set.lamport_timestamp,
910 line_mode: set.line_mode,
911 cursor_shape: set.cursor_shape,
912 })
913 }));
914
915 for (server_id, diagnostics) in &self.diagnostics {
916 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
917 lamport_timestamp: self.diagnostics_timestamp,
918 server_id: *server_id,
919 diagnostics: diagnostics.iter().cloned().collect(),
920 }));
921 }
922
923 for (server_id, completions) in &self.completion_triggers_per_language_server {
924 operations.push(proto::serialize_operation(
925 &Operation::UpdateCompletionTriggers {
926 triggers: completions.iter().cloned().collect(),
927 lamport_timestamp: self.completion_triggers_timestamp,
928 server_id: *server_id,
929 },
930 ));
931 }
932
933 let text_operations = self.text.operations().clone();
934 cx.background_spawn(async move {
935 let since = since.unwrap_or_default();
936 operations.extend(
937 text_operations
938 .iter()
939 .filter(|(_, op)| !since.observed(op.timestamp()))
940 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
941 );
942 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
943 operations
944 })
945 }
946
947 /// Assign a language to the buffer, returning the buffer.
948 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
949 self.set_language(Some(language), cx);
950 self
951 }
952
953 /// Returns the [`Capability`] of this buffer.
954 pub fn capability(&self) -> Capability {
955 self.capability
956 }
957
958 /// Whether this buffer can only be read.
959 pub fn read_only(&self) -> bool {
960 self.capability == Capability::ReadOnly
961 }
962
963 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
964 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
965 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
966 let snapshot = buffer.snapshot();
967 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
968 Self {
969 saved_mtime,
970 saved_version: buffer.version(),
971 preview_version: buffer.version(),
972 reload_task: None,
973 transaction_depth: 0,
974 was_dirty_before_starting_transaction: None,
975 has_unsaved_edits: Cell::new((buffer.version(), false)),
976 text: buffer,
977 branch_state: None,
978 file,
979 capability,
980 syntax_map,
981 reparse: None,
982 non_text_state_update_count: 0,
983 sync_parse_timeout: Duration::from_millis(1),
984 parse_status: watch::channel(ParseStatus::Idle),
985 autoindent_requests: Default::default(),
986 wait_for_autoindent_txs: Default::default(),
987 pending_autoindent: Default::default(),
988 language: None,
989 remote_selections: Default::default(),
990 diagnostics: Default::default(),
991 diagnostics_timestamp: Default::default(),
992 completion_triggers: Default::default(),
993 completion_triggers_per_language_server: Default::default(),
994 completion_triggers_timestamp: Default::default(),
995 deferred_ops: OperationQueue::new(),
996 has_conflict: false,
997 change_bits: Default::default(),
998 _subscriptions: Vec::new(),
999 }
1000 }
1001
1002 pub fn build_snapshot(
1003 text: Rope,
1004 language: Option<Arc<Language>>,
1005 language_registry: Option<Arc<LanguageRegistry>>,
1006 cx: &mut App,
1007 ) -> impl Future<Output = BufferSnapshot> + use<> {
1008 let entity_id = cx.reserve_entity::<Self>().entity_id();
1009 let buffer_id = entity_id.as_non_zero_u64().into();
1010 async move {
1011 let text =
1012 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1013 let mut syntax = SyntaxMap::new(&text).snapshot();
1014 if let Some(language) = language.clone() {
1015 let language_registry = language_registry.clone();
1016 syntax.reparse(&text, language_registry, language);
1017 }
1018 BufferSnapshot {
1019 text,
1020 syntax,
1021 file: None,
1022 diagnostics: Default::default(),
1023 remote_selections: Default::default(),
1024 language,
1025 non_text_state_update_count: 0,
1026 }
1027 }
1028 }
1029
1030 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1031 let entity_id = cx.reserve_entity::<Self>().entity_id();
1032 let buffer_id = entity_id.as_non_zero_u64().into();
1033 let text =
1034 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
1035 let syntax = SyntaxMap::new(&text).snapshot();
1036 BufferSnapshot {
1037 text,
1038 syntax,
1039 file: None,
1040 diagnostics: Default::default(),
1041 remote_selections: Default::default(),
1042 language: None,
1043 non_text_state_update_count: 0,
1044 }
1045 }
1046
1047 #[cfg(any(test, feature = "test-support"))]
1048 pub fn build_snapshot_sync(
1049 text: Rope,
1050 language: Option<Arc<Language>>,
1051 language_registry: Option<Arc<LanguageRegistry>>,
1052 cx: &mut App,
1053 ) -> BufferSnapshot {
1054 let entity_id = cx.reserve_entity::<Self>().entity_id();
1055 let buffer_id = entity_id.as_non_zero_u64().into();
1056 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1057 let mut syntax = SyntaxMap::new(&text).snapshot();
1058 if let Some(language) = language.clone() {
1059 syntax.reparse(&text, language_registry, language);
1060 }
1061 BufferSnapshot {
1062 text,
1063 syntax,
1064 file: None,
1065 diagnostics: Default::default(),
1066 remote_selections: Default::default(),
1067 language,
1068 non_text_state_update_count: 0,
1069 }
1070 }
1071
1072 /// Retrieve a snapshot of the buffer's current state. This is computationally
1073 /// cheap, and allows reading from the buffer on a background thread.
1074 pub fn snapshot(&self) -> BufferSnapshot {
1075 let text = self.text.snapshot();
1076 let mut syntax_map = self.syntax_map.lock();
1077 syntax_map.interpolate(&text);
1078 let syntax = syntax_map.snapshot();
1079
1080 BufferSnapshot {
1081 text,
1082 syntax,
1083 file: self.file.clone(),
1084 remote_selections: self.remote_selections.clone(),
1085 diagnostics: self.diagnostics.clone(),
1086 language: self.language.clone(),
1087 non_text_state_update_count: self.non_text_state_update_count,
1088 }
1089 }
1090
1091 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1092 let this = cx.entity();
1093 cx.new(|cx| {
1094 let mut branch = Self {
1095 branch_state: Some(BufferBranchState {
1096 base_buffer: this.clone(),
1097 merged_operations: Default::default(),
1098 }),
1099 language: self.language.clone(),
1100 has_conflict: self.has_conflict,
1101 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1102 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1103 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1104 };
1105 if let Some(language_registry) = self.language_registry() {
1106 branch.set_language_registry(language_registry);
1107 }
1108
1109 // Reparse the branch buffer so that we get syntax highlighting immediately.
1110 branch.reparse(cx);
1111
1112 branch
1113 })
1114 }
1115
1116 pub fn preview_edits(
1117 &self,
1118 edits: Arc<[(Range<Anchor>, String)]>,
1119 cx: &App,
1120 ) -> Task<EditPreview> {
1121 let registry = self.language_registry();
1122 let language = self.language().cloned();
1123 let old_snapshot = self.text.snapshot();
1124 let mut branch_buffer = self.text.branch();
1125 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1126 cx.background_spawn(async move {
1127 if !edits.is_empty() {
1128 if let Some(language) = language.clone() {
1129 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1130 }
1131
1132 branch_buffer.edit(edits.iter().cloned());
1133 let snapshot = branch_buffer.snapshot();
1134 syntax_snapshot.interpolate(&snapshot);
1135
1136 if let Some(language) = language {
1137 syntax_snapshot.reparse(&snapshot, registry, language);
1138 }
1139 }
1140 EditPreview {
1141 old_snapshot,
1142 applied_edits_snapshot: branch_buffer.snapshot(),
1143 syntax_snapshot,
1144 }
1145 })
1146 }
1147
1148 /// Applies all of the changes in this buffer that intersect any of the
1149 /// given `ranges` to its base buffer.
1150 ///
1151 /// If `ranges` is empty, then all changes will be applied. This buffer must
1152 /// be a branch buffer to call this method.
1153 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1154 let Some(base_buffer) = self.base_buffer() else {
1155 debug_panic!("not a branch buffer");
1156 return;
1157 };
1158
1159 let mut ranges = if ranges.is_empty() {
1160 &[0..usize::MAX]
1161 } else {
1162 ranges.as_slice()
1163 }
1164 .iter()
1165 .peekable();
1166
1167 let mut edits = Vec::new();
1168 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1169 let mut is_included = false;
1170 while let Some(range) = ranges.peek() {
1171 if range.end < edit.new.start {
1172 ranges.next().unwrap();
1173 } else {
1174 if range.start <= edit.new.end {
1175 is_included = true;
1176 }
1177 break;
1178 }
1179 }
1180
1181 if is_included {
1182 edits.push((
1183 edit.old.clone(),
1184 self.text_for_range(edit.new.clone()).collect::<String>(),
1185 ));
1186 }
1187 }
1188
1189 let operation = base_buffer.update(cx, |base_buffer, cx| {
1190 // cx.emit(BufferEvent::DiffBaseChanged);
1191 base_buffer.edit(edits, None, cx)
1192 });
1193
1194 if let Some(operation) = operation
1195 && let Some(BufferBranchState {
1196 merged_operations, ..
1197 }) = &mut self.branch_state
1198 {
1199 merged_operations.push(operation);
1200 }
1201 }
1202
1203 fn on_base_buffer_event(
1204 &mut self,
1205 _: Entity<Buffer>,
1206 event: &BufferEvent,
1207 cx: &mut Context<Self>,
1208 ) {
1209 let BufferEvent::Operation { operation, .. } = event else {
1210 return;
1211 };
1212 let Some(BufferBranchState {
1213 merged_operations, ..
1214 }) = &mut self.branch_state
1215 else {
1216 return;
1217 };
1218
1219 let mut operation_to_undo = None;
1220 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1221 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1222 {
1223 merged_operations.remove(ix);
1224 operation_to_undo = Some(operation.timestamp);
1225 }
1226
1227 self.apply_ops([operation.clone()], cx);
1228
1229 if let Some(timestamp) = operation_to_undo {
1230 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1231 self.undo_operations(counts, cx);
1232 }
1233 }
1234
1235 #[cfg(test)]
1236 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1237 &self.text
1238 }
1239
1240 /// Retrieve a snapshot of the buffer's raw text, without any
1241 /// language-related state like the syntax tree or diagnostics.
1242 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1243 self.text.snapshot()
1244 }
1245
1246 /// The file associated with the buffer, if any.
1247 pub fn file(&self) -> Option<&Arc<dyn File>> {
1248 self.file.as_ref()
1249 }
1250
1251 /// The version of the buffer that was last saved or reloaded from disk.
1252 pub fn saved_version(&self) -> &clock::Global {
1253 &self.saved_version
1254 }
1255
1256 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1257 pub fn saved_mtime(&self) -> Option<MTime> {
1258 self.saved_mtime
1259 }
1260
1261 /// Assign a language to the buffer.
1262 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1263 self.non_text_state_update_count += 1;
1264 self.syntax_map.lock().clear(&self.text);
1265 self.language = language;
1266 self.was_changed();
1267 self.reparse(cx);
1268 cx.emit(BufferEvent::LanguageChanged);
1269 }
1270
1271 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1272 /// other languages if parts of the buffer are written in different languages.
1273 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1274 self.syntax_map
1275 .lock()
1276 .set_language_registry(language_registry);
1277 }
1278
1279 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1280 self.syntax_map.lock().language_registry()
1281 }
1282
1283 /// Assign the line ending type to the buffer.
1284 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1285 self.text.set_line_ending(line_ending);
1286
1287 let lamport_timestamp = self.text.lamport_clock.tick();
1288 self.send_operation(
1289 Operation::UpdateLineEnding {
1290 line_ending,
1291 lamport_timestamp,
1292 },
1293 true,
1294 cx,
1295 );
1296 }
1297
1298 /// Assign the buffer a new [`Capability`].
1299 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1300 if self.capability != capability {
1301 self.capability = capability;
1302 cx.emit(BufferEvent::CapabilityChanged)
1303 }
1304 }
1305
1306 /// This method is called to signal that the buffer has been saved.
1307 pub fn did_save(
1308 &mut self,
1309 version: clock::Global,
1310 mtime: Option<MTime>,
1311 cx: &mut Context<Self>,
1312 ) {
1313 self.saved_version = version;
1314 self.has_unsaved_edits
1315 .set((self.saved_version().clone(), false));
1316 self.has_conflict = false;
1317 self.saved_mtime = mtime;
1318 self.was_changed();
1319 cx.emit(BufferEvent::Saved);
1320 cx.notify();
1321 }
1322
1323 /// Reloads the contents of the buffer from disk.
1324 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1325 let (tx, rx) = futures::channel::oneshot::channel();
1326 let prev_version = self.text.version();
1327 self.reload_task = Some(cx.spawn(async move |this, cx| {
1328 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1329 let file = this.file.as_ref()?.as_local()?;
1330
1331 Some((file.disk_state().mtime(), file.load(cx)))
1332 })?
1333 else {
1334 return Ok(());
1335 };
1336
1337 let new_text = new_text.await?;
1338 let diff = this
1339 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1340 .await;
1341 this.update(cx, |this, cx| {
1342 if this.version() == diff.base_version {
1343 this.finalize_last_transaction();
1344 this.apply_diff(diff, cx);
1345 tx.send(this.finalize_last_transaction().cloned()).ok();
1346 this.has_conflict = false;
1347 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1348 } else {
1349 if !diff.edits.is_empty()
1350 || this
1351 .edits_since::<usize>(&diff.base_version)
1352 .next()
1353 .is_some()
1354 {
1355 this.has_conflict = true;
1356 }
1357
1358 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1359 }
1360
1361 this.reload_task.take();
1362 })
1363 }));
1364 rx
1365 }
1366
1367 /// This method is called to signal that the buffer has been reloaded.
1368 pub fn did_reload(
1369 &mut self,
1370 version: clock::Global,
1371 line_ending: LineEnding,
1372 mtime: Option<MTime>,
1373 cx: &mut Context<Self>,
1374 ) {
1375 self.saved_version = version;
1376 self.has_unsaved_edits
1377 .set((self.saved_version.clone(), false));
1378 self.text.set_line_ending(line_ending);
1379 self.saved_mtime = mtime;
1380 cx.emit(BufferEvent::Reloaded);
1381 cx.notify();
1382 }
1383
1384 /// Updates the [`File`] backing this buffer. This should be called when
1385 /// the file has changed or has been deleted.
1386 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1387 let was_dirty = self.is_dirty();
1388 let mut file_changed = false;
1389
1390 if let Some(old_file) = self.file.as_ref() {
1391 if new_file.path() != old_file.path() {
1392 file_changed = true;
1393 }
1394
1395 let old_state = old_file.disk_state();
1396 let new_state = new_file.disk_state();
1397 if old_state != new_state {
1398 file_changed = true;
1399 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1400 cx.emit(BufferEvent::ReloadNeeded)
1401 }
1402 }
1403 } else {
1404 file_changed = true;
1405 };
1406
1407 self.file = Some(new_file);
1408 if file_changed {
1409 self.was_changed();
1410 self.non_text_state_update_count += 1;
1411 if was_dirty != self.is_dirty() {
1412 cx.emit(BufferEvent::DirtyChanged);
1413 }
1414 cx.emit(BufferEvent::FileHandleChanged);
1415 cx.notify();
1416 }
1417 }
1418
1419 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1420 Some(self.branch_state.as_ref()?.base_buffer.clone())
1421 }
1422
1423 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1424 pub fn language(&self) -> Option<&Arc<Language>> {
1425 self.language.as_ref()
1426 }
1427
1428 /// Returns the [`Language`] at the given location.
1429 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1430 let offset = position.to_offset(self);
1431 let mut is_first = true;
1432 let start_anchor = self.anchor_before(offset);
1433 let end_anchor = self.anchor_after(offset);
1434 self.syntax_map
1435 .lock()
1436 .layers_for_range(offset..offset, &self.text, false)
1437 .filter(|layer| {
1438 if is_first {
1439 is_first = false;
1440 return true;
1441 }
1442
1443 layer
1444 .included_sub_ranges
1445 .map(|sub_ranges| {
1446 sub_ranges.iter().any(|sub_range| {
1447 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1448 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1449 !is_before_start && !is_after_end
1450 })
1451 })
1452 .unwrap_or(true)
1453 })
1454 .last()
1455 .map(|info| info.language.clone())
1456 .or_else(|| self.language.clone())
1457 }
1458
1459 /// Returns each [`Language`] for the active syntax layers at the given location.
1460 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1461 let offset = position.to_offset(self);
1462 let mut languages: Vec<Arc<Language>> = self
1463 .syntax_map
1464 .lock()
1465 .layers_for_range(offset..offset, &self.text, false)
1466 .map(|info| info.language.clone())
1467 .collect();
1468
1469 if languages.is_empty()
1470 && let Some(buffer_language) = self.language()
1471 {
1472 languages.push(buffer_language.clone());
1473 }
1474
1475 languages
1476 }
1477
1478 /// An integer version number that accounts for all updates besides
1479 /// the buffer's text itself (which is versioned via a version vector).
1480 pub fn non_text_state_update_count(&self) -> usize {
1481 self.non_text_state_update_count
1482 }
1483
1484 /// Whether the buffer is being parsed in the background.
1485 #[cfg(any(test, feature = "test-support"))]
1486 pub fn is_parsing(&self) -> bool {
1487 self.reparse.is_some()
1488 }
1489
1490 /// Indicates whether the buffer contains any regions that may be
1491 /// written in a language that hasn't been loaded yet.
1492 pub fn contains_unknown_injections(&self) -> bool {
1493 self.syntax_map.lock().contains_unknown_injections()
1494 }
1495
1496 #[cfg(any(test, feature = "test-support"))]
1497 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1498 self.sync_parse_timeout = timeout;
1499 }
1500
1501 /// Called after an edit to synchronize the buffer's main parse tree with
1502 /// the buffer's new underlying state.
1503 ///
1504 /// Locks the syntax map and interpolates the edits since the last reparse
1505 /// into the foreground syntax tree.
1506 ///
1507 /// Then takes a stable snapshot of the syntax map before unlocking it.
1508 /// The snapshot with the interpolated edits is sent to a background thread,
1509 /// where we ask Tree-sitter to perform an incremental parse.
1510 ///
1511 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1512 /// waiting on the parse to complete. As soon as it completes, we proceed
1513 /// synchronously, unless a 1ms timeout elapses.
1514 ///
1515 /// If we time out waiting on the parse, we spawn a second task waiting
1516 /// until the parse does complete and return with the interpolated tree still
1517 /// in the foreground. When the background parse completes, call back into
1518 /// the main thread and assign the foreground parse state.
1519 ///
1520 /// If the buffer or grammar changed since the start of the background parse,
1521 /// initiate an additional reparse recursively. To avoid concurrent parses
1522 /// for the same buffer, we only initiate a new parse if we are not already
1523 /// parsing in the background.
1524 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1525 if self.reparse.is_some() {
1526 return;
1527 }
1528 let language = if let Some(language) = self.language.clone() {
1529 language
1530 } else {
1531 return;
1532 };
1533
1534 let text = self.text_snapshot();
1535 let parsed_version = self.version();
1536
1537 let mut syntax_map = self.syntax_map.lock();
1538 syntax_map.interpolate(&text);
1539 let language_registry = syntax_map.language_registry();
1540 let mut syntax_snapshot = syntax_map.snapshot();
1541 drop(syntax_map);
1542
1543 let parse_task = cx.background_spawn({
1544 let language = language.clone();
1545 let language_registry = language_registry.clone();
1546 async move {
1547 syntax_snapshot.reparse(&text, language_registry, language);
1548 syntax_snapshot
1549 }
1550 });
1551
1552 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1553 match cx
1554 .background_executor()
1555 .block_with_timeout(self.sync_parse_timeout, parse_task)
1556 {
1557 Ok(new_syntax_snapshot) => {
1558 self.did_finish_parsing(new_syntax_snapshot, cx);
1559 self.reparse = None;
1560 }
1561 Err(parse_task) => {
1562 self.reparse = Some(cx.spawn(async move |this, cx| {
1563 let new_syntax_map = parse_task.await;
1564 this.update(cx, move |this, cx| {
1565 let grammar_changed =
1566 this.language.as_ref().is_none_or(|current_language| {
1567 !Arc::ptr_eq(&language, current_language)
1568 });
1569 let language_registry_changed = new_syntax_map
1570 .contains_unknown_injections()
1571 && language_registry.is_some_and(|registry| {
1572 registry.version() != new_syntax_map.language_registry_version()
1573 });
1574 let parse_again = language_registry_changed
1575 || grammar_changed
1576 || this.version.changed_since(&parsed_version);
1577 this.did_finish_parsing(new_syntax_map, cx);
1578 this.reparse = None;
1579 if parse_again {
1580 this.reparse(cx);
1581 }
1582 })
1583 .ok();
1584 }));
1585 }
1586 }
1587 }
1588
1589 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1590 self.was_changed();
1591 self.non_text_state_update_count += 1;
1592 self.syntax_map.lock().did_parse(syntax_snapshot);
1593 self.request_autoindent(cx);
1594 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1595 cx.emit(BufferEvent::Reparsed);
1596 cx.notify();
1597 }
1598
1599 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1600 self.parse_status.1.clone()
1601 }
1602
1603 /// Assign to the buffer a set of diagnostics created by a given language server.
1604 pub fn update_diagnostics(
1605 &mut self,
1606 server_id: LanguageServerId,
1607 diagnostics: DiagnosticSet,
1608 cx: &mut Context<Self>,
1609 ) {
1610 let lamport_timestamp = self.text.lamport_clock.tick();
1611 let op = Operation::UpdateDiagnostics {
1612 server_id,
1613 diagnostics: diagnostics.iter().cloned().collect(),
1614 lamport_timestamp,
1615 };
1616
1617 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1618 self.send_operation(op, true, cx);
1619 }
1620
1621 pub fn buffer_diagnostics(
1622 &self,
1623 for_server: Option<LanguageServerId>,
1624 ) -> Vec<&DiagnosticEntry<Anchor>> {
1625 match for_server {
1626 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1627 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1628 Err(_) => Vec::new(),
1629 },
1630 None => self
1631 .diagnostics
1632 .iter()
1633 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1634 .collect(),
1635 }
1636 }
1637
1638 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1639 if let Some(indent_sizes) = self.compute_autoindents() {
1640 let indent_sizes = cx.background_spawn(indent_sizes);
1641 match cx
1642 .background_executor()
1643 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1644 {
1645 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1646 Err(indent_sizes) => {
1647 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1648 let indent_sizes = indent_sizes.await;
1649 this.update(cx, |this, cx| {
1650 this.apply_autoindents(indent_sizes, cx);
1651 })
1652 .ok();
1653 }));
1654 }
1655 }
1656 } else {
1657 self.autoindent_requests.clear();
1658 for tx in self.wait_for_autoindent_txs.drain(..) {
1659 tx.send(()).ok();
1660 }
1661 }
1662 }
1663
1664 fn compute_autoindents(
1665 &self,
1666 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1667 let max_rows_between_yields = 100;
1668 let snapshot = self.snapshot();
1669 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1670 return None;
1671 }
1672
1673 let autoindent_requests = self.autoindent_requests.clone();
1674 Some(async move {
1675 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1676 for request in autoindent_requests {
1677 // Resolve each edited range to its row in the current buffer and in the
1678 // buffer before this batch of edits.
1679 let mut row_ranges = Vec::new();
1680 let mut old_to_new_rows = BTreeMap::new();
1681 let mut language_indent_sizes_by_new_row = Vec::new();
1682 for entry in &request.entries {
1683 let position = entry.range.start;
1684 let new_row = position.to_point(&snapshot).row;
1685 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1686 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1687
1688 if !entry.first_line_is_new {
1689 let old_row = position.to_point(&request.before_edit).row;
1690 old_to_new_rows.insert(old_row, new_row);
1691 }
1692 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1693 }
1694
1695 // Build a map containing the suggested indentation for each of the edited lines
1696 // with respect to the state of the buffer before these edits. This map is keyed
1697 // by the rows for these lines in the current state of the buffer.
1698 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1699 let old_edited_ranges =
1700 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1701 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1702 let mut language_indent_size = IndentSize::default();
1703 for old_edited_range in old_edited_ranges {
1704 let suggestions = request
1705 .before_edit
1706 .suggest_autoindents(old_edited_range.clone())
1707 .into_iter()
1708 .flatten();
1709 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1710 if let Some(suggestion) = suggestion {
1711 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1712
1713 // Find the indent size based on the language for this row.
1714 while let Some((row, size)) = language_indent_sizes.peek() {
1715 if *row > new_row {
1716 break;
1717 }
1718 language_indent_size = *size;
1719 language_indent_sizes.next();
1720 }
1721
1722 let suggested_indent = old_to_new_rows
1723 .get(&suggestion.basis_row)
1724 .and_then(|from_row| {
1725 Some(old_suggestions.get(from_row).copied()?.0)
1726 })
1727 .unwrap_or_else(|| {
1728 request
1729 .before_edit
1730 .indent_size_for_line(suggestion.basis_row)
1731 })
1732 .with_delta(suggestion.delta, language_indent_size);
1733 old_suggestions
1734 .insert(new_row, (suggested_indent, suggestion.within_error));
1735 }
1736 }
1737 yield_now().await;
1738 }
1739
1740 // Compute new suggestions for each line, but only include them in the result
1741 // if they differ from the old suggestion for that line.
1742 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1743 let mut language_indent_size = IndentSize::default();
1744 for (row_range, original_indent_column) in row_ranges {
1745 let new_edited_row_range = if request.is_block_mode {
1746 row_range.start..row_range.start + 1
1747 } else {
1748 row_range.clone()
1749 };
1750
1751 let suggestions = snapshot
1752 .suggest_autoindents(new_edited_row_range.clone())
1753 .into_iter()
1754 .flatten();
1755 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1756 if let Some(suggestion) = suggestion {
1757 // Find the indent size based on the language for this row.
1758 while let Some((row, size)) = language_indent_sizes.peek() {
1759 if *row > new_row {
1760 break;
1761 }
1762 language_indent_size = *size;
1763 language_indent_sizes.next();
1764 }
1765
1766 let suggested_indent = indent_sizes
1767 .get(&suggestion.basis_row)
1768 .copied()
1769 .map(|e| e.0)
1770 .unwrap_or_else(|| {
1771 snapshot.indent_size_for_line(suggestion.basis_row)
1772 })
1773 .with_delta(suggestion.delta, language_indent_size);
1774
1775 if old_suggestions.get(&new_row).is_none_or(
1776 |(old_indentation, was_within_error)| {
1777 suggested_indent != *old_indentation
1778 && (!suggestion.within_error || *was_within_error)
1779 },
1780 ) {
1781 indent_sizes.insert(
1782 new_row,
1783 (suggested_indent, request.ignore_empty_lines),
1784 );
1785 }
1786 }
1787 }
1788
1789 if let (true, Some(original_indent_column)) =
1790 (request.is_block_mode, original_indent_column)
1791 {
1792 let new_indent =
1793 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1794 *indent
1795 } else {
1796 snapshot.indent_size_for_line(row_range.start)
1797 };
1798 let delta = new_indent.len as i64 - original_indent_column as i64;
1799 if delta != 0 {
1800 for row in row_range.skip(1) {
1801 indent_sizes.entry(row).or_insert_with(|| {
1802 let mut size = snapshot.indent_size_for_line(row);
1803 if size.kind == new_indent.kind {
1804 match delta.cmp(&0) {
1805 Ordering::Greater => size.len += delta as u32,
1806 Ordering::Less => {
1807 size.len = size.len.saturating_sub(-delta as u32)
1808 }
1809 Ordering::Equal => {}
1810 }
1811 }
1812 (size, request.ignore_empty_lines)
1813 });
1814 }
1815 }
1816 }
1817
1818 yield_now().await;
1819 }
1820 }
1821
1822 indent_sizes
1823 .into_iter()
1824 .filter_map(|(row, (indent, ignore_empty_lines))| {
1825 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1826 None
1827 } else {
1828 Some((row, indent))
1829 }
1830 })
1831 .collect()
1832 })
1833 }
1834
1835 fn apply_autoindents(
1836 &mut self,
1837 indent_sizes: BTreeMap<u32, IndentSize>,
1838 cx: &mut Context<Self>,
1839 ) {
1840 self.autoindent_requests.clear();
1841 for tx in self.wait_for_autoindent_txs.drain(..) {
1842 tx.send(()).ok();
1843 }
1844
1845 let edits: Vec<_> = indent_sizes
1846 .into_iter()
1847 .filter_map(|(row, indent_size)| {
1848 let current_size = indent_size_for_line(self, row);
1849 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1850 })
1851 .collect();
1852
1853 let preserve_preview = self.preserve_preview();
1854 self.edit(edits, None, cx);
1855 if preserve_preview {
1856 self.refresh_preview();
1857 }
1858 }
1859
1860 /// Create a minimal edit that will cause the given row to be indented
1861 /// with the given size. After applying this edit, the length of the line
1862 /// will always be at least `new_size.len`.
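    ///
    /// A minimal sketch of how the returned edit might be applied (assuming a
    /// `buffer: &mut Buffer` and a `cx: &mut Context<Buffer>` are in scope):
    ///
    /// ```ignore
    /// // Re-indent row 3 from 2 spaces to 4 spaces.
    /// let current = IndentSize::spaces(2);
    /// let desired = IndentSize::spaces(4);
    /// if let Some((range, text)) = Buffer::edit_for_indent_size_adjustment(3, current, desired) {
    ///     // The edit inserts two spaces at the start of row 3.
    ///     buffer.edit([(range, text)], None, cx);
    /// }
    /// ```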
1863 pub fn edit_for_indent_size_adjustment(
1864 row: u32,
1865 current_size: IndentSize,
1866 new_size: IndentSize,
1867 ) -> Option<(Range<Point>, String)> {
1868 if new_size.kind == current_size.kind {
1869            match new_size.len.cmp(&current_size.len) {
1870 Ordering::Greater => {
1871 let point = Point::new(row, 0);
1872 Some((
1873 point..point,
1874 iter::repeat(new_size.char())
1875 .take((new_size.len - current_size.len) as usize)
1876 .collect::<String>(),
1877 ))
1878 }
1879
1880 Ordering::Less => Some((
1881 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1882 String::new(),
1883 )),
1884
1885 Ordering::Equal => None,
1886 }
1887 } else {
1888 Some((
1889 Point::new(row, 0)..Point::new(row, current_size.len),
1890 iter::repeat(new_size.char())
1891 .take(new_size.len as usize)
1892 .collect::<String>(),
1893 ))
1894 }
1895 }
1896
1897 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1898 /// and the given new text.
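    ///
    /// A minimal sketch of the intended flow, assuming an `Entity<Buffer>` handle named
    /// `buffer`, a `new_text: String`, and an async `cx` are available:
    ///
    /// ```ignore
    /// let diff = buffer.read_with(cx, |buffer, cx| buffer.diff(new_text, cx))?;
    /// let diff = diff.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     // Applies the computed edits, adjusting for any edits made in the meantime.
    ///     buffer.apply_diff(diff, cx);
    /// })?;
    /// ```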
1899 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1900 let old_text = self.as_rope().clone();
1901 let base_version = self.version();
1902 cx.background_executor()
1903 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1904 let old_text = old_text.to_string();
1905 let line_ending = LineEnding::detect(&new_text);
1906 LineEnding::normalize(&mut new_text);
1907 let edits = text_diff(&old_text, &new_text);
1908 Diff {
1909 base_version,
1910 line_ending,
1911 edits,
1912 }
1913 })
1914 }
1915
1916 /// Spawns a background task that searches the buffer for any whitespace
1917    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
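    ///
    /// The returned `Diff` can be applied just like one produced by [`Buffer::diff`].
    /// A minimal sketch, assuming an `Entity<Buffer>` handle named `buffer` and an
    /// async `cx`:
    ///
    /// ```ignore
    /// let diff = buffer.read_with(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx))?;
    /// let diff = diff.await;
    /// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx))?;
    /// ```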
1918 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1919 let old_text = self.as_rope().clone();
1920 let line_ending = self.line_ending();
1921 let base_version = self.version();
1922 cx.background_spawn(async move {
1923 let ranges = trailing_whitespace_ranges(&old_text);
1924 let empty = Arc::<str>::from("");
1925 Diff {
1926 base_version,
1927 line_ending,
1928 edits: ranges
1929 .into_iter()
1930 .map(|range| (range, empty.clone()))
1931 .collect(),
1932 }
1933 })
1934 }
1935
1936 /// Ensures that the buffer ends with a single newline character, and
1937    /// no other whitespace. Does nothing if the buffer is empty.
1938 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1939 let len = self.len();
1940 if len == 0 {
1941 return;
1942 }
1943 let mut offset = len;
1944 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1945 let non_whitespace_len = chunk
1946 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1947 .len();
1948 offset -= chunk.len();
1949 offset += non_whitespace_len;
1950 if non_whitespace_len != 0 {
1951 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1952 return;
1953 }
1954 break;
1955 }
1956 }
1957 self.edit([(offset..len, "\n")], None, cx);
1958 }
1959
1960 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1961    /// calculated, the diff is adjusted to account for those changes, and any parts
1962    /// of the diff that conflict with those changes are discarded.
1963 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1964 let snapshot = self.snapshot();
1965 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1966 let mut delta = 0;
1967 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1968 while let Some(edit_since) = edits_since.peek() {
1969 // If the edit occurs after a diff hunk, then it does not
1970 // affect that hunk.
1971 if edit_since.old.start > range.end {
1972 break;
1973 }
1974 // If the edit precedes the diff hunk, then adjust the hunk
1975 // to reflect the edit.
1976 else if edit_since.old.end < range.start {
1977 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1978 edits_since.next();
1979 }
1980 // If the edit intersects a diff hunk, then discard that hunk.
1981 else {
1982 return None;
1983 }
1984 }
1985
1986 let start = (range.start as i64 + delta) as usize;
1987 let end = (range.end as i64 + delta) as usize;
1988 Some((start..end, new_text))
1989 });
1990
1991 self.start_transaction();
1992 self.text.set_line_ending(diff.line_ending);
1993 self.edit(adjusted_edits, None, cx);
1994 self.end_transaction(cx)
1995 }
1996
1997 fn has_unsaved_edits(&self) -> bool {
1998 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1999
2000 if last_version == self.version {
2001 self.has_unsaved_edits
2002 .set((last_version, has_unsaved_edits));
2003 return has_unsaved_edits;
2004 }
2005
2006 let has_edits = self.has_edits_since(&self.saved_version);
2007 self.has_unsaved_edits
2008 .set((self.version.clone(), has_edits));
2009 has_edits
2010 }
2011
2012 /// Checks if the buffer has unsaved changes.
2013 pub fn is_dirty(&self) -> bool {
2014 if self.capability == Capability::ReadOnly {
2015 return false;
2016 }
2017 if self.has_conflict {
2018 return true;
2019 }
2020 match self.file.as_ref().map(|f| f.disk_state()) {
2021 Some(DiskState::New) | Some(DiskState::Deleted) => {
2022 !self.is_empty() && self.has_unsaved_edits()
2023 }
2024 _ => self.has_unsaved_edits(),
2025 }
2026 }
2027
2028 /// Checks if the buffer and its file have both changed since the buffer
2029 /// was last saved or reloaded.
2030 pub fn has_conflict(&self) -> bool {
2031 if self.has_conflict {
2032 return true;
2033 }
2034 let Some(file) = self.file.as_ref() else {
2035 return false;
2036 };
2037 match file.disk_state() {
2038 DiskState::New => false,
2039 DiskState::Present { mtime } => match self.saved_mtime {
2040 Some(saved_mtime) => {
2041 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2042 }
2043 None => true,
2044 },
2045 DiskState::Deleted => false,
2046 }
2047 }
2048
2049 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2050 pub fn subscribe(&mut self) -> Subscription {
2051 self.text.subscribe()
2052 }
2053
2054 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2055 ///
2056 /// This allows downstream code to check if the buffer's text has changed without
2057    /// waiting for an effect cycle, which would be required if using events.
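    ///
    /// A minimal sketch of the intended usage; the caller owns the `Rc`, while the
    /// buffer only keeps a weak reference to it:
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...later, after edits may have been applied...
    /// if changed.take() {
    ///     // The buffer's text changed since the bit was last cleared.
    /// }
    /// ```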
2058 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2059 if let Err(ix) = self
2060 .change_bits
2061 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2062 {
2063 self.change_bits.insert(ix, bit);
2064 }
2065 }
2066
2067 fn was_changed(&mut self) {
2068 self.change_bits.retain(|change_bit| {
2069 change_bit.upgrade().is_some_and(|bit| {
2070 bit.replace(true);
2071 true
2072 })
2073 });
2074 }
2075
2076    /// Starts a transaction, if one is not already in progress. When undoing or
2077 /// redoing edits, all of the edits performed within a transaction are undone
2078 /// or redone together.
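    ///
    /// A minimal sketch of grouping several edits so that they undo together
    /// (assuming a `buffer: &mut Buffer` and a `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// let transaction_id = buffer.end_transaction(cx);
    /// // A single undo now reverts both edits.
    /// buffer.undo(cx);
    /// ```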
2079 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2080 self.start_transaction_at(Instant::now())
2081 }
2082
2083 /// Starts a transaction, providing the current time. Subsequent transactions
2084 /// that occur within a short period of time will be grouped together. This
2085 /// is controlled by the buffer's undo grouping duration.
2086 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2087 self.transaction_depth += 1;
2088 if self.was_dirty_before_starting_transaction.is_none() {
2089 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2090 }
2091 self.text.start_transaction_at(now)
2092 }
2093
2094 /// Terminates the current transaction, if this is the outermost transaction.
2095 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2096 self.end_transaction_at(Instant::now(), cx)
2097 }
2098
2099 /// Terminates the current transaction, providing the current time. Subsequent transactions
2100 /// that occur within a short period of time will be grouped together. This
2101 /// is controlled by the buffer's undo grouping duration.
2102 pub fn end_transaction_at(
2103 &mut self,
2104 now: Instant,
2105 cx: &mut Context<Self>,
2106 ) -> Option<TransactionId> {
2107 assert!(self.transaction_depth > 0);
2108 self.transaction_depth -= 1;
2109 let was_dirty = if self.transaction_depth == 0 {
2110 self.was_dirty_before_starting_transaction.take().unwrap()
2111 } else {
2112 false
2113 };
2114 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2115 self.did_edit(&start_version, was_dirty, cx);
2116 Some(transaction_id)
2117 } else {
2118 None
2119 }
2120 }
2121
2122 /// Manually add a transaction to the buffer's undo history.
2123 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2124 self.text.push_transaction(transaction, now);
2125 }
2126
2127 /// Differs from `push_transaction` in that it does not clear the redo
2128 /// stack. Intended to be used to create a parent transaction to merge
2129 /// potential child transactions into.
2130 ///
2131 /// The caller is responsible for removing it from the undo history using
2132 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2133 /// are merged into this transaction, the caller is responsible for ensuring
2134 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2135 /// cleared is to create transactions with the usual `start_transaction` and
2136    /// `end_transaction` methods and merge the resulting transactions into
2137    /// the transaction created by this method.
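    ///
    /// A minimal sketch of the intended workflow (assuming a `buffer: &mut Buffer`
    /// and a `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "x")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     // No edits were merged, so remove the empty parent transaction again.
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```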
2138 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2139 self.text.push_empty_transaction(now)
2140 }
2141
2142 /// Prevent the last transaction from being grouped with any subsequent transactions,
2143    /// even if they occur within the buffer's undo grouping duration.
2144 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2145 self.text.finalize_last_transaction()
2146 }
2147
2148 /// Manually group all changes since a given transaction.
2149 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2150 self.text.group_until_transaction(transaction_id);
2151 }
2152
2153 /// Manually remove a transaction from the buffer's undo history
2154 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2155 self.text.forget_transaction(transaction_id)
2156 }
2157
2158 /// Retrieve a transaction from the buffer's undo history
2159 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2160 self.text.get_transaction(transaction_id)
2161 }
2162
2163 /// Manually merge two transactions in the buffer's undo history.
2164 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2165 self.text.merge_transactions(transaction, destination);
2166 }
2167
2168 /// Waits for the buffer to receive operations with the given timestamps.
2169 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2170 &mut self,
2171 edit_ids: It,
2172 ) -> impl Future<Output = Result<()>> + use<It> {
2173 self.text.wait_for_edits(edit_ids)
2174 }
2175
2176 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2177 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2178 &mut self,
2179 anchors: It,
2180 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2181 self.text.wait_for_anchors(anchors)
2182 }
2183
2184 /// Waits for the buffer to receive operations up to the given version.
2185 pub fn wait_for_version(
2186 &mut self,
2187 version: clock::Global,
2188 ) -> impl Future<Output = Result<()>> + use<> {
2189 self.text.wait_for_version(version)
2190 }
2191
2192 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2193    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2194 pub fn give_up_waiting(&mut self) {
2195 self.text.give_up_waiting();
2196 }
2197
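    /// Returns a receiver that fires once all pending auto-indent requests have been
    /// applied, or `None` if no auto-indent requests are pending.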
2198 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2199 let mut rx = None;
2200 if !self.autoindent_requests.is_empty() {
2201 let channel = oneshot::channel();
2202 self.wait_for_autoindent_txs.push(channel.0);
2203 rx = Some(channel.1);
2204 }
2205 rx
2206 }
2207
2208 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2209 pub fn set_active_selections(
2210 &mut self,
2211 selections: Arc<[Selection<Anchor>]>,
2212 line_mode: bool,
2213 cursor_shape: CursorShape,
2214 cx: &mut Context<Self>,
2215 ) {
2216 let lamport_timestamp = self.text.lamport_clock.tick();
2217 self.remote_selections.insert(
2218 self.text.replica_id(),
2219 SelectionSet {
2220 selections: selections.clone(),
2221 lamport_timestamp,
2222 line_mode,
2223 cursor_shape,
2224 },
2225 );
2226 self.send_operation(
2227 Operation::UpdateSelections {
2228 selections,
2229 line_mode,
2230 lamport_timestamp,
2231 cursor_shape,
2232 },
2233 true,
2234 cx,
2235 );
2236 self.non_text_state_update_count += 1;
2237 cx.notify();
2238 }
2239
2240 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2241 /// this replica.
2242 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2243 if self
2244 .remote_selections
2245 .get(&self.text.replica_id())
2246 .is_none_or(|set| !set.selections.is_empty())
2247 {
2248 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2249 }
2250 }
2251
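    /// Stores the agent's selections under the reserved agent replica id. Unlike
    /// [`Buffer::set_active_selections`], these selections are not broadcast to other replicas.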
2252 pub fn set_agent_selections(
2253 &mut self,
2254 selections: Arc<[Selection<Anchor>]>,
2255 line_mode: bool,
2256 cursor_shape: CursorShape,
2257 cx: &mut Context<Self>,
2258 ) {
2259 let lamport_timestamp = self.text.lamport_clock.tick();
2260 self.remote_selections.insert(
2261 AGENT_REPLICA_ID,
2262 SelectionSet {
2263 selections,
2264 lamport_timestamp,
2265 line_mode,
2266 cursor_shape,
2267 },
2268 );
2269 self.non_text_state_update_count += 1;
2270 cx.notify();
2271 }
2272
2273 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2274 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2275 }
2276
2277 /// Replaces the buffer's entire text.
2278 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2279 where
2280 T: Into<Arc<str>>,
2281 {
2282 self.autoindent_requests.clear();
2283 self.edit([(0..self.len(), text)], None, cx)
2284 }
2285
2286 /// Appends the given text to the end of the buffer.
2287 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2288 where
2289 T: Into<Arc<str>>,
2290 {
2291 self.edit([(self.len()..self.len(), text)], None, cx)
2292 }
2293
2294 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2295 /// delete, and a string of text to insert at that location.
2296 ///
2297 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2298 /// request for the edited ranges, which will be processed when the buffer finishes
2299 /// parsing.
2300 ///
2301    /// Parsing takes place at the end of a transaction, and may happen synchronously
2302    /// or asynchronously, depending on the changes.
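    ///
    /// A minimal sketch (assuming a `buffer: &mut Buffer` and a `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// let len = buffer.len();
    /// // Replace the first three bytes and append a new line, letting the language's
    /// // indentation rules decide how the appended line is indented.
    /// buffer.edit(
    ///     [(0..3, "foo"), (len..len, "\nbar")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```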
2303 pub fn edit<I, S, T>(
2304 &mut self,
2305 edits_iter: I,
2306 autoindent_mode: Option<AutoindentMode>,
2307 cx: &mut Context<Self>,
2308 ) -> Option<clock::Lamport>
2309 where
2310 I: IntoIterator<Item = (Range<S>, T)>,
2311 S: ToOffset,
2312 T: Into<Arc<str>>,
2313 {
2314 // Skip invalid edits and coalesce contiguous ones.
2315 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2316
2317 for (range, new_text) in edits_iter {
2318 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2319
2320 if range.start > range.end {
2321 mem::swap(&mut range.start, &mut range.end);
2322 }
2323 let new_text = new_text.into();
2324 if !new_text.is_empty() || !range.is_empty() {
2325 if let Some((prev_range, prev_text)) = edits.last_mut()
2326 && prev_range.end >= range.start
2327 {
2328 prev_range.end = cmp::max(prev_range.end, range.end);
2329 *prev_text = format!("{prev_text}{new_text}").into();
2330 } else {
2331 edits.push((range, new_text));
2332 }
2333 }
2334 }
2335 if edits.is_empty() {
2336 return None;
2337 }
2338
2339 self.start_transaction();
2340 self.pending_autoindent.take();
2341 let autoindent_request = autoindent_mode
2342 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2343
2344 let edit_operation = self.text.edit(edits.iter().cloned());
2345 let edit_id = edit_operation.timestamp();
2346
2347 if let Some((before_edit, mode)) = autoindent_request {
2348 let mut delta = 0isize;
2349 let mut previous_setting = None;
2350 let entries: Vec<_> = edits
2351 .into_iter()
2352 .enumerate()
2353 .zip(&edit_operation.as_edit().unwrap().new_text)
2354 .filter(|((_, (range, _)), _)| {
2355 let language = before_edit.language_at(range.start);
2356 let language_id = language.map(|l| l.id());
2357 if let Some((cached_language_id, auto_indent)) = previous_setting
2358 && cached_language_id == language_id
2359 {
2360 auto_indent
2361 } else {
2362 // The auto-indent setting is not present in editorconfigs, hence
2363 // we can avoid passing the file here.
2364 let auto_indent =
2365 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2366 previous_setting = Some((language_id, auto_indent));
2367 auto_indent
2368 }
2369 })
2370 .map(|((ix, (range, _)), new_text)| {
2371 let new_text_length = new_text.len();
2372 let old_start = range.start.to_point(&before_edit);
2373 let new_start = (delta + range.start as isize) as usize;
2374 let range_len = range.end - range.start;
2375 delta += new_text_length as isize - range_len as isize;
2376
2377 // Decide what range of the insertion to auto-indent, and whether
2378 // the first line of the insertion should be considered a newly-inserted line
2379 // or an edit to an existing line.
2380 let mut range_of_insertion_to_indent = 0..new_text_length;
2381 let mut first_line_is_new = true;
2382
2383 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2384 let old_line_end = before_edit.line_len(old_start.row);
2385
2386 if old_start.column > old_line_start {
2387 first_line_is_new = false;
2388 }
2389
2390 if !new_text.contains('\n')
2391 && (old_start.column + (range_len as u32) < old_line_end
2392 || old_line_end == old_line_start)
2393 {
2394 first_line_is_new = false;
2395 }
2396
2397 // When inserting text starting with a newline, avoid auto-indenting the
2398 // previous line.
2399 if new_text.starts_with('\n') {
2400 range_of_insertion_to_indent.start += 1;
2401 first_line_is_new = true;
2402 }
2403
2404 let mut original_indent_column = None;
2405 if let AutoindentMode::Block {
2406 original_indent_columns,
2407 } = &mode
2408 {
2409 original_indent_column = Some(if new_text.starts_with('\n') {
2410 indent_size_for_text(
2411 new_text[range_of_insertion_to_indent.clone()].chars(),
2412 )
2413 .len
2414 } else {
2415 original_indent_columns
2416 .get(ix)
2417 .copied()
2418 .flatten()
2419 .unwrap_or_else(|| {
2420 indent_size_for_text(
2421 new_text[range_of_insertion_to_indent.clone()].chars(),
2422 )
2423 .len
2424 })
2425 });
2426
2427 // Avoid auto-indenting the line after the edit.
2428 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2429 range_of_insertion_to_indent.end -= 1;
2430 }
2431 }
2432
2433 AutoindentRequestEntry {
2434 first_line_is_new,
2435 original_indent_column,
2436 indent_size: before_edit.language_indent_size_at(range.start, cx),
2437 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2438 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2439 }
2440 })
2441 .collect();
2442
2443 if !entries.is_empty() {
2444 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2445 before_edit,
2446 entries,
2447 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2448 ignore_empty_lines: false,
2449 }));
2450 }
2451 }
2452
2453 self.end_transaction(cx);
2454 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2455 Some(edit_id)
2456 }
2457
2458 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2459 self.was_changed();
2460
2461 if self.edits_since::<usize>(old_version).next().is_none() {
2462 return;
2463 }
2464
2465 self.reparse(cx);
2466 cx.emit(BufferEvent::Edited);
2467 if was_dirty != self.is_dirty() {
2468 cx.emit(BufferEvent::DirtyChanged);
2469 }
2470 cx.notify();
2471 }
2472
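    /// Requests that the given ranges be re-indented according to the language's
    /// indentation rules, skipping lines that are empty.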
2473 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2474 where
2475 I: IntoIterator<Item = Range<T>>,
2476 T: ToOffset + Copy,
2477 {
2478 let before_edit = self.snapshot();
2479 let entries = ranges
2480 .into_iter()
2481 .map(|range| AutoindentRequestEntry {
2482 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2483 first_line_is_new: true,
2484 indent_size: before_edit.language_indent_size_at(range.start, cx),
2485 original_indent_column: None,
2486 })
2487 .collect();
2488 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2489 before_edit,
2490 entries,
2491 is_block_mode: false,
2492 ignore_empty_lines: true,
2493 }));
2494 self.request_autoindent(cx);
2495 }
2496
2497    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2498    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2499 pub fn insert_empty_line(
2500 &mut self,
2501 position: impl ToPoint,
2502 space_above: bool,
2503 space_below: bool,
2504 cx: &mut Context<Self>,
2505 ) -> Point {
2506 let mut position = position.to_point(self);
2507
2508 self.start_transaction();
2509
2510 self.edit(
2511 [(position..position, "\n")],
2512 Some(AutoindentMode::EachLine),
2513 cx,
2514 );
2515
2516 if position.column > 0 {
2517 position += Point::new(1, 0);
2518 }
2519
2520 if !self.is_line_blank(position.row) {
2521 self.edit(
2522 [(position..position, "\n")],
2523 Some(AutoindentMode::EachLine),
2524 cx,
2525 );
2526 }
2527
2528 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2529 self.edit(
2530 [(position..position, "\n")],
2531 Some(AutoindentMode::EachLine),
2532 cx,
2533 );
2534 position.row += 1;
2535 }
2536
2537 if space_below
2538 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2539 {
2540 self.edit(
2541 [(position..position, "\n")],
2542 Some(AutoindentMode::EachLine),
2543 cx,
2544 );
2545 }
2546
2547 self.end_transaction(cx);
2548
2549 position
2550 }
2551
2552 /// Applies the given remote operations to the buffer.
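    ///
    /// A minimal sketch of forwarding operations between two replicas, assuming
    /// `buffer_a` and `buffer_b` are `Entity<Buffer>` handles and `cx` is an `&mut App`
    /// (the exact subscription plumbing depends on the caller):
    ///
    /// ```ignore
    /// cx.subscribe(&buffer_a, move |_, event, cx| {
    ///     if let BufferEvent::Operation { operation, .. } = event {
    ///         buffer_b.update(cx, |buffer_b, cx| {
    ///             buffer_b.apply_ops([operation.clone()], cx);
    ///         });
    ///     }
    /// })
    /// .detach();
    /// ```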
2553 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2554 self.pending_autoindent.take();
2555 let was_dirty = self.is_dirty();
2556 let old_version = self.version.clone();
2557 let mut deferred_ops = Vec::new();
2558 let buffer_ops = ops
2559 .into_iter()
2560 .filter_map(|op| match op {
2561 Operation::Buffer(op) => Some(op),
2562 _ => {
2563 if self.can_apply_op(&op) {
2564 self.apply_op(op, cx);
2565 } else {
2566 deferred_ops.push(op);
2567 }
2568 None
2569 }
2570 })
2571 .collect::<Vec<_>>();
2572 for operation in buffer_ops.iter() {
2573 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2574 }
2575 self.text.apply_ops(buffer_ops);
2576 self.deferred_ops.insert(deferred_ops);
2577 self.flush_deferred_ops(cx);
2578 self.did_edit(&old_version, was_dirty, cx);
2579 // Notify independently of whether the buffer was edited as the operations could include a
2580 // selection update.
2581 cx.notify();
2582 }
2583
2584 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2585 let mut deferred_ops = Vec::new();
2586 for op in self.deferred_ops.drain().iter().cloned() {
2587 if self.can_apply_op(&op) {
2588 self.apply_op(op, cx);
2589 } else {
2590 deferred_ops.push(op);
2591 }
2592 }
2593 self.deferred_ops.insert(deferred_ops);
2594 }
2595
2596 pub fn has_deferred_ops(&self) -> bool {
2597 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2598 }
2599
2600 fn can_apply_op(&self, operation: &Operation) -> bool {
2601 match operation {
2602 Operation::Buffer(_) => {
2603 unreachable!("buffer operations should never be applied at this layer")
2604 }
2605 Operation::UpdateDiagnostics {
2606 diagnostics: diagnostic_set,
2607 ..
2608 } => diagnostic_set.iter().all(|diagnostic| {
2609 self.text.can_resolve(&diagnostic.range.start)
2610 && self.text.can_resolve(&diagnostic.range.end)
2611 }),
2612 Operation::UpdateSelections { selections, .. } => selections
2613 .iter()
2614 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2615 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2616 }
2617 }
2618
2619 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2620 match operation {
2621 Operation::Buffer(_) => {
2622 unreachable!("buffer operations should never be applied at this layer")
2623 }
2624 Operation::UpdateDiagnostics {
2625 server_id,
2626 diagnostics: diagnostic_set,
2627 lamport_timestamp,
2628 } => {
2629 let snapshot = self.snapshot();
2630 self.apply_diagnostic_update(
2631 server_id,
2632 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2633 lamport_timestamp,
2634 cx,
2635 );
2636 }
2637 Operation::UpdateSelections {
2638 selections,
2639 lamport_timestamp,
2640 line_mode,
2641 cursor_shape,
2642 } => {
2643 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2644 && set.lamport_timestamp > lamport_timestamp
2645 {
2646 return;
2647 }
2648
2649 self.remote_selections.insert(
2650 lamport_timestamp.replica_id,
2651 SelectionSet {
2652 selections,
2653 lamport_timestamp,
2654 line_mode,
2655 cursor_shape,
2656 },
2657 );
2658 self.text.lamport_clock.observe(lamport_timestamp);
2659 self.non_text_state_update_count += 1;
2660 }
2661 Operation::UpdateCompletionTriggers {
2662 triggers,
2663 lamport_timestamp,
2664 server_id,
2665 } => {
2666 if triggers.is_empty() {
2667 self.completion_triggers_per_language_server
2668 .remove(&server_id);
2669 self.completion_triggers = self
2670 .completion_triggers_per_language_server
2671 .values()
2672 .flat_map(|triggers| triggers.iter().cloned())
2673 .collect();
2674 } else {
2675 self.completion_triggers_per_language_server
2676 .insert(server_id, triggers.iter().cloned().collect());
2677 self.completion_triggers.extend(triggers);
2678 }
2679 self.text.lamport_clock.observe(lamport_timestamp);
2680 }
2681 Operation::UpdateLineEnding {
2682 line_ending,
2683 lamport_timestamp,
2684 } => {
2685 self.text.set_line_ending(line_ending);
2686 self.text.lamport_clock.observe(lamport_timestamp);
2687 }
2688 }
2689 }
2690
2691 fn apply_diagnostic_update(
2692 &mut self,
2693 server_id: LanguageServerId,
2694 diagnostics: DiagnosticSet,
2695 lamport_timestamp: clock::Lamport,
2696 cx: &mut Context<Self>,
2697 ) {
2698 if lamport_timestamp > self.diagnostics_timestamp {
2699 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2700 if diagnostics.is_empty() {
2701 if let Ok(ix) = ix {
2702 self.diagnostics.remove(ix);
2703 }
2704 } else {
2705 match ix {
2706 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2707 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2708 };
2709 }
2710 self.diagnostics_timestamp = lamport_timestamp;
2711 self.non_text_state_update_count += 1;
2712 self.text.lamport_clock.observe(lamport_timestamp);
2713 cx.notify();
2714 cx.emit(BufferEvent::DiagnosticsUpdated);
2715 }
2716 }
2717
2718 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2719 self.was_changed();
2720 cx.emit(BufferEvent::Operation {
2721 operation,
2722 is_local,
2723 });
2724 }
2725
2726 /// Removes the selections for a given peer.
2727 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2728 self.remote_selections.remove(&replica_id);
2729 cx.notify();
2730 }
2731
2732 /// Undoes the most recent transaction.
2733 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2734 let was_dirty = self.is_dirty();
2735 let old_version = self.version.clone();
2736
2737 if let Some((transaction_id, operation)) = self.text.undo() {
2738 self.send_operation(Operation::Buffer(operation), true, cx);
2739 self.did_edit(&old_version, was_dirty, cx);
2740 Some(transaction_id)
2741 } else {
2742 None
2743 }
2744 }
2745
2746 /// Manually undoes a specific transaction in the buffer's undo history.
2747 pub fn undo_transaction(
2748 &mut self,
2749 transaction_id: TransactionId,
2750 cx: &mut Context<Self>,
2751 ) -> bool {
2752 let was_dirty = self.is_dirty();
2753 let old_version = self.version.clone();
2754 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2755 self.send_operation(Operation::Buffer(operation), true, cx);
2756 self.did_edit(&old_version, was_dirty, cx);
2757 true
2758 } else {
2759 false
2760 }
2761 }
2762
2763 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2764 pub fn undo_to_transaction(
2765 &mut self,
2766 transaction_id: TransactionId,
2767 cx: &mut Context<Self>,
2768 ) -> bool {
2769 let was_dirty = self.is_dirty();
2770 let old_version = self.version.clone();
2771
2772 let operations = self.text.undo_to_transaction(transaction_id);
2773 let undone = !operations.is_empty();
2774 for operation in operations {
2775 self.send_operation(Operation::Buffer(operation), true, cx);
2776 }
2777 if undone {
2778 self.did_edit(&old_version, was_dirty, cx)
2779 }
2780 undone
2781 }
2782
2783 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2784 let was_dirty = self.is_dirty();
2785 let operation = self.text.undo_operations(counts);
2786 let old_version = self.version.clone();
2787 self.send_operation(Operation::Buffer(operation), true, cx);
2788 self.did_edit(&old_version, was_dirty, cx);
2789 }
2790
2791    /// Redoes the most recently undone transaction.
2792 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2793 let was_dirty = self.is_dirty();
2794 let old_version = self.version.clone();
2795
2796 if let Some((transaction_id, operation)) = self.text.redo() {
2797 self.send_operation(Operation::Buffer(operation), true, cx);
2798 self.did_edit(&old_version, was_dirty, cx);
2799 Some(transaction_id)
2800 } else {
2801 None
2802 }
2803 }
2804
2805    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2806 pub fn redo_to_transaction(
2807 &mut self,
2808 transaction_id: TransactionId,
2809 cx: &mut Context<Self>,
2810 ) -> bool {
2811 let was_dirty = self.is_dirty();
2812 let old_version = self.version.clone();
2813
2814 let operations = self.text.redo_to_transaction(transaction_id);
2815 let redone = !operations.is_empty();
2816 for operation in operations {
2817 self.send_operation(Operation::Buffer(operation), true, cx);
2818 }
2819 if redone {
2820 self.did_edit(&old_version, was_dirty, cx)
2821 }
2822 redone
2823 }
2824
2825 /// Override current completion triggers with the user-provided completion triggers.
2826 pub fn set_completion_triggers(
2827 &mut self,
2828 server_id: LanguageServerId,
2829 triggers: BTreeSet<String>,
2830 cx: &mut Context<Self>,
2831 ) {
2832 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2833 if triggers.is_empty() {
2834 self.completion_triggers_per_language_server
2835 .remove(&server_id);
2836 self.completion_triggers = self
2837 .completion_triggers_per_language_server
2838 .values()
2839 .flat_map(|triggers| triggers.iter().cloned())
2840 .collect();
2841 } else {
2842 self.completion_triggers_per_language_server
2843 .insert(server_id, triggers.clone());
2844 self.completion_triggers.extend(triggers.iter().cloned());
2845 }
2846 self.send_operation(
2847 Operation::UpdateCompletionTriggers {
2848 triggers: triggers.into_iter().collect(),
2849 lamport_timestamp: self.completion_triggers_timestamp,
2850 server_id,
2851 },
2852 true,
2853 cx,
2854 );
2855 cx.notify();
2856 }
2857
2858 /// Returns a list of strings which trigger a completion menu for this language.
2859    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2860 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2861 &self.completion_triggers
2862 }
2863
2864 /// Call this directly after performing edits to prevent the preview tab
2865 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2866 /// to return false until there are additional edits.
2867 pub fn refresh_preview(&mut self) {
2868 self.preview_version = self.version.clone();
2869 }
2870
2871 /// Whether we should preserve the preview status of a tab containing this buffer.
2872 pub fn preserve_preview(&self) -> bool {
2873 !self.has_edits_since(&self.preview_version)
2874 }
2875}
2876
2877#[doc(hidden)]
2878#[cfg(any(test, feature = "test-support"))]
2879impl Buffer {
2880 pub fn edit_via_marked_text(
2881 &mut self,
2882 marked_string: &str,
2883 autoindent_mode: Option<AutoindentMode>,
2884 cx: &mut Context<Self>,
2885 ) {
2886 let edits = self.edits_for_marked_text(marked_string);
2887 self.edit(edits, autoindent_mode, cx);
2888 }
2889
2890 pub fn set_group_interval(&mut self, group_interval: Duration) {
2891 self.text.set_group_interval(group_interval);
2892 }
2893
2894 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2895 where
2896 T: rand::Rng,
2897 {
2898 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2899 let mut last_end = None;
2900 for _ in 0..old_range_count {
2901 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2902 break;
2903 }
2904
2905 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2906 let mut range = self.random_byte_range(new_start, rng);
2907 if rng.random_bool(0.2) {
2908 mem::swap(&mut range.start, &mut range.end);
2909 }
2910 last_end = Some(range.end);
2911
2912 let new_text_len = rng.random_range(0..10);
2913 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2914 new_text = new_text.to_uppercase();
2915
2916 edits.push((range, new_text));
2917 }
2918 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2919 self.edit(edits, None, cx);
2920 }
2921
2922 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2923 let was_dirty = self.is_dirty();
2924 let old_version = self.version.clone();
2925
2926 let ops = self.text.randomly_undo_redo(rng);
2927 if !ops.is_empty() {
2928 for op in ops {
2929 self.send_operation(Operation::Buffer(op), true, cx);
2930 self.did_edit(&old_version, was_dirty, cx);
2931 }
2932 }
2933 }
2934}
2935
2936impl EventEmitter<BufferEvent> for Buffer {}
2937
2938impl Deref for Buffer {
2939 type Target = TextBuffer;
2940
2941 fn deref(&self) -> &Self::Target {
2942 &self.text
2943 }
2944}
2945
2946impl BufferSnapshot {
2947 /// Returns [`IndentSize`] for a given line that respects user settings and
2948 /// language preferences.
2949 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2950 indent_size_for_line(self, row)
2951 }
2952
2953 /// Returns [`IndentSize`] for a given position that respects user settings
2954 /// and language preferences.
2955 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2956 let settings = language_settings(
2957 self.language_at(position).map(|l| l.name()),
2958 self.file(),
2959 cx,
2960 );
2961 if settings.hard_tabs {
2962 IndentSize::tab()
2963 } else {
2964 IndentSize::spaces(settings.tab_size.get())
2965 }
2966 }
2967
2968 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2969 /// is passed in as `single_indent_size`.
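    ///
    /// A minimal sketch (assuming a `snapshot: BufferSnapshot` is in scope):
    ///
    /// ```ignore
    /// // Suggest indentation for rows 2 through 4, using four-space indents as the unit.
    /// let suggestions = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row} should be indented by {} columns", indent.len);
    /// }
    /// ```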
2970 pub fn suggested_indents(
2971 &self,
2972 rows: impl Iterator<Item = u32>,
2973 single_indent_size: IndentSize,
2974 ) -> BTreeMap<u32, IndentSize> {
2975 let mut result = BTreeMap::new();
2976
2977 for row_range in contiguous_ranges(rows, 10) {
2978 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2979 Some(suggestions) => suggestions,
2980 _ => break,
2981 };
2982
2983 for (row, suggestion) in row_range.zip(suggestions) {
2984 let indent_size = if let Some(suggestion) = suggestion {
2985 result
2986 .get(&suggestion.basis_row)
2987 .copied()
2988 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2989 .with_delta(suggestion.delta, single_indent_size)
2990 } else {
2991 self.indent_size_for_line(row)
2992 };
2993
2994 result.insert(row, indent_size);
2995 }
2996 }
2997
2998 result
2999 }
3000
3001 fn suggest_autoindents(
3002 &self,
3003 row_range: Range<u32>,
3004 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3005 let config = &self.language.as_ref()?.config;
3006 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3007
3008 #[derive(Debug, Clone)]
3009 struct StartPosition {
3010 start: Point,
3011 suffix: SharedString,
3012 }
3013
3014 // Find the suggested indentation ranges based on the syntax tree.
3015 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3016 let end = Point::new(row_range.end, 0);
3017 let range = (start..end).to_offset(&self.text);
3018 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3019 Some(&grammar.indents_config.as_ref()?.query)
3020 });
3021 let indent_configs = matches
3022 .grammars()
3023 .iter()
3024 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3025 .collect::<Vec<_>>();
3026
3027 let mut indent_ranges = Vec::<Range<Point>>::new();
3028 let mut start_positions = Vec::<StartPosition>::new();
3029 let mut outdent_positions = Vec::<Point>::new();
3030 while let Some(mat) = matches.peek() {
3031 let mut start: Option<Point> = None;
3032 let mut end: Option<Point> = None;
3033
3034 let config = indent_configs[mat.grammar_index];
3035 for capture in mat.captures {
3036 if capture.index == config.indent_capture_ix {
3037 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3038 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3039 } else if Some(capture.index) == config.start_capture_ix {
3040 start = Some(Point::from_ts_point(capture.node.end_position()));
3041 } else if Some(capture.index) == config.end_capture_ix {
3042 end = Some(Point::from_ts_point(capture.node.start_position()));
3043 } else if Some(capture.index) == config.outdent_capture_ix {
3044 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3045 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3046 start_positions.push(StartPosition {
3047 start: Point::from_ts_point(capture.node.start_position()),
3048 suffix: suffix.clone(),
3049 });
3050 }
3051 }
3052
3053 matches.advance();
3054 if let Some((start, end)) = start.zip(end) {
3055 if start.row == end.row {
3056 continue;
3057 }
3058 let range = start..end;
3059 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3060 Err(ix) => indent_ranges.insert(ix, range),
3061 Ok(ix) => {
3062 let prev_range = &mut indent_ranges[ix];
3063 prev_range.end = prev_range.end.max(range.end);
3064 }
3065 }
3066 }
3067 }
3068
3069 let mut error_ranges = Vec::<Range<Point>>::new();
3070 let mut matches = self
3071 .syntax
3072 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3073 while let Some(mat) = matches.peek() {
3074 let node = mat.captures[0].node;
3075 let start = Point::from_ts_point(node.start_position());
3076 let end = Point::from_ts_point(node.end_position());
3077 let range = start..end;
3078 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3079 Ok(ix) | Err(ix) => ix,
3080 };
3081 let mut end_ix = ix;
3082 while let Some(existing_range) = error_ranges.get(end_ix) {
3083 if existing_range.end < end {
3084 end_ix += 1;
3085 } else {
3086 break;
3087 }
3088 }
3089 error_ranges.splice(ix..end_ix, [range]);
3090 matches.advance();
3091 }
3092
3093 outdent_positions.sort();
3094 for outdent_position in outdent_positions {
3095            // Find the innermost indent range containing this outdent position
3096            // and set its end to the outdent position.
3097 if let Some(range_to_truncate) = indent_ranges
3098 .iter_mut()
3099 .filter(|indent_range| indent_range.contains(&outdent_position))
3100 .next_back()
3101 {
3102 range_to_truncate.end = outdent_position;
3103 }
3104 }
3105
3106 start_positions.sort_by_key(|b| b.start);
3107
3108        // Find the suggested indentation increases and decreases based on regexes.
3109 let mut regex_outdent_map = HashMap::default();
3110 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3111 let mut start_positions_iter = start_positions.iter().peekable();
3112
3113 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3114 self.for_each_line(
3115 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3116 ..Point::new(row_range.end, 0),
3117 |row, line| {
3118 if config
3119 .decrease_indent_pattern
3120 .as_ref()
3121 .is_some_and(|regex| regex.is_match(line))
3122 {
3123 indent_change_rows.push((row, Ordering::Less));
3124 }
3125 if config
3126 .increase_indent_pattern
3127 .as_ref()
3128 .is_some_and(|regex| regex.is_match(line))
3129 {
3130 indent_change_rows.push((row + 1, Ordering::Greater));
3131 }
3132 while let Some(pos) = start_positions_iter.peek() {
3133 if pos.start.row < row {
3134 let pos = start_positions_iter.next().unwrap();
3135 last_seen_suffix
3136 .entry(pos.suffix.to_string())
3137 .or_default()
3138 .push(pos.start);
3139 } else {
3140 break;
3141 }
3142 }
3143 for rule in &config.decrease_indent_patterns {
3144 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3145 let row_start_column = self.indent_size_for_line(row).len;
3146 let basis_row = rule
3147 .valid_after
3148 .iter()
3149 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3150 .flatten()
3151 .filter(|start_point| start_point.column <= row_start_column)
3152 .max_by_key(|start_point| start_point.row);
3153 if let Some(outdent_to_row) = basis_row {
3154 regex_outdent_map.insert(row, outdent_to_row.row);
3155 }
3156 break;
3157 }
3158 }
3159 },
3160 );
3161
3162 let mut indent_changes = indent_change_rows.into_iter().peekable();
3163 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3164 prev_non_blank_row.unwrap_or(0)
3165 } else {
3166 row_range.start.saturating_sub(1)
3167 };
3168
3169 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3170 Some(row_range.map(move |row| {
3171 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3172
3173 let mut indent_from_prev_row = false;
3174 let mut outdent_from_prev_row = false;
3175 let mut outdent_to_row = u32::MAX;
3176 let mut from_regex = false;
3177
3178 while let Some((indent_row, delta)) = indent_changes.peek() {
3179 match indent_row.cmp(&row) {
3180 Ordering::Equal => match delta {
3181 Ordering::Less => {
3182 from_regex = true;
3183 outdent_from_prev_row = true
3184 }
3185 Ordering::Greater => {
3186 indent_from_prev_row = true;
3187 from_regex = true
3188 }
3189 _ => {}
3190 },
3191
3192 Ordering::Greater => break,
3193 Ordering::Less => {}
3194 }
3195
3196 indent_changes.next();
3197 }
3198
3199 for range in &indent_ranges {
3200 if range.start.row >= row {
3201 break;
3202 }
3203 if range.start.row == prev_row && range.end > row_start {
3204 indent_from_prev_row = true;
3205 }
3206 if range.end > prev_row_start && range.end <= row_start {
3207 outdent_to_row = outdent_to_row.min(range.start.row);
3208 }
3209 }
3210
3211 if let Some(basis_row) = regex_outdent_map.get(&row) {
3212 indent_from_prev_row = false;
3213 outdent_to_row = *basis_row;
3214 from_regex = true;
3215 }
3216
3217 let within_error = error_ranges
3218 .iter()
3219 .any(|e| e.start.row < row && e.end > row_start);
3220
3221 let suggestion = if outdent_to_row == prev_row
3222 || (outdent_from_prev_row && indent_from_prev_row)
3223 {
3224 Some(IndentSuggestion {
3225 basis_row: prev_row,
3226 delta: Ordering::Equal,
3227 within_error: within_error && !from_regex,
3228 })
3229 } else if indent_from_prev_row {
3230 Some(IndentSuggestion {
3231 basis_row: prev_row,
3232 delta: Ordering::Greater,
3233 within_error: within_error && !from_regex,
3234 })
3235 } else if outdent_to_row < prev_row {
3236 Some(IndentSuggestion {
3237 basis_row: outdent_to_row,
3238 delta: Ordering::Equal,
3239 within_error: within_error && !from_regex,
3240 })
3241 } else if outdent_from_prev_row {
3242 Some(IndentSuggestion {
3243 basis_row: prev_row,
3244 delta: Ordering::Less,
3245 within_error: within_error && !from_regex,
3246 })
3247 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3248 {
3249 Some(IndentSuggestion {
3250 basis_row: prev_row,
3251 delta: Ordering::Equal,
3252 within_error: within_error && !from_regex,
3253 })
3254 } else {
3255 None
3256 };
3257
3258 prev_row = row;
3259 prev_row_start = row_start;
3260 suggestion
3261 }))
3262 }
3263
3264 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3265 while row > 0 {
3266 row -= 1;
3267 if !self.is_line_blank(row) {
3268 return Some(row);
3269 }
3270 }
3271 None
3272 }
3273
3274 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3275 let captures = self.syntax.captures(range, &self.text, |grammar| {
3276 grammar
3277 .highlights_config
3278 .as_ref()
3279 .map(|config| &config.query)
3280 });
3281 let highlight_maps = captures
3282 .grammars()
3283 .iter()
3284 .map(|grammar| grammar.highlight_map())
3285 .collect();
3286 (captures, highlight_maps)
3287 }
3288
3289 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3290 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3291 /// returned in chunks where each chunk has a single syntax highlighting style and
3292 /// diagnostic status.
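    ///
    /// A minimal sketch of reassembling the text while inspecting highlights (assuming
    /// a `snapshot: BufferSnapshot` is in scope; field names follow the `Chunk` type):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     if let Some(_highlight_id) = chunk.syntax_highlight_id {
    ///         // This chunk is covered by a single syntax highlight.
    ///     }
    ///     text.push_str(chunk.text);
    /// }
    /// assert_eq!(text, snapshot.text());
    /// ```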
3293 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3294 let range = range.start.to_offset(self)..range.end.to_offset(self);
3295
3296 let mut syntax = None;
3297 if language_aware {
3298 syntax = Some(self.get_highlights(range.clone()));
3299 }
3300 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3301 let diagnostics = language_aware;
3302 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3303 }
3304
3305 pub fn highlighted_text_for_range<T: ToOffset>(
3306 &self,
3307 range: Range<T>,
3308 override_style: Option<HighlightStyle>,
3309 syntax_theme: &SyntaxTheme,
3310 ) -> HighlightedText {
3311 HighlightedText::from_buffer_range(
3312 range,
3313 &self.text,
3314 &self.syntax,
3315 override_style,
3316 syntax_theme,
3317 )
3318 }
3319
3320 /// Invokes the given callback for each line of text in the given range of the buffer.
3321    /// Uses a callback to avoid allocating a new string for each line.
3322 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3323 let mut line = String::new();
3324 let mut row = range.start.row;
3325 for chunk in self
3326 .as_rope()
3327 .chunks_in_range(range.to_offset(self))
3328 .chain(["\n"])
3329 {
3330 for (newline_ix, text) in chunk.split('\n').enumerate() {
3331 if newline_ix > 0 {
3332 callback(row, &line);
3333 row += 1;
3334 line.clear();
3335 }
3336 line.push_str(text);
3337 }
3338 }
3339 }
3340
3341 /// Iterates over every [`SyntaxLayer`] in the buffer.
3342 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3343 self.syntax_layers_for_range(0..self.len(), true)
3344 }
3345
3346 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3347 let offset = position.to_offset(self);
3348 self.syntax_layers_for_range(offset..offset, false)
3349 .filter(|l| l.node().end_byte() > offset)
3350 .last()
3351 }
3352
3353 pub fn syntax_layers_for_range<D: ToOffset>(
3354 &self,
3355 range: Range<D>,
3356 include_hidden: bool,
3357 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3358 self.syntax
3359 .layers_for_range(range, &self.text, include_hidden)
3360 }
3361
3362 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3363 &self,
3364 range: Range<D>,
3365 ) -> Option<SyntaxLayer<'_>> {
3366 let range = range.to_offset(self);
3367 self.syntax
3368 .layers_for_range(range, &self.text, false)
3369 .max_by(|a, b| {
3370 if a.depth != b.depth {
3371 a.depth.cmp(&b.depth)
3372 } else if a.offset.0 != b.offset.0 {
3373 a.offset.0.cmp(&b.offset.0)
3374 } else {
3375 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3376 }
3377 })
3378 }
3379
3380 /// Returns the main [`Language`].
3381 pub fn language(&self) -> Option<&Arc<Language>> {
3382 self.language.as_ref()
3383 }
3384
3385 /// Returns the [`Language`] at the given location.
3386 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3387 self.syntax_layer_at(position)
3388 .map(|info| info.language)
3389 .or(self.language.as_ref())
3390 }
3391
3392 /// Returns the settings for the language at the given location.
3393 pub fn settings_at<'a, D: ToOffset>(
3394 &'a self,
3395 position: D,
3396 cx: &'a App,
3397 ) -> Cow<'a, LanguageSettings> {
3398 language_settings(
3399 self.language_at(position).map(|l| l.name()),
3400 self.file.as_ref(),
3401 cx,
3402 )
3403 }
3404
3405 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3406 CharClassifier::new(self.language_scope_at(point))
3407 }
3408
3409 /// Returns the [`LanguageScope`] at the given location.
3410 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3411 let offset = position.to_offset(self);
3412 let mut scope = None;
3413 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3414
3415 // Use the layer that has the smallest node intersecting the given point.
3416 for layer in self
3417 .syntax
3418 .layers_for_range(offset..offset, &self.text, false)
3419 {
3420 let mut cursor = layer.node().walk();
3421
3422 let mut range = None;
3423 loop {
3424 let child_range = cursor.node().byte_range();
3425 if !child_range.contains(&offset) {
3426 break;
3427 }
3428
3429 range = Some(child_range);
3430 if cursor.goto_first_child_for_byte(offset).is_none() {
3431 break;
3432 }
3433 }
3434
3435 if let Some(range) = range
3436 && smallest_range_and_depth.as_ref().is_none_or(
3437 |(smallest_range, smallest_range_depth)| {
3438 if layer.depth > *smallest_range_depth {
3439 true
3440 } else if layer.depth == *smallest_range_depth {
3441 range.len() < smallest_range.len()
3442 } else {
3443 false
3444 }
3445 },
3446 )
3447 {
3448 smallest_range_and_depth = Some((range, layer.depth));
3449 scope = Some(LanguageScope {
3450 language: layer.language.clone(),
3451 override_id: layer.override_id(offset, &self.text),
3452 });
3453 }
3454 }
3455
3456 scope.or_else(|| {
3457 self.language.clone().map(|language| LanguageScope {
3458 language,
3459 override_id: None,
3460 })
3461 })
3462 }
3463
3464 /// Returns a tuple of the range and character kind of the word
3465 /// surrounding the given position.
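    ///
    /// A minimal sketch (assuming a `snapshot: BufferSnapshot` containing `"hello world"`):
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(2, None);
    /// assert_eq!(range, 0..5); // the word "hello"
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```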
3466 pub fn surrounding_word<T: ToOffset>(
3467 &self,
3468 start: T,
3469 scope_context: Option<CharScopeContext>,
3470 ) -> (Range<usize>, Option<CharKind>) {
3471 let mut start = start.to_offset(self);
3472 let mut end = start;
3473 let mut next_chars = self.chars_at(start).take(128).peekable();
3474 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3475
3476 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3477 let word_kind = cmp::max(
3478 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3479 next_chars.peek().copied().map(|c| classifier.kind(c)),
3480 );
3481
3482 for ch in prev_chars {
3483 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3484 start -= ch.len_utf8();
3485 } else {
3486 break;
3487 }
3488 }
3489
3490 for ch in next_chars {
3491 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3492 end += ch.len_utf8();
3493 } else {
3494 break;
3495 }
3496 }
3497
3498 (start..end, word_kind)
3499 }
3500
3501 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3502 /// range. When `require_larger` is true, the node found must be larger than the query range.
3503 ///
3504 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3505 /// be moved to the root of the tree.
3506 fn goto_node_enclosing_range(
3507 cursor: &mut tree_sitter::TreeCursor,
3508 query_range: &Range<usize>,
3509 require_larger: bool,
3510 ) -> bool {
3511 let mut ascending = false;
3512 loop {
3513 let mut range = cursor.node().byte_range();
3514 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3517 if range.start > query_range.start {
3518 cursor.goto_previous_sibling();
3519 range = cursor.node().byte_range();
3520 }
3521 } else {
3522 // When the query range is non-empty and the current node ends exactly at the start,
3523 // move to the next sibling to find a node that extends beyond the start.
3524 if range.end == query_range.start {
3525 cursor.goto_next_sibling();
3526 range = cursor.node().byte_range();
3527 }
3528 }
3529
3530 let encloses = range.contains_inclusive(query_range)
3531 && (!require_larger || range.len() > query_range.len());
3532 if !encloses {
3533 ascending = true;
3534 if !cursor.goto_parent() {
3535 return false;
3536 }
3537 continue;
3538 } else if ascending {
3539 return true;
3540 }
3541
3542 // Descend into the current node.
3543 if cursor
3544 .goto_first_child_for_byte(query_range.start)
3545 .is_none()
3546 {
3547 return true;
3548 }
3549 }
3550 }
3551
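    /// Returns the smallest syntax node that encloses the given range and is strictly
    /// larger than it, considering every syntax layer intersecting the range.
    ///
    /// # Example
    ///
    /// A hedged sketch of one way this could be used; `snapshot` is assumed to be a
    /// populated [`BufferSnapshot`] and `12..16` an arbitrary selected byte range:
    ///
    /// ```ignore
    /// // Repeatedly expand a selection to the enclosing syntax node, in the style of
    /// // an "expand selection" operation.
    /// let mut range = 12..16;
    /// while let Some(node) = snapshot.syntax_ancestor(range.clone()) {
    ///     range = node.byte_range();
    /// }
    /// // `range` has now grown to the root node of the outermost syntax tree.
    /// ```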
3552 pub fn syntax_ancestor<'a, T: ToOffset>(
3553 &'a self,
3554 range: Range<T>,
3555 ) -> Option<tree_sitter::Node<'a>> {
3556 let range = range.start.to_offset(self)..range.end.to_offset(self);
3557 let mut result: Option<tree_sitter::Node<'a>> = None;
3558 for layer in self
3559 .syntax
3560 .layers_for_range(range.clone(), &self.text, true)
3561 {
3562 let mut cursor = layer.node().walk();
3563
3564 // Find the node that both contains the range and is larger than it.
3565 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3566 continue;
3567 }
3568
3569 let left_node = cursor.node();
3570 let mut layer_result = left_node;
3571
3572 // For an empty range, try to find another node immediately to the right of the range.
3573 if left_node.end_byte() == range.start {
3574 let mut right_node = None;
3575 while !cursor.goto_next_sibling() {
3576 if !cursor.goto_parent() {
3577 break;
3578 }
3579 }
3580
3581 while cursor.node().start_byte() == range.start {
3582 right_node = Some(cursor.node());
3583 if !cursor.goto_first_child() {
3584 break;
3585 }
3586 }
3587
3588 // If there is a candidate node on both sides of the (empty) range, then
3589 // decide between the two by favoring a named node over an anonymous token.
3590 // If both nodes are the same in that regard, favor the right one.
3591 if let Some(right_node) = right_node
3592 && (right_node.is_named() || !left_node.is_named())
3593 {
3594 layer_result = right_node;
3595 }
3596 }
3597
3598 if let Some(previous_result) = &result
3599 && previous_result.byte_range().len() < layer_result.byte_range().len()
3600 {
3601 continue;
3602 }
3603 result = Some(layer_result);
3604 }
3605
3606 result
3607 }
3608
3609 /// Find the previous sibling syntax node at the given range.
3610 ///
3611 /// This function locates the syntax node that precedes the node containing
3612 /// the given range. It searches hierarchically by:
3613 /// 1. Finding the node that contains the given range
3614 /// 2. Looking for the previous sibling at the same tree level
3615 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3616 ///
3617 /// Returns `None` if there is no previous sibling at any ancestor level.
3618 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3619 &'a self,
3620 range: Range<T>,
3621 ) -> Option<tree_sitter::Node<'a>> {
3622 let range = range.start.to_offset(self)..range.end.to_offset(self);
3623 let mut result: Option<tree_sitter::Node<'a>> = None;
3624
3625 for layer in self
3626 .syntax
3627 .layers_for_range(range.clone(), &self.text, true)
3628 {
3629 let mut cursor = layer.node().walk();
3630
3631 // Find the node that contains the range
3632 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3633 continue;
3634 }
3635
3636 // Look for the previous sibling, moving up ancestor levels if needed
3637 loop {
3638 if cursor.goto_previous_sibling() {
3639 let layer_result = cursor.node();
3640
3641 if let Some(previous_result) = &result {
3642 if previous_result.byte_range().end < layer_result.byte_range().end {
3643 continue;
3644 }
3645 }
3646 result = Some(layer_result);
3647 break;
3648 }
3649
3650 // No sibling found at this level, try moving up to parent
3651 if !cursor.goto_parent() {
3652 break;
3653 }
3654 }
3655 }
3656
3657 result
3658 }
3659
3660 /// Find the next sibling syntax node at the given range.
3661 ///
3662 /// This function locates the syntax node that follows the node containing
3663 /// the given range. It searches hierarchically by:
3664 /// 1. Finding the node that contains the given range
3665 /// 2. Looking for the next sibling at the same tree level
3666 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3667 ///
3668 /// Returns `None` if there is no next sibling at any ancestor level.
3669 pub fn syntax_next_sibling<'a, T: ToOffset>(
3670 &'a self,
3671 range: Range<T>,
3672 ) -> Option<tree_sitter::Node<'a>> {
3673 let range = range.start.to_offset(self)..range.end.to_offset(self);
3674 let mut result: Option<tree_sitter::Node<'a>> = None;
3675
3676 for layer in self
3677 .syntax
3678 .layers_for_range(range.clone(), &self.text, true)
3679 {
3680 let mut cursor = layer.node().walk();
3681
3682 // Find the node that contains the range
3683 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3684 continue;
3685 }
3686
3687 // Look for the next sibling, moving up ancestor levels if needed
3688 loop {
3689 if cursor.goto_next_sibling() {
3690 let layer_result = cursor.node();
3691
3692 if let Some(previous_result) = &result {
3693 if previous_result.byte_range().start > layer_result.byte_range().start {
3694 continue;
3695 }
3696 }
3697 result = Some(layer_result);
3698 break;
3699 }
3700
3701 // No sibling found at this level, try moving up to parent
3702 if !cursor.goto_parent() {
3703 break;
3704 }
3705 }
3706 }
3707
3708 result
3709 }
3710
    /// Returns the root syntax node within the given row.
3712 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3713 let start_offset = position.to_offset(self);
3714
3715 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3716
3717 let layer = self
3718 .syntax
3719 .layers_for_range(start_offset..start_offset, &self.text, true)
3720 .next()?;
3721
3722 let mut cursor = layer.node().walk();
3723
3724 // Descend to the first leaf that touches the start of the range.
3725 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3726 if cursor.node().end_byte() == start_offset {
3727 cursor.goto_next_sibling();
3728 }
3729 }
3730
3731 // Ascend to the root node within the same row.
3732 while cursor.goto_parent() {
3733 if cursor.node().start_position().row != row {
3734 break;
3735 }
3736 }
3737
3738 Some(cursor.node())
3739 }
3740
3741 /// Returns the outline for the buffer.
3742 ///
3743 /// This method allows passing an optional [`SyntaxTheme`] to
3744 /// syntax-highlight the returned symbols.
3745 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3746 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3747 }
3748
3749 /// Returns all the symbols that contain the given position.
3750 ///
3751 /// This method allows passing an optional [`SyntaxTheme`] to
3752 /// syntax-highlight the returned symbols.
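    ///
    /// # Example
    ///
    /// Illustrative sketch; `snapshot` and `cursor_offset` are hypothetical, and the
    /// returned items run from the outermost container down to the innermost symbol:
    ///
    /// ```ignore
    /// let symbols = snapshot.symbols_containing(cursor_offset, None);
    /// let names: Vec<_> = symbols.into_iter().map(|item| item.text).collect();
    /// ```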
3753 pub fn symbols_containing<T: ToOffset>(
3754 &self,
3755 position: T,
3756 theme: Option<&SyntaxTheme>,
3757 ) -> Vec<OutlineItem<Anchor>> {
3758 let position = position.to_offset(self);
3759 let mut items = self.outline_items_containing(
3760 position.saturating_sub(1)..self.len().min(position + 1),
3761 false,
3762 theme,
3763 );
3764 let mut prev_depth = None;
3765 items.retain(|item| {
3766 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3767 prev_depth = Some(item.depth);
3768 result
3769 });
3770 items
3771 }
3772
3773 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3774 let range = range.to_offset(self);
3775 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3776 grammar.outline_config.as_ref().map(|c| &c.query)
3777 });
3778 let configs = matches
3779 .grammars()
3780 .iter()
3781 .map(|g| g.outline_config.as_ref().unwrap())
3782 .collect::<Vec<_>>();
3783
3784 while let Some(mat) = matches.peek() {
3785 let config = &configs[mat.grammar_index];
3786 let containing_item_node = maybe!({
3787 let item_node = mat.captures.iter().find_map(|cap| {
3788 if cap.index == config.item_capture_ix {
3789 Some(cap.node)
3790 } else {
3791 None
3792 }
3793 })?;
3794
3795 let item_byte_range = item_node.byte_range();
3796 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3797 None
3798 } else {
3799 Some(item_node)
3800 }
3801 });
3802
3803 if let Some(item_node) = containing_item_node {
3804 return Some(
3805 Point::from_ts_point(item_node.start_position())
3806 ..Point::from_ts_point(item_node.end_position()),
3807 );
3808 }
3809
3810 matches.advance();
3811 }
3812 None
3813 }
3814
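    /// Returns the outline items intersecting the given range, with depths assigned
    /// from containment and ranges converted to anchors.
    ///
    /// # Example
    ///
    /// Illustrative sketch, assuming `snapshot` is a [`BufferSnapshot`] whose language
    /// has an outline query configured:
    ///
    /// ```ignore
    /// // Print the outline of the whole buffer, indented by depth.
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```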
3815 pub fn outline_items_containing<T: ToOffset>(
3816 &self,
3817 range: Range<T>,
3818 include_extra_context: bool,
3819 theme: Option<&SyntaxTheme>,
3820 ) -> Vec<OutlineItem<Anchor>> {
3821 let range = range.to_offset(self);
3822 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3823 grammar.outline_config.as_ref().map(|c| &c.query)
3824 });
3825
3826 let mut items = Vec::new();
3827 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3828 while let Some(mat) = matches.peek() {
3829 let config = matches.grammars()[mat.grammar_index]
3830 .outline_config
3831 .as_ref()
3832 .unwrap();
3833 if let Some(item) =
3834 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3835 {
3836 items.push(item);
3837 } else if let Some(capture) = mat
3838 .captures
3839 .iter()
3840 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3841 {
3842 let capture_range = capture.node.start_position()..capture.node.end_position();
3843 let mut capture_row_range =
3844 capture_range.start.row as u32..capture_range.end.row as u32;
3845 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3846 {
3847 capture_row_range.end -= 1;
3848 }
3849 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3850 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3851 last_row_range.end = capture_row_range.end;
3852 } else {
3853 annotation_row_ranges.push(capture_row_range);
3854 }
3855 } else {
3856 annotation_row_ranges.push(capture_row_range);
3857 }
3858 }
3859 matches.advance();
3860 }
3861
3862 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3863
3864 // Assign depths based on containment relationships and convert to anchors.
3865 let mut item_ends_stack = Vec::<Point>::new();
3866 let mut anchor_items = Vec::new();
3867 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3868 for item in items {
3869 while let Some(last_end) = item_ends_stack.last().copied() {
3870 if last_end < item.range.end {
3871 item_ends_stack.pop();
3872 } else {
3873 break;
3874 }
3875 }
3876
3877 let mut annotation_row_range = None;
3878 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3879 let row_preceding_item = item.range.start.row.saturating_sub(1);
3880 if next_annotation_row_range.end < row_preceding_item {
3881 annotation_row_ranges.next();
3882 } else {
3883 if next_annotation_row_range.end == row_preceding_item {
3884 annotation_row_range = Some(next_annotation_row_range.clone());
3885 annotation_row_ranges.next();
3886 }
3887 break;
3888 }
3889 }
3890
3891 anchor_items.push(OutlineItem {
3892 depth: item_ends_stack.len(),
3893 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3894 text: item.text,
3895 highlight_ranges: item.highlight_ranges,
3896 name_ranges: item.name_ranges,
3897 signature_range: item
3898 .signature_range
3899 .map(|r| self.anchor_after(r.start)..self.anchor_before(r.end)),
3900 body_range: item
3901 .body_range
3902 .map(|r| self.anchor_after(r.start)..self.anchor_before(r.end)),
3903 annotation_range: annotation_row_range.map(|annotation_range| {
3904 self.anchor_after(Point::new(annotation_range.start, 0))
3905 ..self.anchor_before(Point::new(
3906 annotation_range.end,
3907 self.line_len(annotation_range.end),
3908 ))
3909 }),
3910 });
3911 item_ends_stack.push(item.range.end);
3912 }
3913
3914 anchor_items
3915 }
3916
3917 fn next_outline_item(
3918 &self,
3919 config: &OutlineConfig,
3920 mat: &SyntaxMapMatch,
3921 range: &Range<usize>,
3922 include_extra_context: bool,
3923 theme: Option<&SyntaxTheme>,
3924 ) -> Option<OutlineItem<Point>> {
3925 let item_node = mat.captures.iter().find_map(|cap| {
3926 if cap.index == config.item_capture_ix {
3927 Some(cap.node)
3928 } else {
3929 None
3930 }
3931 })?;
3932
3933 let item_byte_range = item_node.byte_range();
3934 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3935 return None;
3936 }
3937 let item_point_range = Point::from_ts_point(item_node.start_position())
3938 ..Point::from_ts_point(item_node.end_position());
3939
3940 let mut open_point = None;
3941 let mut close_point = None;
3942
3943 let mut signature_start = None;
3944 let mut signature_end = None;
3945 let mut extend_signature_range = |node: tree_sitter::Node| {
3946 if signature_start.is_none() {
3947 signature_start = Some(Point::from_ts_point(node.start_position()));
3948 }
3949 signature_end = Some(Point::from_ts_point(node.end_position()));
3950 };
3951
3952 let mut buffer_ranges = Vec::new();
3953 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
3954 let mut range = node.start_byte()..node.end_byte();
3955 let start = node.start_position();
3956 if node.end_position().row > start.row {
3957 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3958 }
3959
3960 if !range.is_empty() {
3961 buffer_ranges.push((range, node_is_name));
3962 }
3963 };
3964
3965 for capture in mat.captures {
3966 if capture.index == config.name_capture_ix {
3967 add_to_buffer_ranges(capture.node, true);
3968 extend_signature_range(capture.node);
3969 } else if Some(capture.index) == config.context_capture_ix
3970 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3971 {
3972 add_to_buffer_ranges(capture.node, false);
3973 extend_signature_range(capture.node);
3974 } else {
3975 if Some(capture.index) == config.open_capture_ix {
3976 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3977 } else if Some(capture.index) == config.close_capture_ix {
3978 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3979 }
3980 }
3981 }
3982
3983 if buffer_ranges.is_empty() {
3984 return None;
3985 }
3986
3987 let mut text = String::new();
3988 let mut highlight_ranges = Vec::new();
3989 let mut name_ranges = Vec::new();
3990 let mut chunks = self.chunks(
3991 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3992 true,
3993 );
3994 let mut last_buffer_range_end = 0;
3995 for (buffer_range, is_name) in buffer_ranges {
3996 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3997 if space_added {
3998 text.push(' ');
3999 }
4000 let before_append_len = text.len();
4001 let mut offset = buffer_range.start;
4002 chunks.seek(buffer_range.clone());
4003 for mut chunk in chunks.by_ref() {
4004 if chunk.text.len() > buffer_range.end - offset {
4005 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4006 offset = buffer_range.end;
4007 } else {
4008 offset += chunk.text.len();
4009 }
4010 let style = chunk
4011 .syntax_highlight_id
4012 .zip(theme)
4013 .and_then(|(highlight, theme)| highlight.style(theme));
4014 if let Some(style) = style {
4015 let start = text.len();
4016 let end = start + chunk.text.len();
4017 highlight_ranges.push((start..end, style));
4018 }
4019 text.push_str(chunk.text);
4020 if offset >= buffer_range.end {
4021 break;
4022 }
4023 }
4024 if is_name {
4025 let after_append_len = text.len();
4026 let start = if space_added && !name_ranges.is_empty() {
4027 before_append_len - 1
4028 } else {
4029 before_append_len
4030 };
4031 name_ranges.push(start..after_append_len);
4032 }
4033 last_buffer_range_end = buffer_range.end;
4034 }
4035
4036 let signature_range = signature_start
4037 .zip(signature_end)
4038 .map(|(start, end)| start..end);
4039
4040 Some(OutlineItem {
4041 depth: 0, // We'll calculate the depth later
4042 range: item_point_range,
4043 text,
4044 highlight_ranges,
4045 name_ranges,
4046 signature_range,
4047 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4048 annotation_range: None,
4049 })
4050 }
4051
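    /// Returns the byte ranges of function bodies within `within`, suitable for folding.
    ///
    /// # Example
    ///
    /// A minimal sketch, assuming `snapshot` has a language with text-object queries:
    ///
    /// ```ignore
    /// let foldable: Vec<_> = snapshot.function_body_fold_ranges(0..snapshot.len()).collect();
    /// ```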
4052 pub fn function_body_fold_ranges<T: ToOffset>(
4053 &self,
4054 within: Range<T>,
4055 ) -> impl Iterator<Item = Range<usize>> + '_ {
4056 self.text_object_ranges(within, TreeSitterOptions::default())
4057 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4058 }
4059
4060 /// For each grammar in the language, runs the provided
4061 /// [`tree_sitter::Query`] against the given range.
4062 pub fn matches(
4063 &self,
4064 range: Range<usize>,
4065 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4066 ) -> SyntaxMapMatches<'_> {
4067 self.syntax.matches(range, self, query)
4068 }
4069
4070 pub fn all_bracket_ranges(
4071 &self,
4072 range: Range<usize>,
4073 ) -> impl Iterator<Item = BracketMatch> + '_ {
4074 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4075 grammar.brackets_config.as_ref().map(|c| &c.query)
4076 });
4077 let configs = matches
4078 .grammars()
4079 .iter()
4080 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4081 .collect::<Vec<_>>();
4082
4083 iter::from_fn(move || {
4084 while let Some(mat) = matches.peek() {
4085 let mut open = None;
4086 let mut close = None;
4087 let config = &configs[mat.grammar_index];
4088 let pattern = &config.patterns[mat.pattern_index];
4089 for capture in mat.captures {
4090 if capture.index == config.open_capture_ix {
4091 open = Some(capture.node.byte_range());
4092 } else if capture.index == config.close_capture_ix {
4093 close = Some(capture.node.byte_range());
4094 }
4095 }
4096
4097 matches.advance();
4098
4099 let Some((open_range, close_range)) = open.zip(close) else {
4100 continue;
4101 };
4102
4103 let bracket_range = open_range.start..=close_range.end;
4104 if !bracket_range.overlaps(&range) {
4105 continue;
4106 }
4107
4108 return Some(BracketMatch {
4109 open_range,
4110 close_range,
4111 newline_only: pattern.newline_only,
4112 });
4113 }
4114 None
4115 })
4116 }
4117
    /// Returns bracket range pairs overlapping or adjacent to `range`.
4119 pub fn bracket_ranges<T: ToOffset>(
4120 &self,
4121 range: Range<T>,
4122 ) -> impl Iterator<Item = BracketMatch> + '_ {
4123 // Find bracket pairs that *inclusively* contain the given range.
4124 let range = range.start.to_offset(self).saturating_sub(1)
4125 ..self.len().min(range.end.to_offset(self) + 1);
4126 self.all_bracket_ranges(range)
4127 .filter(|pair| !pair.newline_only)
4128 }
4129
4130 pub fn debug_variables_query<T: ToOffset>(
4131 &self,
4132 range: Range<T>,
4133 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4134 let range = range.start.to_offset(self).saturating_sub(1)
4135 ..self.len().min(range.end.to_offset(self) + 1);
4136
4137 let mut matches = self.syntax.matches_with_options(
4138 range.clone(),
4139 &self.text,
4140 TreeSitterOptions::default(),
4141 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4142 );
4143
4144 let configs = matches
4145 .grammars()
4146 .iter()
4147 .map(|grammar| grammar.debug_variables_config.as_ref())
4148 .collect::<Vec<_>>();
4149
4150 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4151
4152 iter::from_fn(move || {
4153 loop {
4154 while let Some(capture) = captures.pop() {
4155 if capture.0.overlaps(&range) {
4156 return Some(capture);
4157 }
4158 }
4159
4160 let mat = matches.peek()?;
4161
4162 let Some(config) = configs[mat.grammar_index].as_ref() else {
4163 matches.advance();
4164 continue;
4165 };
4166
4167 for capture in mat.captures {
4168 let Some(ix) = config
4169 .objects_by_capture_ix
4170 .binary_search_by_key(&capture.index, |e| e.0)
4171 .ok()
4172 else {
4173 continue;
4174 };
4175 let text_object = config.objects_by_capture_ix[ix].1;
4176 let byte_range = capture.node.byte_range();
4177
4178 let mut found = false;
4179 for (range, existing) in captures.iter_mut() {
4180 if existing == &text_object {
4181 range.start = range.start.min(byte_range.start);
4182 range.end = range.end.max(byte_range.end);
4183 found = true;
4184 break;
4185 }
4186 }
4187
4188 if !found {
4189 captures.push((byte_range, text_object));
4190 }
4191 }
4192
4193 matches.advance();
4194 }
4195 })
4196 }
4197
4198 pub fn text_object_ranges<T: ToOffset>(
4199 &self,
4200 range: Range<T>,
4201 options: TreeSitterOptions,
4202 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4203 let range = range.start.to_offset(self).saturating_sub(1)
4204 ..self.len().min(range.end.to_offset(self) + 1);
4205
4206 let mut matches =
4207 self.syntax
4208 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4209 grammar.text_object_config.as_ref().map(|c| &c.query)
4210 });
4211
4212 let configs = matches
4213 .grammars()
4214 .iter()
4215 .map(|grammar| grammar.text_object_config.as_ref())
4216 .collect::<Vec<_>>();
4217
4218 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4219
4220 iter::from_fn(move || {
4221 loop {
4222 while let Some(capture) = captures.pop() {
4223 if capture.0.overlaps(&range) {
4224 return Some(capture);
4225 }
4226 }
4227
4228 let mat = matches.peek()?;
4229
4230 let Some(config) = configs[mat.grammar_index].as_ref() else {
4231 matches.advance();
4232 continue;
4233 };
4234
4235 for capture in mat.captures {
4236 let Some(ix) = config
4237 .text_objects_by_capture_ix
4238 .binary_search_by_key(&capture.index, |e| e.0)
4239 .ok()
4240 else {
4241 continue;
4242 };
4243 let text_object = config.text_objects_by_capture_ix[ix].1;
4244 let byte_range = capture.node.byte_range();
4245
4246 let mut found = false;
4247 for (range, existing) in captures.iter_mut() {
4248 if existing == &text_object {
4249 range.start = range.start.min(byte_range.start);
4250 range.end = range.end.max(byte_range.end);
4251 found = true;
4252 break;
4253 }
4254 }
4255
4256 if !found {
4257 captures.push((byte_range, text_object));
4258 }
4259 }
4260
4261 matches.advance();
4262 }
4263 })
4264 }
4265
    /// Returns enclosing bracket ranges containing the given range.
4267 pub fn enclosing_bracket_ranges<T: ToOffset>(
4268 &self,
4269 range: Range<T>,
4270 ) -> impl Iterator<Item = BracketMatch> + '_ {
4271 let range = range.start.to_offset(self)..range.end.to_offset(self);
4272
4273 self.bracket_ranges(range.clone()).filter(move |pair| {
4274 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4275 })
4276 }
4277
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket pairs are considered.
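    ///
    /// # Example
    ///
    /// A hedged sketch, assuming `snapshot` holds the Rust text `"fn f() { () }"` and
    /// its grammar's bracket query captures both parentheses and braces:
    ///
    /// ```ignore
    /// // An empty range between the inner parentheses resolves to that pair,
    /// // not to the enclosing braces.
    /// let (open, close) = snapshot.innermost_enclosing_bracket_ranges(10..10, None).unwrap();
    /// assert_eq!((open, close), (9..10, 10..11));
    /// ```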
4281 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4282 &self,
4283 range: Range<T>,
4284 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4285 ) -> Option<(Range<usize>, Range<usize>)> {
4286 let range = range.start.to_offset(self)..range.end.to_offset(self);
4287
4288 // Get the ranges of the innermost pair of brackets.
4289 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4290
4291 for pair in self.enclosing_bracket_ranges(range) {
4292 if let Some(range_filter) = range_filter
4293 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4294 {
4295 continue;
4296 }
4297
4298 let len = pair.close_range.end - pair.open_range.start;
4299
4300 if let Some((existing_open, existing_close)) = &result {
4301 let existing_len = existing_close.end - existing_open.start;
4302 if len > existing_len {
4303 continue;
4304 }
4305 }
4306
4307 result = Some((pair.open_range, pair.close_range));
4308 }
4309
4310 result
4311 }
4312
    /// Returns the byte ranges of any matches of the redaction query.
4314 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4315 /// will be run on the relevant section of the buffer.
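    ///
    /// # Example
    ///
    /// Illustrative sketch, assuming `snapshot` is for a buffer whose language defines
    /// a redaction query (for example, over variable values in an `.env`-style file):
    ///
    /// ```ignore
    /// // Byte ranges whose contents should be obscured in the UI.
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```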
4316 pub fn redacted_ranges<T: ToOffset>(
4317 &self,
4318 range: Range<T>,
4319 ) -> impl Iterator<Item = Range<usize>> + '_ {
4320 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4321 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4322 grammar
4323 .redactions_config
4324 .as_ref()
4325 .map(|config| &config.query)
4326 });
4327
4328 let configs = syntax_matches
4329 .grammars()
4330 .iter()
4331 .map(|grammar| grammar.redactions_config.as_ref())
4332 .collect::<Vec<_>>();
4333
4334 iter::from_fn(move || {
4335 let redacted_range = syntax_matches
4336 .peek()
4337 .and_then(|mat| {
4338 configs[mat.grammar_index].and_then(|config| {
4339 mat.captures
4340 .iter()
4341 .find(|capture| capture.index == config.redaction_capture_ix)
4342 })
4343 })
4344 .map(|mat| mat.node.byte_range());
4345 syntax_matches.advance();
4346 redacted_range
4347 })
4348 }
4349
4350 pub fn injections_intersecting_range<T: ToOffset>(
4351 &self,
4352 range: Range<T>,
4353 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4354 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4355
4356 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4357 grammar
4358 .injection_config
4359 .as_ref()
4360 .map(|config| &config.query)
4361 });
4362
4363 let configs = syntax_matches
4364 .grammars()
4365 .iter()
4366 .map(|grammar| grammar.injection_config.as_ref())
4367 .collect::<Vec<_>>();
4368
4369 iter::from_fn(move || {
4370 let ranges = syntax_matches.peek().and_then(|mat| {
4371 let config = &configs[mat.grammar_index]?;
4372 let content_capture_range = mat.captures.iter().find_map(|capture| {
4373 if capture.index == config.content_capture_ix {
4374 Some(capture.node.byte_range())
4375 } else {
4376 None
4377 }
4378 })?;
4379 let language = self.language_at(content_capture_range.start)?;
4380 Some((content_capture_range, language))
4381 });
4382 syntax_matches.advance();
4383 ranges
4384 })
4385 }
4386
4387 pub fn runnable_ranges(
4388 &self,
4389 offset_range: Range<usize>,
4390 ) -> impl Iterator<Item = RunnableRange> + '_ {
4391 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4392 grammar.runnable_config.as_ref().map(|config| &config.query)
4393 });
4394
4395 let test_configs = syntax_matches
4396 .grammars()
4397 .iter()
4398 .map(|grammar| grammar.runnable_config.as_ref())
4399 .collect::<Vec<_>>();
4400
4401 iter::from_fn(move || {
4402 loop {
4403 let mat = syntax_matches.peek()?;
4404
4405 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4406 let mut run_range = None;
4407 let full_range = mat.captures.iter().fold(
4408 Range {
4409 start: usize::MAX,
4410 end: 0,
4411 },
4412 |mut acc, next| {
4413 let byte_range = next.node.byte_range();
4414 if acc.start > byte_range.start {
4415 acc.start = byte_range.start;
4416 }
4417 if acc.end < byte_range.end {
4418 acc.end = byte_range.end;
4419 }
4420 acc
4421 },
4422 );
4423 if full_range.start > full_range.end {
4424 // We did not find a full spanning range of this match.
4425 return None;
4426 }
4427 let extra_captures: SmallVec<[_; 1]> =
4428 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4429 test_configs
4430 .extra_captures
4431 .get(capture.index as usize)
4432 .cloned()
4433 .and_then(|tag_name| match tag_name {
4434 RunnableCapture::Named(name) => {
4435 Some((capture.node.byte_range(), name))
4436 }
4437 RunnableCapture::Run => {
4438 let _ = run_range.insert(capture.node.byte_range());
4439 None
4440 }
4441 })
4442 }));
4443 let run_range = run_range?;
4444 let tags = test_configs
4445 .query
4446 .property_settings(mat.pattern_index)
4447 .iter()
4448 .filter_map(|property| {
4449 if *property.key == *"tag" {
4450 property
4451 .value
4452 .as_ref()
4453 .map(|value| RunnableTag(value.to_string().into()))
4454 } else {
4455 None
4456 }
4457 })
4458 .collect();
4459 let extra_captures = extra_captures
4460 .into_iter()
4461 .map(|(range, name)| {
4462 (
4463 name.to_string(),
4464 self.text_for_range(range).collect::<String>(),
4465 )
4466 })
4467 .collect();
4468 // All tags should have the same range.
4469 Some(RunnableRange {
4470 run_range,
4471 full_range,
4472 runnable: Runnable {
4473 tags,
4474 language: mat.language,
4475 buffer: self.remote_id(),
4476 },
4477 extra_captures,
4478 buffer_id: self.remote_id(),
4479 })
4480 });
4481
4482 syntax_matches.advance();
4483 if test_range.is_some() {
                    // Short-circuiting when `.peek()?` returns `None` is fine, but a match
                    // without a run marker should not end this iterator; in that case we
                    // simply loop around to the next match.
4486 return test_range;
4487 }
4488 }
4489 })
4490 }
4491
4492 /// Returns selections for remote peers intersecting the given range.
4493 #[allow(clippy::type_complexity)]
4494 pub fn selections_in_range(
4495 &self,
4496 range: Range<Anchor>,
4497 include_local: bool,
4498 ) -> impl Iterator<
4499 Item = (
4500 ReplicaId,
4501 bool,
4502 CursorShape,
4503 impl Iterator<Item = &Selection<Anchor>> + '_,
4504 ),
4505 > + '_ {
4506 self.remote_selections
4507 .iter()
4508 .filter(move |(replica_id, set)| {
4509 (include_local || **replica_id != self.text.replica_id())
4510 && !set.selections.is_empty()
4511 })
4512 .map(move |(replica_id, set)| {
4513 let start_ix = match set.selections.binary_search_by(|probe| {
4514 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4515 }) {
4516 Ok(ix) | Err(ix) => ix,
4517 };
4518 let end_ix = match set.selections.binary_search_by(|probe| {
4519 probe.start.cmp(&range.end, self).then(Ordering::Less)
4520 }) {
4521 Ok(ix) | Err(ix) => ix,
4522 };
4523
4524 (
4525 *replica_id,
4526 set.line_mode,
4527 set.cursor_shape,
4528 set.selections[start_ix..end_ix].iter(),
4529 )
4530 })
4531 }
4532
    /// Returns whether the buffer contains any diagnostics.
4534 pub fn has_diagnostics(&self) -> bool {
4535 !self.diagnostics.is_empty()
4536 }
4537
4538 /// Returns all the diagnostics intersecting the given range.
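    ///
    /// # Example
    ///
    /// A minimal sketch, assuming `snapshot` has diagnostics populated by a language server:
    ///
    /// ```ignore
    /// // Walk diagnostics in the first 100 bytes, resolving anchors to byte offsets.
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..100, false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```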
4539 pub fn diagnostics_in_range<'a, T, O>(
4540 &'a self,
4541 search_range: Range<T>,
4542 reversed: bool,
4543 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4544 where
4545 T: 'a + Clone + ToOffset,
4546 O: 'a + FromAnchor,
4547 {
4548 let mut iterators: Vec<_> = self
4549 .diagnostics
4550 .iter()
4551 .map(|(_, collection)| {
4552 collection
4553 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4554 .peekable()
4555 })
4556 .collect();
4557
4558 std::iter::from_fn(move || {
4559 let (next_ix, _) = iterators
4560 .iter_mut()
4561 .enumerate()
4562 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4563 .min_by(|(_, a), (_, b)| {
4564 let cmp = a
4565 .range
4566 .start
4567 .cmp(&b.range.start, self)
4568 // when range is equal, sort by diagnostic severity
4569 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4570 // and stabilize order with group_id
4571 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4572 if reversed { cmp.reverse() } else { cmp }
4573 })?;
4574 iterators[next_ix]
4575 .next()
4576 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4577 diagnostic,
4578 range: FromAnchor::from_anchor(&range.start, self)
4579 ..FromAnchor::from_anchor(&range.end, self),
4580 })
4581 })
4582 }
4583
4584 /// Returns all the diagnostic groups associated with the given
4585 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
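    ///
    /// # Example
    ///
    /// Illustrative sketch; `server_id` is a hypothetical [`LanguageServerId`]:
    ///
    /// ```ignore
    /// // Groups reported by one specific language server.
    /// let groups = snapshot.diagnostic_groups(Some(server_id));
    /// // Or every group from every server, sorted by the primary entry's start position.
    /// let all_groups = snapshot.diagnostic_groups(None);
    /// ```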
4587 pub fn diagnostic_groups(
4588 &self,
4589 language_server_id: Option<LanguageServerId>,
4590 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4591 let mut groups = Vec::new();
4592
4593 if let Some(language_server_id) = language_server_id {
4594 if let Ok(ix) = self
4595 .diagnostics
4596 .binary_search_by_key(&language_server_id, |e| e.0)
4597 {
4598 self.diagnostics[ix]
4599 .1
4600 .groups(language_server_id, &mut groups, self);
4601 }
4602 } else {
4603 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4604 diagnostics.groups(*language_server_id, &mut groups, self);
4605 }
4606 }
4607
4608 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4609 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4610 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4611 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4612 });
4613
4614 groups
4615 }
4616
4617 /// Returns an iterator over the diagnostics for the given group.
4618 pub fn diagnostic_group<O>(
4619 &self,
4620 group_id: usize,
4621 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4622 where
4623 O: FromAnchor + 'static,
4624 {
4625 self.diagnostics
4626 .iter()
4627 .flat_map(move |(_, set)| set.group(group_id, self))
4628 }
4629
4630 /// An integer version number that accounts for all updates besides
4631 /// the buffer's text itself (which is versioned via a version vector).
4632 pub fn non_text_state_update_count(&self) -> usize {
4633 self.non_text_state_update_count
4634 }
4635
4636 /// An integer version that changes when the buffer's syntax changes.
4637 pub fn syntax_update_count(&self) -> usize {
4638 self.syntax.update_count()
4639 }
4640
    /// Returns a snapshot of the underlying file.
4642 pub fn file(&self) -> Option<&Arc<dyn File>> {
4643 self.file.as_ref()
4644 }
4645
4646 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4647 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4648 if let Some(file) = self.file() {
4649 if file.path().file_name().is_none() || include_root {
4650 Some(file.full_path(cx))
4651 } else {
4652 Some(file.path().to_path_buf())
4653 }
4654 } else {
4655 None
4656 }
4657 }
4658
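    /// Returns the distinct words in the given range, mapped to their anchor ranges.
    ///
    /// # Example
    ///
    /// A minimal sketch using the fields of [`WordsQuery`]; `snapshot` is assumed:
    ///
    /// ```ignore
    /// // Words in the whole buffer that contain the characters of "cfg", in order.
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```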
4659 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4660 let query_str = query.fuzzy_contents;
4661 if query_str.is_some_and(|query| query.is_empty()) {
4662 return BTreeMap::default();
4663 }
4664
4665 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4666 language,
4667 override_id: None,
4668 }));
4669
4670 let mut query_ix = 0;
4671 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4672 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4673
4674 let mut words = BTreeMap::default();
4675 let mut current_word_start_ix = None;
4676 let mut chunk_ix = query.range.start;
4677 for chunk in self.chunks(query.range, false) {
4678 for (i, c) in chunk.text.char_indices() {
4679 let ix = chunk_ix + i;
4680 if classifier.is_word(c) {
4681 if current_word_start_ix.is_none() {
4682 current_word_start_ix = Some(ix);
4683 }
4684
4685 if let Some(query_chars) = &query_chars
4686 && query_ix < query_len
4687 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4688 {
4689 query_ix += 1;
4690 }
4691 continue;
4692 } else if let Some(word_start) = current_word_start_ix.take()
4693 && query_ix == query_len
4694 {
4695 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4696 let mut word_text = self.text_for_range(word_start..ix).peekable();
4697 let first_char = word_text
4698 .peek()
4699 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip "words" that start with a digit.
4701 if !query.skip_digits
4702 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4703 {
4704 words.insert(word_text.collect(), word_range);
4705 }
4706 }
4707 query_ix = 0;
4708 }
4709 chunk_ix += chunk.text.len();
4710 }
4711
4712 words
4713 }
4714}
4715
4716pub struct WordsQuery<'a> {
    /// Only returns words that contain every character of this fuzzy string, in order (case-insensitively).
4718 pub fuzzy_contents: Option<&'a str>,
4719 /// Skips words that start with a digit.
4720 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4722 pub range: Range<usize>,
4723}
4724
4725fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4726 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4727}
4728
4729fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4730 let mut result = IndentSize::spaces(0);
4731 for c in text {
4732 let kind = match c {
4733 ' ' => IndentKind::Space,
4734 '\t' => IndentKind::Tab,
4735 _ => break,
4736 };
4737 if result.len == 0 {
4738 result.kind = kind;
4739 }
4740 result.len += 1;
4741 }
4742 result
4743}
4744
4745impl Clone for BufferSnapshot {
4746 fn clone(&self) -> Self {
4747 Self {
4748 text: self.text.clone(),
4749 syntax: self.syntax.clone(),
4750 file: self.file.clone(),
4751 remote_selections: self.remote_selections.clone(),
4752 diagnostics: self.diagnostics.clone(),
4753 language: self.language.clone(),
4754 non_text_state_update_count: self.non_text_state_update_count,
4755 }
4756 }
4757}
4758
4759impl Deref for BufferSnapshot {
4760 type Target = text::BufferSnapshot;
4761
4762 fn deref(&self) -> &Self::Target {
4763 &self.text
4764 }
4765}
4766
4767unsafe impl Send for BufferChunks<'_> {}
4768
4769impl<'a> BufferChunks<'a> {
4770 pub(crate) fn new(
4771 text: &'a Rope,
4772 range: Range<usize>,
4773 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4774 diagnostics: bool,
4775 buffer_snapshot: Option<&'a BufferSnapshot>,
4776 ) -> Self {
4777 let mut highlights = None;
4778 if let Some((captures, highlight_maps)) = syntax {
4779 highlights = Some(BufferChunkHighlights {
4780 captures,
4781 next_capture: None,
4782 stack: Default::default(),
4783 highlight_maps,
4784 })
4785 }
4786
4787 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4788 let chunks = text.chunks_in_range(range.clone());
4789
4790 let mut this = BufferChunks {
4791 range,
4792 buffer_snapshot,
4793 chunks,
4794 diagnostic_endpoints,
4795 error_depth: 0,
4796 warning_depth: 0,
4797 information_depth: 0,
4798 hint_depth: 0,
4799 unnecessary_depth: 0,
4800 underline: true,
4801 highlights,
4802 };
4803 this.initialize_diagnostic_endpoints();
4804 this
4805 }
4806
    /// Seeks to the given byte range in the buffer.
4808 pub fn seek(&mut self, range: Range<usize>) {
4809 let old_range = std::mem::replace(&mut self.range, range.clone());
4810 self.chunks.set_range(self.range.clone());
4811 if let Some(highlights) = self.highlights.as_mut() {
4812 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4813 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4814 highlights
4815 .stack
4816 .retain(|(end_offset, _)| *end_offset > range.start);
4817 if let Some(capture) = &highlights.next_capture
4818 && range.start >= capture.node.start_byte()
4819 {
4820 let next_capture_end = capture.node.end_byte();
4821 if range.start < next_capture_end {
4822 highlights.stack.push((
4823 next_capture_end,
4824 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4825 ));
4826 }
4827 highlights.next_capture.take();
4828 }
4829 } else if let Some(snapshot) = self.buffer_snapshot {
4830 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4831 *highlights = BufferChunkHighlights {
4832 captures,
4833 next_capture: None,
4834 stack: Default::default(),
4835 highlight_maps,
4836 };
4837 } else {
4838 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4839 // Seeking such BufferChunks is not supported.
4840 debug_assert!(
4841 false,
4842 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4843 );
4844 }
4845
4846 highlights.captures.set_byte_range(self.range.clone());
4847 self.initialize_diagnostic_endpoints();
4848 }
4849 }
4850
4851 fn initialize_diagnostic_endpoints(&mut self) {
4852 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4853 && let Some(buffer) = self.buffer_snapshot
4854 {
4855 let mut diagnostic_endpoints = Vec::new();
4856 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4857 diagnostic_endpoints.push(DiagnosticEndpoint {
4858 offset: entry.range.start,
4859 is_start: true,
4860 severity: entry.diagnostic.severity,
4861 is_unnecessary: entry.diagnostic.is_unnecessary,
4862 underline: entry.diagnostic.underline,
4863 });
4864 diagnostic_endpoints.push(DiagnosticEndpoint {
4865 offset: entry.range.end,
4866 is_start: false,
4867 severity: entry.diagnostic.severity,
4868 is_unnecessary: entry.diagnostic.is_unnecessary,
4869 underline: entry.diagnostic.underline,
4870 });
4871 }
4872 diagnostic_endpoints
4873 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4874 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4875 self.hint_depth = 0;
4876 self.error_depth = 0;
4877 self.warning_depth = 0;
4878 self.information_depth = 0;
4879 }
4880 }
4881
4882 /// The current byte offset in the buffer.
4883 pub fn offset(&self) -> usize {
4884 self.range.start
4885 }
4886
4887 pub fn range(&self) -> Range<usize> {
4888 self.range.clone()
4889 }
4890
4891 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4892 let depth = match endpoint.severity {
4893 DiagnosticSeverity::ERROR => &mut self.error_depth,
4894 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4895 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4896 DiagnosticSeverity::HINT => &mut self.hint_depth,
4897 _ => return,
4898 };
4899 if endpoint.is_start {
4900 *depth += 1;
4901 } else {
4902 *depth -= 1;
4903 }
4904
4905 if endpoint.is_unnecessary {
4906 if endpoint.is_start {
4907 self.unnecessary_depth += 1;
4908 } else {
4909 self.unnecessary_depth -= 1;
4910 }
4911 }
4912 }
4913
4914 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4915 if self.error_depth > 0 {
4916 Some(DiagnosticSeverity::ERROR)
4917 } else if self.warning_depth > 0 {
4918 Some(DiagnosticSeverity::WARNING)
4919 } else if self.information_depth > 0 {
4920 Some(DiagnosticSeverity::INFORMATION)
4921 } else if self.hint_depth > 0 {
4922 Some(DiagnosticSeverity::HINT)
4923 } else {
4924 None
4925 }
4926 }
4927
4928 fn current_code_is_unnecessary(&self) -> bool {
4929 self.unnecessary_depth > 0
4930 }
4931}
4932
4933impl<'a> Iterator for BufferChunks<'a> {
4934 type Item = Chunk<'a>;
4935
4936 fn next(&mut self) -> Option<Self::Item> {
4937 let mut next_capture_start = usize::MAX;
4938 let mut next_diagnostic_endpoint = usize::MAX;
4939
4940 if let Some(highlights) = self.highlights.as_mut() {
4941 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4942 if *parent_capture_end <= self.range.start {
4943 highlights.stack.pop();
4944 } else {
4945 break;
4946 }
4947 }
4948
4949 if highlights.next_capture.is_none() {
4950 highlights.next_capture = highlights.captures.next();
4951 }
4952
4953 while let Some(capture) = highlights.next_capture.as_ref() {
4954 if self.range.start < capture.node.start_byte() {
4955 next_capture_start = capture.node.start_byte();
4956 break;
4957 } else {
4958 let highlight_id =
4959 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4960 highlights
4961 .stack
4962 .push((capture.node.end_byte(), highlight_id));
4963 highlights.next_capture = highlights.captures.next();
4964 }
4965 }
4966 }
4967
4968 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4969 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4970 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4971 if endpoint.offset <= self.range.start {
4972 self.update_diagnostic_depths(endpoint);
4973 diagnostic_endpoints.next();
4974 self.underline = endpoint.underline;
4975 } else {
4976 next_diagnostic_endpoint = endpoint.offset;
4977 break;
4978 }
4979 }
4980 }
4981 self.diagnostic_endpoints = diagnostic_endpoints;
4982
4983 if let Some(ChunkBitmaps {
4984 text: chunk,
4985 chars: chars_map,
4986 tabs,
4987 }) = self.chunks.peek_tabs()
4988 {
4989 let chunk_start = self.range.start;
4990 let mut chunk_end = (self.chunks.offset() + chunk.len())
4991 .min(next_capture_start)
4992 .min(next_diagnostic_endpoint);
4993 let mut highlight_id = None;
4994 if let Some(highlights) = self.highlights.as_ref()
4995 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
4996 {
4997 chunk_end = chunk_end.min(*parent_capture_end);
4998 highlight_id = Some(*parent_highlight_id);
4999 }
5000
5001 let slice =
5002 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
5003 let bit_end = chunk_end - self.chunks.offset();
5004
5005 let mask = if bit_end >= 128 {
5006 u128::MAX
5007 } else {
5008 (1u128 << bit_end) - 1
5009 };
5010 let tabs = (tabs >> (chunk_start - self.chunks.offset())) & mask;
5011 let chars_map = (chars_map >> (chunk_start - self.chunks.offset())) & mask;
5012
5013 self.range.start = chunk_end;
5014 if self.range.start == self.chunks.offset() + chunk.len() {
5015 self.chunks.next().unwrap();
5016 }
5017
5018 Some(Chunk {
5019 text: slice,
5020 syntax_highlight_id: highlight_id,
5021 underline: self.underline,
5022 diagnostic_severity: self.current_diagnostic_severity(),
5023 is_unnecessary: self.current_code_is_unnecessary(),
5024 tabs,
5025 chars: chars_map,
5026 ..Chunk::default()
5027 })
5028 } else {
5029 None
5030 }
5031 }
5032}
5033
5034impl operation_queue::Operation for Operation {
5035 fn lamport_timestamp(&self) -> clock::Lamport {
5036 match self {
5037 Operation::Buffer(_) => {
5038 unreachable!("buffer operations should never be deferred at this layer")
5039 }
5040 Operation::UpdateDiagnostics {
5041 lamport_timestamp, ..
5042 }
5043 | Operation::UpdateSelections {
5044 lamport_timestamp, ..
5045 }
5046 | Operation::UpdateCompletionTriggers {
5047 lamport_timestamp, ..
5048 }
5049 | Operation::UpdateLineEnding {
5050 lamport_timestamp, ..
5051 } => *lamport_timestamp,
5052 }
5053 }
5054}
5055
5056impl Default for Diagnostic {
5057 fn default() -> Self {
5058 Self {
5059 source: Default::default(),
5060 source_kind: DiagnosticSourceKind::Other,
5061 code: None,
5062 code_description: None,
5063 severity: DiagnosticSeverity::ERROR,
5064 message: Default::default(),
5065 markdown: None,
5066 group_id: 0,
5067 is_primary: false,
5068 is_disk_based: false,
5069 is_unnecessary: false,
5070 underline: true,
5071 data: None,
5072 }
5073 }
5074}
5075
5076impl IndentSize {
5077 /// Returns an [`IndentSize`] representing the given spaces.
5078 pub fn spaces(len: u32) -> Self {
5079 Self {
5080 len,
5081 kind: IndentKind::Space,
5082 }
5083 }
5084
5085 /// Returns an [`IndentSize`] representing a tab.
5086 pub fn tab() -> Self {
5087 Self {
5088 len: 1,
5089 kind: IndentKind::Tab,
5090 }
5091 }
5092
5093 /// An iterator over the characters represented by this [`IndentSize`].
5094 pub fn chars(&self) -> impl Iterator<Item = char> {
5095 iter::repeat(self.char()).take(self.len as usize)
5096 }
5097
5098 /// The character representation of this [`IndentSize`].
5099 pub fn char(&self) -> char {
5100 match self.kind {
5101 IndentKind::Space => ' ',
5102 IndentKind::Tab => '\t',
5103 }
5104 }
5105
5106 /// Consumes the current [`IndentSize`] and returns a new one that has
5107 /// been shrunk or enlarged by the given size along the given direction.
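    ///
    /// # Example
    ///
    /// A minimal sketch:
    ///
    /// ```ignore
    /// // Growing a 4-space indent by another 4 spaces yields 8 columns.
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.len_with_expanded_tabs(NonZeroU32::new(4).unwrap()), 8);
    /// ```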
5108 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5109 match direction {
5110 Ordering::Less => {
5111 if self.kind == size.kind && self.len >= size.len {
5112 self.len -= size.len;
5113 }
5114 }
5115 Ordering::Equal => {}
5116 Ordering::Greater => {
5117 if self.len == 0 {
5118 self = size;
5119 } else if self.kind == size.kind {
5120 self.len += size.len;
5121 }
5122 }
5123 }
5124 self
5125 }
5126
5127 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5128 match self.kind {
5129 IndentKind::Space => self.len as usize,
5130 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5131 }
5132 }
5133}
5134
5135#[cfg(any(test, feature = "test-support"))]
5136pub struct TestFile {
5137 pub path: Arc<Path>,
5138 pub root_name: String,
5139 pub local_root: Option<PathBuf>,
5140}
5141
5142#[cfg(any(test, feature = "test-support"))]
5143impl File for TestFile {
5144 fn path(&self) -> &Arc<Path> {
5145 &self.path
5146 }
5147
5148 fn full_path(&self, _: &gpui::App) -> PathBuf {
5149 PathBuf::from(&self.root_name).join(self.path.as_ref())
5150 }
5151
5152 fn as_local(&self) -> Option<&dyn LocalFile> {
5153 if self.local_root.is_some() {
5154 Some(self)
5155 } else {
5156 None
5157 }
5158 }
5159
5160 fn disk_state(&self) -> DiskState {
5161 unimplemented!()
5162 }
5163
5164 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
5165 self.path().file_name().unwrap_or(self.root_name.as_ref())
5166 }
5167
5168 fn worktree_id(&self, _: &App) -> WorktreeId {
5169 WorktreeId::from_usize(0)
5170 }
5171
5172 fn to_proto(&self, _: &App) -> rpc::proto::File {
5173 unimplemented!()
5174 }
5175
5176 fn is_private(&self) -> bool {
5177 false
5178 }
5179}
5180
5181#[cfg(any(test, feature = "test-support"))]
5182impl LocalFile for TestFile {
5183 fn abs_path(&self, _cx: &App) -> PathBuf {
5184 PathBuf::from(self.local_root.as_ref().unwrap())
5185 .join(&self.root_name)
5186 .join(self.path.as_ref())
5187 }
5188
5189 fn load(&self, _cx: &App) -> Task<Result<String>> {
5190 unimplemented!()
5191 }
5192
5193 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5194 unimplemented!()
5195 }
5196}
5197
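/// Coalesces an ascending sequence of `u32` values into contiguous ranges, capping
/// each range at `max_len` values.
///
/// # Example
///
/// A minimal sketch:
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```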
5198pub(crate) fn contiguous_ranges(
5199 values: impl Iterator<Item = u32>,
5200 max_len: usize,
5201) -> impl Iterator<Item = Range<u32>> {
5202 let mut values = values;
5203 let mut current_range: Option<Range<u32>> = None;
5204 std::iter::from_fn(move || {
5205 loop {
5206 if let Some(value) = values.next() {
5207 if let Some(range) = &mut current_range
5208 && value == range.end
5209 && range.len() < max_len
5210 {
5211 range.end += 1;
5212 continue;
5213 }
5214
5215 let prev_range = current_range.clone();
5216 current_range = Some(value..(value + 1));
5217 if prev_range.is_some() {
5218 return prev_range;
5219 }
5220 } else {
5221 return current_range.take();
5222 }
5223 }
5224 })
5225}
5226
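/// Classifies characters as word, whitespace, or punctuation, optionally taking the
/// word characters of a [`LanguageScope`] into account.
///
/// # Example
///
/// A minimal sketch with no language scope:
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// ```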
5227#[derive(Default, Debug)]
5228pub struct CharClassifier {
5229 scope: Option<LanguageScope>,
5230 scope_context: Option<CharScopeContext>,
5231 ignore_punctuation: bool,
5232}
5233
5234impl CharClassifier {
5235 pub fn new(scope: Option<LanguageScope>) -> Self {
5236 Self {
5237 scope,
5238 scope_context: None,
5239 ignore_punctuation: false,
5240 }
5241 }
5242
5243 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5244 Self {
5245 scope_context,
5246 ..self
5247 }
5248 }
5249
5250 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5251 Self {
5252 ignore_punctuation,
5253 ..self
5254 }
5255 }
5256
5257 pub fn is_whitespace(&self, c: char) -> bool {
5258 self.kind(c) == CharKind::Whitespace
5259 }
5260
5261 pub fn is_word(&self, c: char) -> bool {
5262 self.kind(c) == CharKind::Word
5263 }
5264
5265 pub fn is_punctuation(&self, c: char) -> bool {
5266 self.kind(c) == CharKind::Punctuation
5267 }
5268
5269 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5270 if c.is_alphanumeric() || c == '_' {
5271 return CharKind::Word;
5272 }
5273
5274 if let Some(scope) = &self.scope {
5275 let characters = match self.scope_context {
5276 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5277 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5278 None => scope.word_characters(),
5279 };
5280 if let Some(characters) = characters
5281 && characters.contains(&c)
5282 {
5283 return CharKind::Word;
5284 }
5285 }
5286
5287 if c.is_whitespace() {
5288 return CharKind::Whitespace;
5289 }
5290
5291 if ignore_punctuation {
5292 CharKind::Word
5293 } else {
5294 CharKind::Punctuation
5295 }
5296 }
5297
5298 pub fn kind(&self, c: char) -> CharKind {
5299 self.kind_with(c, self.ignore_punctuation)
5300 }
5301}
5302
5303/// Find all of the ranges of whitespace that occur at the ends of lines
5304/// in the given rope.
5305///
5306/// This could also be done with a regex search, but this implementation
5307/// avoids copying text.
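///
/// # Example
///
/// A minimal sketch (not a doctest), assuming `Rope` implements `From<&str>`:
///
/// ```ignore
/// // The two trailing spaces after the semicolon occupy bytes 10..12.
/// let rope = Rope::from("let x = 1;  \n");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![10..12]);
/// ```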
5308pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5309 let mut ranges = Vec::new();
5310
5311 let mut offset = 0;
5312 let mut prev_chunk_trailing_whitespace_range = 0..0;
5313 for chunk in rope.chunks() {
5314 let mut prev_line_trailing_whitespace_range = 0..0;
5315 for (i, line) in chunk.split('\n').enumerate() {
5316 let line_end_offset = offset + line.len();
5317 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5318 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5319
5320 if i == 0 && trimmed_line_len == 0 {
5321 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5322 }
5323 if !prev_line_trailing_whitespace_range.is_empty() {
5324 ranges.push(prev_line_trailing_whitespace_range);
5325 }
5326
5327 offset = line_end_offset + 1;
5328 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5329 }
5330
5331 offset -= 1;
5332 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5333 }
5334
5335 if !prev_chunk_trailing_whitespace_range.is_empty() {
5336 ranges.push(prev_chunk_trailing_whitespace_range);
5337 }
5338
5339 ranges
5340}