1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] has permission to edit.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell holds `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
177pub enum CursorShape {
178 /// A vertical bar
179 #[default]
180 Bar,
181 /// A block that surrounds the following character
182 Block,
183 /// An underline that runs along the following character
184 Underline,
185 /// A box drawn around the following character
186 Hollow,
187}
188
189impl From<settings::CursorShape> for CursorShape {
190 fn from(shape: settings::CursorShape) -> Self {
191 match shape {
192 settings::CursorShape::Bar => CursorShape::Bar,
193 settings::CursorShape::Block => CursorShape::Block,
194 settings::CursorShape::Underline => CursorShape::Underline,
195 settings::CursorShape::Hollow => CursorShape::Hollow,
196 }
197 }
198}
199
200#[derive(Clone, Debug)]
201struct SelectionSet {
202 line_mode: bool,
203 cursor_shape: CursorShape,
204 selections: Arc<[Selection<Anchor>]>,
205 lamport_timestamp: clock::Lamport,
206}
207
208/// A diagnostic associated with a certain range of a buffer.
209#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
210pub struct Diagnostic {
211 /// The name of the service that produced this diagnostic.
212 pub source: Option<String>,
213 /// A machine-readable code that identifies this diagnostic.
214 pub code: Option<NumberOrString>,
215 pub code_description: Option<lsp::Uri>,
216 /// Whether this diagnostic is a hint, warning, or error.
217 pub severity: DiagnosticSeverity,
218 /// The human-readable message associated with this diagnostic.
219 pub message: String,
    /// The human-readable message (in Markdown format).
221 pub markdown: Option<String>,
222 /// An id that identifies the group to which this diagnostic belongs.
223 ///
224 /// When a language server produces a diagnostic with
225 /// one or more associated diagnostics, those diagnostics are all
226 /// assigned a single group ID.
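    ///
    /// For example (a hedged sketch with hypothetical `error` and `note`
    /// diagnostics reported in the same language-server response):
    ///
    /// ```ignore
    /// assert_eq!(error.group_id, note.group_id);
    /// assert!(error.is_primary && !note.is_primary);
    /// ```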
227 pub group_id: usize,
228 /// Whether this diagnostic is the primary diagnostic for its group.
229 ///
230 /// In a given group, the primary diagnostic is the top-level diagnostic
231 /// returned by the language server. The non-primary diagnostics are the
232 /// associated diagnostics.
233 pub is_primary: bool,
234 /// Whether this diagnostic is considered to originate from an analysis of
235 /// files on disk, as opposed to any unsaved buffer contents. This is a
236 /// property of a given diagnostic source, and is configured for a given
237 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
238 /// for the language server.
239 pub is_disk_based: bool,
240 /// Whether this diagnostic marks unnecessary code.
241 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups based on their source.
243 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for this diagnostic.
245 pub data: Option<Value>,
246 /// Whether to underline the corresponding text range in the editor.
247 pub underline: bool,
248}
249
250#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
251pub enum DiagnosticSourceKind {
252 Pulled,
253 Pushed,
254 Other,
255}
256
257/// An operation used to synchronize this buffer with its other replicas.
258#[derive(Clone, Debug, PartialEq)]
259pub enum Operation {
260 /// A text operation.
261 Buffer(text::Operation),
262
263 /// An update to the buffer's diagnostics.
264 UpdateDiagnostics {
265 /// The id of the language server that produced the new diagnostics.
266 server_id: LanguageServerId,
267 /// The diagnostics.
268 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
269 /// The buffer's lamport timestamp.
270 lamport_timestamp: clock::Lamport,
271 },
272
273 /// An update to the most recent selections in this buffer.
274 UpdateSelections {
275 /// The selections.
276 selections: Arc<[Selection<Anchor>]>,
277 /// The buffer's lamport timestamp.
278 lamport_timestamp: clock::Lamport,
279 /// Whether the selections are in 'line mode'.
280 line_mode: bool,
281 /// The [`CursorShape`] associated with these selections.
282 cursor_shape: CursorShape,
283 },
284
285 /// An update to the characters that should trigger autocompletion
286 /// for this buffer.
287 UpdateCompletionTriggers {
288 /// The characters that trigger autocompletion.
289 triggers: Vec<String>,
290 /// The buffer's lamport timestamp.
291 lamport_timestamp: clock::Lamport,
292 /// The language server ID.
293 server_id: LanguageServerId,
294 },
295
296 /// An update to the line ending type of this buffer.
297 UpdateLineEnding {
298 /// The line ending type.
299 line_ending: LineEnding,
300 /// The buffer's lamport timestamp.
301 lamport_timestamp: clock::Lamport,
302 },
303}
304
305/// An event that occurs in a buffer.
306#[derive(Clone, Debug, PartialEq)]
307pub enum BufferEvent {
308 /// The buffer was changed in a way that must be
309 /// propagated to its other replicas.
310 Operation {
311 operation: Operation,
312 is_local: bool,
313 },
314 /// The buffer was edited.
315 Edited,
316 /// The buffer's `dirty` bit changed.
317 DirtyChanged,
318 /// The buffer was saved.
319 Saved,
320 /// The buffer's file was changed on disk.
321 FileHandleChanged,
322 /// The buffer was reloaded.
323 Reloaded,
324 /// The buffer is in need of a reload
325 ReloadNeeded,
326 /// The buffer's language was changed.
327 LanguageChanged,
328 /// The buffer's syntax trees were updated.
329 Reparsed,
330 /// The buffer's diagnostics were updated.
331 DiagnosticsUpdated,
332 /// The buffer gained or lost editing capabilities.
333 CapabilityChanged,
334}
335
336/// The file associated with a buffer.
337pub trait File: Send + Sync + Any {
338 /// Returns the [`LocalFile`] associated with this file, if the
339 /// file is local.
340 fn as_local(&self) -> Option<&dyn LocalFile>;
341
342 /// Returns whether this file is local.
343 fn is_local(&self) -> bool {
344 self.as_local().is_some()
345 }
346
347 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
348 /// only available in some states, such as modification time.
349 fn disk_state(&self) -> DiskState;
350
351 /// Returns the path of this file relative to the worktree's root directory.
352 fn path(&self) -> &Arc<Path>;
353
354 /// Returns the path of this file relative to the worktree's parent directory (this means it
355 /// includes the name of the worktree's root folder).
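    ///
    /// For example (hypothetical paths), with a worktree rooted at `~/work/zed`,
    /// the file `~/work/zed/src/lib.rs` would report:
    ///
    /// ```ignore
    /// assert_eq!(file.path().as_ref(), Path::new("src/lib.rs"));
    /// assert_eq!(file.full_path(cx), PathBuf::from("zed/src/lib.rs"));
    /// ```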
356 fn full_path(&self, cx: &App) -> PathBuf;
357
358 /// Returns the last component of this handle's absolute path. If this handle refers to the root
359 /// of its worktree, then this method will return the name of the worktree itself.
360 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
361
362 /// Returns the id of the worktree to which this file belongs.
363 ///
364 /// This is needed for looking up project-specific settings.
365 fn worktree_id(&self, cx: &App) -> WorktreeId;
366
367 /// Converts this file into a protobuf message.
368 fn to_proto(&self, cx: &App) -> rpc::proto::File;
369
370 /// Return whether Zed considers this to be a private file.
371 fn is_private(&self) -> bool;
372}
373
374/// The file's storage status - whether it's stored (`Present`), and if so when it was last
375/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
376/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
377/// indicator for new files.
378#[derive(Copy, Clone, Debug, PartialEq)]
379pub enum DiskState {
380 /// File created in Zed that has not been saved.
381 New,
382 /// File present on the filesystem.
383 Present { mtime: MTime },
384 /// Deleted file that was previously present.
385 Deleted,
386}
387
388impl DiskState {
389 /// Returns the file's last known modification time on disk.
390 pub fn mtime(self) -> Option<MTime> {
391 match self {
392 DiskState::New => None,
393 DiskState::Present { mtime } => Some(mtime),
394 DiskState::Deleted => None,
395 }
396 }
397
398 pub fn exists(&self) -> bool {
399 match self {
400 DiskState::New => false,
401 DiskState::Present { .. } => true,
402 DiskState::Deleted => false,
403 }
404 }
405}
406
407/// The file associated with a buffer, in the case where the file is on the local disk.
408pub trait LocalFile: File {
409 /// Returns the absolute path of this file
410 fn abs_path(&self, cx: &App) -> PathBuf;
411
412 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
413 fn load(&self, cx: &App) -> Task<Result<String>>;
414
415 /// Loads the file's contents from disk.
416 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
417}
418
419/// The auto-indent behavior associated with an editing operation.
420/// For some editing operations, each affected line of text has its
421/// indentation recomputed. For other operations, the entire block
422/// of edited text is adjusted uniformly.
423#[derive(Clone, Debug)]
424pub enum AutoindentMode {
425 /// Indent each line of inserted text.
426 EachLine,
427 /// Apply the same indentation adjustment to all of the lines
428 /// in a given insertion.
429 Block {
430 /// The original indentation column of the first line of each
431 /// insertion, if it has been copied.
432 ///
433 /// Knowing this makes it possible to preserve the relative indentation
434 /// of every line in the insertion from when it was copied.
435 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion has its indentation adjusted by the same delta, `b - a`.
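        ///
        /// For example (a hedged sketch; `insert_at` and `copied_text` are
        /// hypothetical, but `Buffer::edit` is called with this shape elsewhere
        /// in this file):
        ///
        /// ```ignore
        /// buffer.edit(
        ///     [(insert_at..insert_at, copied_text)],
        ///     Some(AutoindentMode::Block {
        ///         // The copied block originally started at column 4.
        ///         original_indent_columns: vec![Some(4)],
        ///     }),
        ///     cx,
        /// );
        /// // If the first inserted line is auto-indented to column 8, every other
        /// // inserted line is shifted right by 8 - 4 = 4 columns.
        /// ```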
439 original_indent_columns: Vec<Option<u32>>,
440 },
441}
442
443#[derive(Clone)]
444struct AutoindentRequest {
445 before_edit: BufferSnapshot,
446 entries: Vec<AutoindentRequestEntry>,
447 is_block_mode: bool,
448 ignore_empty_lines: bool,
449}
450
451#[derive(Debug, Clone)]
452struct AutoindentRequestEntry {
453 /// A range of the buffer whose indentation should be adjusted.
454 range: Range<Anchor>,
455 /// Whether or not these lines should be considered brand new, for the
456 /// purpose of auto-indent. When text is not new, its indentation will
457 /// only be adjusted if the suggested indentation level has *changed*
458 /// since the edit was made.
459 first_line_is_new: bool,
460 indent_size: IndentSize,
461 original_indent_column: Option<u32>,
462}
463
464#[derive(Debug)]
465struct IndentSuggestion {
466 basis_row: u32,
467 delta: Ordering,
468 within_error: bool,
469}
470
471struct BufferChunkHighlights<'a> {
472 captures: SyntaxMapCaptures<'a>,
473 next_capture: Option<SyntaxMapCapture<'a>>,
474 stack: Vec<(usize, HighlightId)>,
475 highlight_maps: Vec<HighlightMap>,
476}
477
478/// An iterator that yields chunks of a buffer's text, along with their
479/// syntax highlights and diagnostic status.
480pub struct BufferChunks<'a> {
481 buffer_snapshot: Option<&'a BufferSnapshot>,
482 range: Range<usize>,
483 chunks: text::Chunks<'a>,
484 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
485 error_depth: usize,
486 warning_depth: usize,
487 information_depth: usize,
488 hint_depth: usize,
489 unnecessary_depth: usize,
490 underline: bool,
491 highlights: Option<BufferChunkHighlights<'a>>,
492}
493
494/// A chunk of a buffer's text, along with its syntax highlight and
495/// diagnostic status.
496#[derive(Clone, Debug, Default)]
497pub struct Chunk<'a> {
498 /// The text of the chunk.
499 pub text: &'a str,
500 /// The syntax highlighting style of the chunk.
501 pub syntax_highlight_id: Option<HighlightId>,
502 /// The highlight style that has been applied to this chunk in
503 /// the editor.
504 pub highlight_style: Option<HighlightStyle>,
505 /// The severity of diagnostic associated with this chunk, if any.
506 pub diagnostic_severity: Option<DiagnosticSeverity>,
507 /// Whether this chunk of text is marked as unnecessary.
508 pub is_unnecessary: bool,
509 /// Whether this chunk of text was originally a tab character.
510 pub is_tab: bool,
511 /// A bitset of which characters are tabs in this string.
512 pub tabs: u128,
    /// A bitmap of character indices in this chunk.
    pub chars: u128,
    /// Whether this chunk of text comes from an inlay.
516 pub is_inlay: bool,
517 /// Whether to underline the corresponding text range in the editor.
518 pub underline: bool,
519}
520
521/// A set of edits to a given version of a buffer, computed asynchronously.
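///
/// Typically produced by [`Buffer::diff`] and consumed by [`Buffer::apply_diff`],
/// as in this hedged sketch of the reload flow:
///
/// ```ignore
/// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx)).await;
/// buffer.update(cx, |buffer, cx| {
///     // Only apply the diff if no edits landed while it was being computed.
///     if buffer.version() == diff.base_version {
///         buffer.apply_diff(diff, cx);
///     }
/// });
/// ```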
522#[derive(Debug)]
523pub struct Diff {
524 pub base_version: clock::Global,
525 pub line_ending: LineEnding,
526 pub edits: Vec<(Range<usize>, Arc<str>)>,
527}
528
529#[derive(Debug, Clone, Copy)]
530pub(crate) struct DiagnosticEndpoint {
531 offset: usize,
532 is_start: bool,
533 underline: bool,
534 severity: DiagnosticSeverity,
535 is_unnecessary: bool,
536}
537
538/// A class of characters, used for characterizing a run of text.
539#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
540pub enum CharKind {
541 /// Whitespace.
542 Whitespace,
543 /// Punctuation.
544 Punctuation,
545 /// Word.
546 Word,
547}
548
549/// A runnable is a set of data about a region that could be resolved into a task
550pub struct Runnable {
551 pub tags: SmallVec<[RunnableTag; 1]>,
552 pub language: Arc<Language>,
553 pub buffer: BufferId,
554}
555
556#[derive(Default, Clone, Debug)]
557pub struct HighlightedText {
558 pub text: SharedString,
559 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
560}
561
562#[derive(Default, Debug)]
563struct HighlightedTextBuilder {
564 pub text: String,
565 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
566}
567
568impl HighlightedText {
569 pub fn from_buffer_range<T: ToOffset>(
570 range: Range<T>,
571 snapshot: &text::BufferSnapshot,
572 syntax_snapshot: &SyntaxSnapshot,
573 override_style: Option<HighlightStyle>,
574 syntax_theme: &SyntaxTheme,
575 ) -> Self {
576 let mut highlighted_text = HighlightedTextBuilder::default();
577 highlighted_text.add_text_from_buffer_range(
578 range,
579 snapshot,
580 syntax_snapshot,
581 override_style,
582 syntax_theme,
583 );
584 highlighted_text.build()
585 }
586
587 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
588 gpui::StyledText::new(self.text.clone())
589 .with_default_highlights(default_style, self.highlights.iter().cloned())
590 }
591
    /// Returns the first line, with leading whitespace trimmed unless a
    /// highlight begins within it, along with a boolean indicating whether
    /// more lines follow.
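    ///
    /// A hedged sketch with a hypothetical value:
    ///
    /// ```ignore
    /// let text = HighlightedText {
    ///     text: "    let x = 1;\n    let y = 2;".into(),
    ///     highlights: Vec::new(),
    /// };
    /// let (preview, has_more) = text.first_line_preview();
    /// assert_eq!(preview.text.as_ref(), "let x = 1;");
    /// assert!(has_more);
    /// ```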
594 pub fn first_line_preview(self) -> (Self, bool) {
595 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
596 let first_line = &self.text[..newline_ix];
597
598 // Trim leading whitespace, unless an edit starts prior to it.
599 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
600 if let Some((first_highlight_range, _)) = self.highlights.first() {
601 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
602 }
603
604 let preview_text = &first_line[preview_start_ix..];
605 let preview_highlights = self
606 .highlights
607 .into_iter()
608 .take_while(|(range, _)| range.start < newline_ix)
609 .filter_map(|(mut range, highlight)| {
610 range.start = range.start.saturating_sub(preview_start_ix);
611 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
612 if range.is_empty() {
613 None
614 } else {
615 Some((range, highlight))
616 }
617 });
618
619 let preview = Self {
620 text: SharedString::new(preview_text),
621 highlights: preview_highlights.collect(),
622 };
623
624 (preview, self.text.len() > newline_ix)
625 }
626}
627
628impl HighlightedTextBuilder {
629 pub fn build(self) -> HighlightedText {
630 HighlightedText {
631 text: self.text.into(),
632 highlights: self.highlights,
633 }
634 }
635
636 pub fn add_text_from_buffer_range<T: ToOffset>(
637 &mut self,
638 range: Range<T>,
639 snapshot: &text::BufferSnapshot,
640 syntax_snapshot: &SyntaxSnapshot,
641 override_style: Option<HighlightStyle>,
642 syntax_theme: &SyntaxTheme,
643 ) {
644 let range = range.to_offset(snapshot);
645 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
646 let start = self.text.len();
647 self.text.push_str(chunk.text);
648 let end = self.text.len();
649
650 if let Some(highlight_style) = chunk
651 .syntax_highlight_id
652 .and_then(|id| id.style(syntax_theme))
653 {
654 let highlight_style = override_style.map_or(highlight_style, |override_style| {
655 highlight_style.highlight(override_style)
656 });
657 self.highlights.push((start..end, highlight_style));
658 } else if let Some(override_style) = override_style {
659 self.highlights.push((start..end, override_style));
660 }
661 }
662 }
663
664 fn highlighted_chunks<'a>(
665 range: Range<usize>,
666 snapshot: &'a text::BufferSnapshot,
667 syntax_snapshot: &'a SyntaxSnapshot,
668 ) -> BufferChunks<'a> {
669 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
670 grammar
671 .highlights_config
672 .as_ref()
673 .map(|config| &config.query)
674 });
675
676 let highlight_maps = captures
677 .grammars()
678 .iter()
679 .map(|grammar| grammar.highlight_map())
680 .collect();
681
682 BufferChunks::new(
683 snapshot.as_rope(),
684 range,
685 Some((captures, highlight_maps)),
686 false,
687 None,
688 )
689 }
690}
691
692#[derive(Clone)]
693pub struct EditPreview {
694 old_snapshot: text::BufferSnapshot,
695 applied_edits_snapshot: text::BufferSnapshot,
696 syntax_snapshot: SyntaxSnapshot,
697}
698
699impl EditPreview {
700 pub fn highlight_edits(
701 &self,
702 current_snapshot: &BufferSnapshot,
703 edits: &[(Range<Anchor>, String)],
704 include_deletions: bool,
705 cx: &App,
706 ) -> HighlightedText {
707 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
708 return HighlightedText::default();
709 };
710
711 let mut highlighted_text = HighlightedTextBuilder::default();
712
713 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
714
715 let insertion_highlight_style = HighlightStyle {
716 background_color: Some(cx.theme().status().created_background),
717 ..Default::default()
718 };
719 let deletion_highlight_style = HighlightStyle {
720 background_color: Some(cx.theme().status().deleted_background),
721 ..Default::default()
722 };
723 let syntax_theme = cx.theme().syntax();
724
725 for (range, edit_text) in edits {
726 let edit_new_end_in_preview_snapshot = range
727 .end
728 .bias_right(&self.old_snapshot)
729 .to_offset(&self.applied_edits_snapshot);
730 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
731
732 let unchanged_range_in_preview_snapshot =
733 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
734 if !unchanged_range_in_preview_snapshot.is_empty() {
735 highlighted_text.add_text_from_buffer_range(
736 unchanged_range_in_preview_snapshot,
737 &self.applied_edits_snapshot,
738 &self.syntax_snapshot,
739 None,
740 syntax_theme,
741 );
742 }
743
744 let range_in_current_snapshot = range.to_offset(current_snapshot);
745 if include_deletions && !range_in_current_snapshot.is_empty() {
746 highlighted_text.add_text_from_buffer_range(
747 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
750 Some(deletion_highlight_style),
751 syntax_theme,
752 );
753 }
754
755 if !edit_text.is_empty() {
756 highlighted_text.add_text_from_buffer_range(
757 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
758 &self.applied_edits_snapshot,
759 &self.syntax_snapshot,
760 Some(insertion_highlight_style),
761 syntax_theme,
762 );
763 }
764
765 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
766 }
767
768 highlighted_text.add_text_from_buffer_range(
769 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
770 &self.applied_edits_snapshot,
771 &self.syntax_snapshot,
772 None,
773 syntax_theme,
774 );
775
776 highlighted_text.build()
777 }
778
779 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
780 let (first, _) = edits.first()?;
781 let (last, _) = edits.last()?;
782
783 let start = first
784 .start
785 .bias_left(&self.old_snapshot)
786 .to_point(&self.applied_edits_snapshot);
787 let end = last
788 .end
789 .bias_right(&self.old_snapshot)
790 .to_point(&self.applied_edits_snapshot);
791
792 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
793 let range = Point::new(start.row, 0)
794 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
795
796 Some(range.to_offset(&self.applied_edits_snapshot))
797 }
798}
799
800#[derive(Clone, Debug, PartialEq, Eq)]
801pub struct BracketMatch {
802 pub open_range: Range<usize>,
803 pub close_range: Range<usize>,
804 pub newline_only: bool,
805}
806
807impl Buffer {
808 /// Create a new buffer with the given base text.
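    ///
    /// A hedged construction sketch (`cx` is assumed to be a context that can
    /// create entities):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```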
809 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
810 Self::build(
811 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
812 None,
813 Capability::ReadWrite,
814 )
815 }
816
817 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
818 pub fn local_normalized(
819 base_text_normalized: Rope,
820 line_ending: LineEnding,
821 cx: &Context<Self>,
822 ) -> Self {
823 Self::build(
824 TextBuffer::new_normalized(
825 0,
826 cx.entity_id().as_non_zero_u64().into(),
827 line_ending,
828 base_text_normalized,
829 ),
830 None,
831 Capability::ReadWrite,
832 )
833 }
834
835 /// Create a new buffer that is a replica of a remote buffer.
836 pub fn remote(
837 remote_id: BufferId,
838 replica_id: ReplicaId,
839 capability: Capability,
840 base_text: impl Into<String>,
841 ) -> Self {
842 Self::build(
843 TextBuffer::new(replica_id, remote_id, base_text.into()),
844 None,
845 capability,
846 )
847 }
848
849 /// Create a new buffer that is a replica of a remote buffer, populating its
850 /// state from the given protobuf message.
851 pub fn from_proto(
852 replica_id: ReplicaId,
853 capability: Capability,
854 message: proto::BufferState,
855 file: Option<Arc<dyn File>>,
856 ) -> Result<Self> {
857 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
858 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
859 let mut this = Self::build(buffer, file, capability);
860 this.text.set_line_ending(proto::deserialize_line_ending(
861 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
862 ));
863 this.saved_version = proto::deserialize_version(&message.saved_version);
864 this.saved_mtime = message.saved_mtime.map(|time| time.into());
865 Ok(this)
866 }
867
868 /// Serialize the buffer's state to a protobuf message.
869 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
870 proto::BufferState {
871 id: self.remote_id().into(),
872 file: self.file.as_ref().map(|f| f.to_proto(cx)),
873 base_text: self.base_text().to_string(),
874 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
875 saved_version: proto::serialize_version(&self.saved_version),
876 saved_mtime: self.saved_mtime.map(|time| time.into()),
877 }
878 }
879
880 /// Serialize as protobufs all of the changes to the buffer since the given version.
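    ///
    /// A hedged replication sketch: send the full state once, then stream only
    /// the operations the remote replica has not observed (`guest_version` is a
    /// hypothetical `clock::Global` received from the remote side):
    ///
    /// ```ignore
    /// let state = buffer.to_proto(cx);
    /// let ops = buffer.serialize_ops(Some(guest_version), cx).await;
    /// // `state` and `ops` can now be sent over the wire and replayed remotely.
    /// ```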
881 pub fn serialize_ops(
882 &self,
883 since: Option<clock::Global>,
884 cx: &App,
885 ) -> Task<Vec<proto::Operation>> {
886 let mut operations = Vec::new();
887 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
888
889 operations.extend(self.remote_selections.iter().map(|(_, set)| {
890 proto::serialize_operation(&Operation::UpdateSelections {
891 selections: set.selections.clone(),
892 lamport_timestamp: set.lamport_timestamp,
893 line_mode: set.line_mode,
894 cursor_shape: set.cursor_shape,
895 })
896 }));
897
898 for (server_id, diagnostics) in &self.diagnostics {
899 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
900 lamport_timestamp: self.diagnostics_timestamp,
901 server_id: *server_id,
902 diagnostics: diagnostics.iter().cloned().collect(),
903 }));
904 }
905
906 for (server_id, completions) in &self.completion_triggers_per_language_server {
907 operations.push(proto::serialize_operation(
908 &Operation::UpdateCompletionTriggers {
909 triggers: completions.iter().cloned().collect(),
910 lamport_timestamp: self.completion_triggers_timestamp,
911 server_id: *server_id,
912 },
913 ));
914 }
915
916 let text_operations = self.text.operations().clone();
917 cx.background_spawn(async move {
918 let since = since.unwrap_or_default();
919 operations.extend(
920 text_operations
921 .iter()
922 .filter(|(_, op)| !since.observed(op.timestamp()))
923 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
924 );
925 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
926 operations
927 })
928 }
929
930 /// Assign a language to the buffer, returning the buffer.
931 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
932 self.set_language(Some(language), cx);
933 self
934 }
935
936 /// Returns the [`Capability`] of this buffer.
937 pub fn capability(&self) -> Capability {
938 self.capability
939 }
940
941 /// Whether this buffer can only be read.
942 pub fn read_only(&self) -> bool {
943 self.capability == Capability::ReadOnly
944 }
945
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
947 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
948 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
949 let snapshot = buffer.snapshot();
950 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
951 Self {
952 saved_mtime,
953 saved_version: buffer.version(),
954 preview_version: buffer.version(),
955 reload_task: None,
956 transaction_depth: 0,
957 was_dirty_before_starting_transaction: None,
958 has_unsaved_edits: Cell::new((buffer.version(), false)),
959 text: buffer,
960 branch_state: None,
961 file,
962 capability,
963 syntax_map,
964 reparse: None,
965 non_text_state_update_count: 0,
966 sync_parse_timeout: Duration::from_millis(1),
967 parse_status: watch::channel(ParseStatus::Idle),
968 autoindent_requests: Default::default(),
969 wait_for_autoindent_txs: Default::default(),
970 pending_autoindent: Default::default(),
971 language: None,
972 remote_selections: Default::default(),
973 diagnostics: Default::default(),
974 diagnostics_timestamp: Default::default(),
975 completion_triggers: Default::default(),
976 completion_triggers_per_language_server: Default::default(),
977 completion_triggers_timestamp: Default::default(),
978 deferred_ops: OperationQueue::new(),
979 has_conflict: false,
980 change_bits: Default::default(),
981 _subscriptions: Vec::new(),
982 }
983 }
984
985 pub fn build_snapshot(
986 text: Rope,
987 language: Option<Arc<Language>>,
988 language_registry: Option<Arc<LanguageRegistry>>,
989 cx: &mut App,
990 ) -> impl Future<Output = BufferSnapshot> + use<> {
991 let entity_id = cx.reserve_entity::<Self>().entity_id();
992 let buffer_id = entity_id.as_non_zero_u64().into();
993 async move {
994 let text =
995 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
996 let mut syntax = SyntaxMap::new(&text).snapshot();
997 if let Some(language) = language.clone() {
998 let language_registry = language_registry.clone();
999 syntax.reparse(&text, language_registry, language);
1000 }
1001 BufferSnapshot {
1002 text,
1003 syntax,
1004 file: None,
1005 diagnostics: Default::default(),
1006 remote_selections: Default::default(),
1007 language,
1008 non_text_state_update_count: 0,
1009 }
1010 }
1011 }
1012
1013 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1014 let entity_id = cx.reserve_entity::<Self>().entity_id();
1015 let buffer_id = entity_id.as_non_zero_u64().into();
1016 let text =
1017 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
1018 let syntax = SyntaxMap::new(&text).snapshot();
1019 BufferSnapshot {
1020 text,
1021 syntax,
1022 file: None,
1023 diagnostics: Default::default(),
1024 remote_selections: Default::default(),
1025 language: None,
1026 non_text_state_update_count: 0,
1027 }
1028 }
1029
1030 #[cfg(any(test, feature = "test-support"))]
1031 pub fn build_snapshot_sync(
1032 text: Rope,
1033 language: Option<Arc<Language>>,
1034 language_registry: Option<Arc<LanguageRegistry>>,
1035 cx: &mut App,
1036 ) -> BufferSnapshot {
1037 let entity_id = cx.reserve_entity::<Self>().entity_id();
1038 let buffer_id = entity_id.as_non_zero_u64().into();
1039 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1040 let mut syntax = SyntaxMap::new(&text).snapshot();
1041 if let Some(language) = language.clone() {
1042 syntax.reparse(&text, language_registry, language);
1043 }
1044 BufferSnapshot {
1045 text,
1046 syntax,
1047 file: None,
1048 diagnostics: Default::default(),
1049 remote_selections: Default::default(),
1050 language,
1051 non_text_state_update_count: 0,
1052 }
1053 }
1054
1055 /// Retrieve a snapshot of the buffer's current state. This is computationally
1056 /// cheap, and allows reading from the buffer on a background thread.
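    ///
    /// A hedged sketch of reading on a background thread:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is immutable, so this work is safe off the main thread.
    ///     let _text = snapshot.text();
    /// })
    /// .detach();
    /// ```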
1057 pub fn snapshot(&self) -> BufferSnapshot {
1058 let text = self.text.snapshot();
1059 let mut syntax_map = self.syntax_map.lock();
1060 syntax_map.interpolate(&text);
1061 let syntax = syntax_map.snapshot();
1062
1063 BufferSnapshot {
1064 text,
1065 syntax,
1066 file: self.file.clone(),
1067 remote_selections: self.remote_selections.clone(),
1068 diagnostics: self.diagnostics.clone(),
1069 language: self.language.clone(),
1070 non_text_state_update_count: self.non_text_state_update_count,
1071 }
1072 }
1073
1074 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1075 let this = cx.entity();
1076 cx.new(|cx| {
1077 let mut branch = Self {
1078 branch_state: Some(BufferBranchState {
1079 base_buffer: this.clone(),
1080 merged_operations: Default::default(),
1081 }),
1082 language: self.language.clone(),
1083 has_conflict: self.has_conflict,
1084 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1085 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1086 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1087 };
1088 if let Some(language_registry) = self.language_registry() {
1089 branch.set_language_registry(language_registry);
1090 }
1091
1092 // Reparse the branch buffer so that we get syntax highlighting immediately.
1093 branch.reparse(cx);
1094
1095 branch
1096 })
1097 }
1098
1099 pub fn preview_edits(
1100 &self,
1101 edits: Arc<[(Range<Anchor>, String)]>,
1102 cx: &App,
1103 ) -> Task<EditPreview> {
1104 let registry = self.language_registry();
1105 let language = self.language().cloned();
1106 let old_snapshot = self.text.snapshot();
1107 let mut branch_buffer = self.text.branch();
1108 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1109 cx.background_spawn(async move {
1110 if !edits.is_empty() {
1111 if let Some(language) = language.clone() {
1112 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1113 }
1114
1115 branch_buffer.edit(edits.iter().cloned());
1116 let snapshot = branch_buffer.snapshot();
1117 syntax_snapshot.interpolate(&snapshot);
1118
1119 if let Some(language) = language {
1120 syntax_snapshot.reparse(&snapshot, registry, language);
1121 }
1122 }
1123 EditPreview {
1124 old_snapshot,
1125 applied_edits_snapshot: branch_buffer.snapshot(),
1126 syntax_snapshot,
1127 }
1128 })
1129 }
1130
1131 /// Applies all of the changes in this buffer that intersect any of the
1132 /// given `ranges` to its base buffer.
1133 ///
1134 /// If `ranges` is empty, then all changes will be applied. This buffer must
1135 /// be a branch buffer to call this method.
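    ///
    /// A hedged sketch of the branch-and-merge flow:
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// staged change\n")], None, cx);
    ///     // An empty `ranges` vec merges every change into the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```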
1136 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1137 let Some(base_buffer) = self.base_buffer() else {
1138 debug_panic!("not a branch buffer");
1139 return;
1140 };
1141
1142 let mut ranges = if ranges.is_empty() {
1143 &[0..usize::MAX]
1144 } else {
1145 ranges.as_slice()
1146 }
1147 .iter()
1148 .peekable();
1149
1150 let mut edits = Vec::new();
1151 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1152 let mut is_included = false;
1153 while let Some(range) = ranges.peek() {
1154 if range.end < edit.new.start {
1155 ranges.next().unwrap();
1156 } else {
1157 if range.start <= edit.new.end {
1158 is_included = true;
1159 }
1160 break;
1161 }
1162 }
1163
1164 if is_included {
1165 edits.push((
1166 edit.old.clone(),
1167 self.text_for_range(edit.new.clone()).collect::<String>(),
1168 ));
1169 }
1170 }
1171
1172 let operation = base_buffer.update(cx, |base_buffer, cx| {
1173 // cx.emit(BufferEvent::DiffBaseChanged);
1174 base_buffer.edit(edits, None, cx)
1175 });
1176
1177 if let Some(operation) = operation
1178 && let Some(BufferBranchState {
1179 merged_operations, ..
1180 }) = &mut self.branch_state
1181 {
1182 merged_operations.push(operation);
1183 }
1184 }
1185
1186 fn on_base_buffer_event(
1187 &mut self,
1188 _: Entity<Buffer>,
1189 event: &BufferEvent,
1190 cx: &mut Context<Self>,
1191 ) {
1192 let BufferEvent::Operation { operation, .. } = event else {
1193 return;
1194 };
1195 let Some(BufferBranchState {
1196 merged_operations, ..
1197 }) = &mut self.branch_state
1198 else {
1199 return;
1200 };
1201
1202 let mut operation_to_undo = None;
1203 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1204 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1205 {
1206 merged_operations.remove(ix);
1207 operation_to_undo = Some(operation.timestamp);
1208 }
1209
1210 self.apply_ops([operation.clone()], cx);
1211
1212 if let Some(timestamp) = operation_to_undo {
1213 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1214 self.undo_operations(counts, cx);
1215 }
1216 }
1217
1218 #[cfg(test)]
1219 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1220 &self.text
1221 }
1222
1223 /// Retrieve a snapshot of the buffer's raw text, without any
1224 /// language-related state like the syntax tree or diagnostics.
1225 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1226 self.text.snapshot()
1227 }
1228
1229 /// The file associated with the buffer, if any.
1230 pub fn file(&self) -> Option<&Arc<dyn File>> {
1231 self.file.as_ref()
1232 }
1233
1234 /// The version of the buffer that was last saved or reloaded from disk.
1235 pub fn saved_version(&self) -> &clock::Global {
1236 &self.saved_version
1237 }
1238
1239 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1240 pub fn saved_mtime(&self) -> Option<MTime> {
1241 self.saved_mtime
1242 }
1243
1244 /// Assign a language to the buffer.
1245 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1246 self.non_text_state_update_count += 1;
1247 self.syntax_map.lock().clear(&self.text);
1248 self.language = language;
1249 self.was_changed();
1250 self.reparse(cx);
1251 cx.emit(BufferEvent::LanguageChanged);
1252 }
1253
1254 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1255 /// other languages if parts of the buffer are written in different languages.
1256 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1257 self.syntax_map
1258 .lock()
1259 .set_language_registry(language_registry);
1260 }
1261
1262 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1263 self.syntax_map.lock().language_registry()
1264 }
1265
1266 /// Assign the line ending type to the buffer.
1267 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1268 self.text.set_line_ending(line_ending);
1269
1270 let lamport_timestamp = self.text.lamport_clock.tick();
1271 self.send_operation(
1272 Operation::UpdateLineEnding {
1273 line_ending,
1274 lamport_timestamp,
1275 },
1276 true,
1277 cx,
1278 );
1279 }
1280
1281 /// Assign the buffer a new [`Capability`].
1282 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1283 if self.capability != capability {
1284 self.capability = capability;
1285 cx.emit(BufferEvent::CapabilityChanged)
1286 }
1287 }
1288
1289 /// This method is called to signal that the buffer has been saved.
1290 pub fn did_save(
1291 &mut self,
1292 version: clock::Global,
1293 mtime: Option<MTime>,
1294 cx: &mut Context<Self>,
1295 ) {
1296 self.saved_version = version;
1297 self.has_unsaved_edits
1298 .set((self.saved_version().clone(), false));
1299 self.has_conflict = false;
1300 self.saved_mtime = mtime;
1301 self.was_changed();
1302 cx.emit(BufferEvent::Saved);
1303 cx.notify();
1304 }
1305
1306 /// Reloads the contents of the buffer from disk.
1307 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1308 let (tx, rx) = futures::channel::oneshot::channel();
1309 let prev_version = self.text.version();
1310 self.reload_task = Some(cx.spawn(async move |this, cx| {
1311 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1312 let file = this.file.as_ref()?.as_local()?;
1313
1314 Some((file.disk_state().mtime(), file.load(cx)))
1315 })?
1316 else {
1317 return Ok(());
1318 };
1319
1320 let new_text = new_text.await?;
1321 let diff = this
1322 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1323 .await;
1324 this.update(cx, |this, cx| {
1325 if this.version() == diff.base_version {
1326 this.finalize_last_transaction();
1327 this.apply_diff(diff, cx);
1328 tx.send(this.finalize_last_transaction().cloned()).ok();
1329 this.has_conflict = false;
1330 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1331 } else {
1332 if !diff.edits.is_empty()
1333 || this
1334 .edits_since::<usize>(&diff.base_version)
1335 .next()
1336 .is_some()
1337 {
1338 this.has_conflict = true;
1339 }
1340
1341 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1342 }
1343
1344 this.reload_task.take();
1345 })
1346 }));
1347 rx
1348 }
1349
1350 /// This method is called to signal that the buffer has been reloaded.
1351 pub fn did_reload(
1352 &mut self,
1353 version: clock::Global,
1354 line_ending: LineEnding,
1355 mtime: Option<MTime>,
1356 cx: &mut Context<Self>,
1357 ) {
1358 self.saved_version = version;
1359 self.has_unsaved_edits
1360 .set((self.saved_version.clone(), false));
1361 self.text.set_line_ending(line_ending);
1362 self.saved_mtime = mtime;
1363 cx.emit(BufferEvent::Reloaded);
1364 cx.notify();
1365 }
1366
1367 /// Updates the [`File`] backing this buffer. This should be called when
1368 /// the file has changed or has been deleted.
1369 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1370 let was_dirty = self.is_dirty();
1371 let mut file_changed = false;
1372
1373 if let Some(old_file) = self.file.as_ref() {
1374 if new_file.path() != old_file.path() {
1375 file_changed = true;
1376 }
1377
1378 let old_state = old_file.disk_state();
1379 let new_state = new_file.disk_state();
1380 if old_state != new_state {
1381 file_changed = true;
1382 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1383 cx.emit(BufferEvent::ReloadNeeded)
1384 }
1385 }
1386 } else {
1387 file_changed = true;
1388 };
1389
1390 self.file = Some(new_file);
1391 if file_changed {
1392 self.was_changed();
1393 self.non_text_state_update_count += 1;
1394 if was_dirty != self.is_dirty() {
1395 cx.emit(BufferEvent::DirtyChanged);
1396 }
1397 cx.emit(BufferEvent::FileHandleChanged);
1398 cx.notify();
1399 }
1400 }
1401
1402 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1403 Some(self.branch_state.as_ref()?.base_buffer.clone())
1404 }
1405
1406 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1407 pub fn language(&self) -> Option<&Arc<Language>> {
1408 self.language.as_ref()
1409 }
1410
1411 /// Returns the [`Language`] at the given location.
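    ///
    /// For buffers with language injections, this returns the innermost layer
    /// active at that position. For example (a hedged sketch), in a Markdown
    /// buffer that contains a Rust fenced code block:
    ///
    /// ```ignore
    /// let outer = buffer.language_at(0).unwrap();                   // Markdown
    /// let inner = buffer.language_at(offset_inside_fence).unwrap(); // Rust
    /// ```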
1412 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1413 let offset = position.to_offset(self);
1414 let mut is_first = true;
1415 let start_anchor = self.anchor_before(offset);
1416 let end_anchor = self.anchor_after(offset);
1417 self.syntax_map
1418 .lock()
1419 .layers_for_range(offset..offset, &self.text, false)
1420 .filter(|layer| {
1421 if is_first {
1422 is_first = false;
1423 return true;
1424 }
1425
1426 layer
1427 .included_sub_ranges
1428 .map(|sub_ranges| {
1429 sub_ranges.iter().any(|sub_range| {
1430 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1431 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1432 !is_before_start && !is_after_end
1433 })
1434 })
1435 .unwrap_or(true)
1436 })
1437 .last()
1438 .map(|info| info.language.clone())
1439 .or_else(|| self.language.clone())
1440 }
1441
1442 /// Returns each [`Language`] for the active syntax layers at the given location.
1443 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1444 let offset = position.to_offset(self);
1445 let mut languages: Vec<Arc<Language>> = self
1446 .syntax_map
1447 .lock()
1448 .layers_for_range(offset..offset, &self.text, false)
1449 .map(|info| info.language.clone())
1450 .collect();
1451
1452 if languages.is_empty()
1453 && let Some(buffer_language) = self.language()
1454 {
1455 languages.push(buffer_language.clone());
1456 }
1457
1458 languages
1459 }
1460
1461 /// An integer version number that accounts for all updates besides
1462 /// the buffer's text itself (which is versioned via a version vector).
1463 pub fn non_text_state_update_count(&self) -> usize {
1464 self.non_text_state_update_count
1465 }
1466
1467 /// Whether the buffer is being parsed in the background.
1468 #[cfg(any(test, feature = "test-support"))]
1469 pub fn is_parsing(&self) -> bool {
1470 self.reparse.is_some()
1471 }
1472
1473 /// Indicates whether the buffer contains any regions that may be
1474 /// written in a language that hasn't been loaded yet.
1475 pub fn contains_unknown_injections(&self) -> bool {
1476 self.syntax_map.lock().contains_unknown_injections()
1477 }
1478
1479 #[cfg(any(test, feature = "test-support"))]
1480 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1481 self.sync_parse_timeout = timeout;
1482 }
1483
1484 /// Called after an edit to synchronize the buffer's main parse tree with
1485 /// the buffer's new underlying state.
1486 ///
1487 /// Locks the syntax map and interpolates the edits since the last reparse
1488 /// into the foreground syntax tree.
1489 ///
1490 /// Then takes a stable snapshot of the syntax map before unlocking it.
1491 /// The snapshot with the interpolated edits is sent to a background thread,
1492 /// where we ask Tree-sitter to perform an incremental parse.
1493 ///
1494 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1495 /// waiting on the parse to complete. As soon as it completes, we proceed
1496 /// synchronously, unless a 1ms timeout elapses.
1497 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// for the parse to complete, and we return with the interpolated tree still
    /// in the foreground. When the background parse completes, it calls back into
    /// the main thread and assigns the newly parsed state.
1502 ///
1503 /// If the buffer or grammar changed since the start of the background parse,
1504 /// initiate an additional reparse recursively. To avoid concurrent parses
1505 /// for the same buffer, we only initiate a new parse if we are not already
1506 /// parsing in the background.
1507 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1508 if self.reparse.is_some() {
1509 return;
1510 }
1511 let language = if let Some(language) = self.language.clone() {
1512 language
1513 } else {
1514 return;
1515 };
1516
1517 let text = self.text_snapshot();
1518 let parsed_version = self.version();
1519
1520 let mut syntax_map = self.syntax_map.lock();
1521 syntax_map.interpolate(&text);
1522 let language_registry = syntax_map.language_registry();
1523 let mut syntax_snapshot = syntax_map.snapshot();
1524 drop(syntax_map);
1525
1526 let parse_task = cx.background_spawn({
1527 let language = language.clone();
1528 let language_registry = language_registry.clone();
1529 async move {
1530 syntax_snapshot.reparse(&text, language_registry, language);
1531 syntax_snapshot
1532 }
1533 });
1534
1535 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1536 match cx
1537 .background_executor()
1538 .block_with_timeout(self.sync_parse_timeout, parse_task)
1539 {
1540 Ok(new_syntax_snapshot) => {
1541 self.did_finish_parsing(new_syntax_snapshot, cx);
1542 self.reparse = None;
1543 }
1544 Err(parse_task) => {
1545 self.reparse = Some(cx.spawn(async move |this, cx| {
1546 let new_syntax_map = parse_task.await;
1547 this.update(cx, move |this, cx| {
1548 let grammar_changed =
1549 this.language.as_ref().is_none_or(|current_language| {
1550 !Arc::ptr_eq(&language, current_language)
1551 });
1552 let language_registry_changed = new_syntax_map
1553 .contains_unknown_injections()
1554 && language_registry.is_some_and(|registry| {
1555 registry.version() != new_syntax_map.language_registry_version()
1556 });
1557 let parse_again = language_registry_changed
1558 || grammar_changed
1559 || this.version.changed_since(&parsed_version);
1560 this.did_finish_parsing(new_syntax_map, cx);
1561 this.reparse = None;
1562 if parse_again {
1563 this.reparse(cx);
1564 }
1565 })
1566 .ok();
1567 }));
1568 }
1569 }
1570 }
1571
1572 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1573 self.was_changed();
1574 self.non_text_state_update_count += 1;
1575 self.syntax_map.lock().did_parse(syntax_snapshot);
1576 self.request_autoindent(cx);
1577 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1578 cx.emit(BufferEvent::Reparsed);
1579 cx.notify();
1580 }
1581
1582 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1583 self.parse_status.1.clone()
1584 }
1585
1586 /// Assign to the buffer a set of diagnostics created by a given language server.
1587 pub fn update_diagnostics(
1588 &mut self,
1589 server_id: LanguageServerId,
1590 diagnostics: DiagnosticSet,
1591 cx: &mut Context<Self>,
1592 ) {
1593 let lamport_timestamp = self.text.lamport_clock.tick();
1594 let op = Operation::UpdateDiagnostics {
1595 server_id,
1596 diagnostics: diagnostics.iter().cloned().collect(),
1597 lamport_timestamp,
1598 };
1599
1600 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1601 self.send_operation(op, true, cx);
1602 }
1603
1604 pub fn buffer_diagnostics(
1605 &self,
1606 for_server: Option<LanguageServerId>,
1607 ) -> Vec<&DiagnosticEntry<Anchor>> {
1608 match for_server {
1609 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1610 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1611 Err(_) => Vec::new(),
1612 },
1613 None => self
1614 .diagnostics
1615 .iter()
1616 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1617 .collect(),
1618 }
1619 }
1620
1621 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1622 if let Some(indent_sizes) = self.compute_autoindents() {
1623 let indent_sizes = cx.background_spawn(indent_sizes);
1624 match cx
1625 .background_executor()
1626 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1627 {
1628 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1629 Err(indent_sizes) => {
1630 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1631 let indent_sizes = indent_sizes.await;
1632 this.update(cx, |this, cx| {
1633 this.apply_autoindents(indent_sizes, cx);
1634 })
1635 .ok();
1636 }));
1637 }
1638 }
1639 } else {
1640 self.autoindent_requests.clear();
1641 for tx in self.wait_for_autoindent_txs.drain(..) {
1642 tx.send(()).ok();
1643 }
1644 }
1645 }
1646
1647 fn compute_autoindents(
1648 &self,
1649 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1650 let max_rows_between_yields = 100;
1651 let snapshot = self.snapshot();
1652 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1653 return None;
1654 }
1655
1656 let autoindent_requests = self.autoindent_requests.clone();
1657 Some(async move {
1658 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1659 for request in autoindent_requests {
1660 // Resolve each edited range to its row in the current buffer and in the
1661 // buffer before this batch of edits.
1662 let mut row_ranges = Vec::new();
1663 let mut old_to_new_rows = BTreeMap::new();
1664 let mut language_indent_sizes_by_new_row = Vec::new();
1665 for entry in &request.entries {
1666 let position = entry.range.start;
1667 let new_row = position.to_point(&snapshot).row;
1668 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1669 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1670
1671 if !entry.first_line_is_new {
1672 let old_row = position.to_point(&request.before_edit).row;
1673 old_to_new_rows.insert(old_row, new_row);
1674 }
1675 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1676 }
1677
1678 // Build a map containing the suggested indentation for each of the edited lines
1679 // with respect to the state of the buffer before these edits. This map is keyed
1680 // by the rows for these lines in the current state of the buffer.
1681 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1682 let old_edited_ranges =
1683 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1684 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1685 let mut language_indent_size = IndentSize::default();
1686 for old_edited_range in old_edited_ranges {
1687 let suggestions = request
1688 .before_edit
1689 .suggest_autoindents(old_edited_range.clone())
1690 .into_iter()
1691 .flatten();
1692 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1693 if let Some(suggestion) = suggestion {
1694 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1695
1696 // Find the indent size based on the language for this row.
1697 while let Some((row, size)) = language_indent_sizes.peek() {
1698 if *row > new_row {
1699 break;
1700 }
1701 language_indent_size = *size;
1702 language_indent_sizes.next();
1703 }
1704
1705 let suggested_indent = old_to_new_rows
1706 .get(&suggestion.basis_row)
1707 .and_then(|from_row| {
1708 Some(old_suggestions.get(from_row).copied()?.0)
1709 })
1710 .unwrap_or_else(|| {
1711 request
1712 .before_edit
1713 .indent_size_for_line(suggestion.basis_row)
1714 })
1715 .with_delta(suggestion.delta, language_indent_size);
1716 old_suggestions
1717 .insert(new_row, (suggested_indent, suggestion.within_error));
1718 }
1719 }
1720 yield_now().await;
1721 }
1722
1723 // Compute new suggestions for each line, but only include them in the result
1724 // if they differ from the old suggestion for that line.
1725 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1726 let mut language_indent_size = IndentSize::default();
1727 for (row_range, original_indent_column) in row_ranges {
1728 let new_edited_row_range = if request.is_block_mode {
1729 row_range.start..row_range.start + 1
1730 } else {
1731 row_range.clone()
1732 };
1733
1734 let suggestions = snapshot
1735 .suggest_autoindents(new_edited_row_range.clone())
1736 .into_iter()
1737 .flatten();
1738 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1739 if let Some(suggestion) = suggestion {
1740 // Find the indent size based on the language for this row.
1741 while let Some((row, size)) = language_indent_sizes.peek() {
1742 if *row > new_row {
1743 break;
1744 }
1745 language_indent_size = *size;
1746 language_indent_sizes.next();
1747 }
1748
1749 let suggested_indent = indent_sizes
1750 .get(&suggestion.basis_row)
1751 .copied()
1752 .map(|e| e.0)
1753 .unwrap_or_else(|| {
1754 snapshot.indent_size_for_line(suggestion.basis_row)
1755 })
1756 .with_delta(suggestion.delta, language_indent_size);
1757
1758 if old_suggestions.get(&new_row).is_none_or(
1759 |(old_indentation, was_within_error)| {
1760 suggested_indent != *old_indentation
1761 && (!suggestion.within_error || *was_within_error)
1762 },
1763 ) {
1764 indent_sizes.insert(
1765 new_row,
1766 (suggested_indent, request.ignore_empty_lines),
1767 );
1768 }
1769 }
1770 }
1771
1772 if let (true, Some(original_indent_column)) =
1773 (request.is_block_mode, original_indent_column)
1774 {
1775 let new_indent =
1776 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1777 *indent
1778 } else {
1779 snapshot.indent_size_for_line(row_range.start)
1780 };
1781 let delta = new_indent.len as i64 - original_indent_column as i64;
1782 if delta != 0 {
1783 for row in row_range.skip(1) {
1784 indent_sizes.entry(row).or_insert_with(|| {
1785 let mut size = snapshot.indent_size_for_line(row);
1786 if size.kind == new_indent.kind {
1787 match delta.cmp(&0) {
1788 Ordering::Greater => size.len += delta as u32,
1789 Ordering::Less => {
1790 size.len = size.len.saturating_sub(-delta as u32)
1791 }
1792 Ordering::Equal => {}
1793 }
1794 }
1795 (size, request.ignore_empty_lines)
1796 });
1797 }
1798 }
1799 }
1800
1801 yield_now().await;
1802 }
1803 }
1804
1805 indent_sizes
1806 .into_iter()
1807 .filter_map(|(row, (indent, ignore_empty_lines))| {
1808 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1809 None
1810 } else {
1811 Some((row, indent))
1812 }
1813 })
1814 .collect()
1815 })
1816 }
1817
1818 fn apply_autoindents(
1819 &mut self,
1820 indent_sizes: BTreeMap<u32, IndentSize>,
1821 cx: &mut Context<Self>,
1822 ) {
1823 self.autoindent_requests.clear();
1824 for tx in self.wait_for_autoindent_txs.drain(..) {
1825 tx.send(()).ok();
1826 }
1827
1828 let edits: Vec<_> = indent_sizes
1829 .into_iter()
1830 .filter_map(|(row, indent_size)| {
1831 let current_size = indent_size_for_line(self, row);
1832 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1833 })
1834 .collect();
1835
1836 let preserve_preview = self.preserve_preview();
1837 self.edit(edits, None, cx);
1838 if preserve_preview {
1839 self.refresh_preview();
1840 }
1841 }
1842
1843 /// Create a minimal edit that will cause the given row to be indented
1844 /// with the given size. After applying this edit, the length of the line
1845     /// will always be at least `new_size.len`.
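    ///
    /// A rough sketch of the expected result (not a doctest; `IndentSize::spaces`
    /// constructs a space-based indent of the given length elsewhere in this crate):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces inserts two spaces at column 0 of row 3.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```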
1846 pub fn edit_for_indent_size_adjustment(
1847 row: u32,
1848 current_size: IndentSize,
1849 new_size: IndentSize,
1850 ) -> Option<(Range<Point>, String)> {
1851 if new_size.kind == current_size.kind {
1852             match new_size.len.cmp(&current_size.len) {
1853 Ordering::Greater => {
1854 let point = Point::new(row, 0);
1855 Some((
1856 point..point,
1857 iter::repeat(new_size.char())
1858 .take((new_size.len - current_size.len) as usize)
1859 .collect::<String>(),
1860 ))
1861 }
1862
1863 Ordering::Less => Some((
1864 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1865 String::new(),
1866 )),
1867
1868 Ordering::Equal => None,
1869 }
1870 } else {
1871 Some((
1872 Point::new(row, 0)..Point::new(row, current_size.len),
1873 iter::repeat(new_size.char())
1874 .take(new_size.len as usize)
1875 .collect::<String>(),
1876 ))
1877 }
1878 }
1879
1880 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1881 /// and the given new text.
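    ///
    /// A hedged usage sketch (not a doctest; assumes a `buffer: Entity<Buffer>`, a
    /// `new_text: String`, and an async-capable `cx` are in scope, with the result
    /// applied back via [`Buffer::apply_diff`]):
    ///
    /// ```ignore
    /// // Compute the diff on the background executor, then apply it to the buffer.
    /// let diff_task = buffer.read(cx).diff(new_text, cx);
    /// cx.spawn(async move |cx| {
    ///     let diff = diff_task.await;
    ///     buffer
    ///         .update(cx, |buffer, cx| buffer.apply_diff(diff, cx))
    ///         .ok();
    /// })
    /// .detach();
    /// ```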
1882 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1883 let old_text = self.as_rope().clone();
1884 let base_version = self.version();
1885 cx.background_executor()
1886 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1887 let old_text = old_text.to_string();
1888 let line_ending = LineEnding::detect(&new_text);
1889 LineEnding::normalize(&mut new_text);
1890 let edits = text_diff(&old_text, &new_text);
1891 Diff {
1892 base_version,
1893 line_ending,
1894 edits,
1895 }
1896 })
1897 }
1898
1899 /// Spawns a background task that searches the buffer for any whitespace
1900     /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1901 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1902 let old_text = self.as_rope().clone();
1903 let line_ending = self.line_ending();
1904 let base_version = self.version();
1905 cx.background_spawn(async move {
1906 let ranges = trailing_whitespace_ranges(&old_text);
1907 let empty = Arc::<str>::from("");
1908 Diff {
1909 base_version,
1910 line_ending,
1911 edits: ranges
1912 .into_iter()
1913 .map(|range| (range, empty.clone()))
1914 .collect(),
1915 }
1916 })
1917 }
1918
1919 /// Ensures that the buffer ends with a single newline character, and
1920     /// no other whitespace. Does nothing if the buffer is empty.
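    ///
    /// A small illustrative sketch (not a doctest; assumes the buffer currently
    /// contains `"fn main() {}\n\t \n"`):
    ///
    /// ```ignore
    /// // The trailing whitespace and extra blank line are replaced by a single
    /// // newline, leaving "fn main() {}\n".
    /// buffer.ensure_final_newline(cx);
    /// ```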
1921 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1922 let len = self.len();
1923 if len == 0 {
1924 return;
1925 }
1926 let mut offset = len;
1927 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1928 let non_whitespace_len = chunk
1929 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1930 .len();
1931 offset -= chunk.len();
1932 offset += non_whitespace_len;
1933 if non_whitespace_len != 0 {
1934 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1935 return;
1936 }
1937 break;
1938 }
1939 }
1940 self.edit([(offset..len, "\n")], None, cx);
1941 }
1942
1943 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1944 /// calculated, then adjust the diff to account for those changes, and discard any
1945 /// parts of the diff that conflict with those changes.
1946 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1947 let snapshot = self.snapshot();
1948 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1949 let mut delta = 0;
1950 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1951 while let Some(edit_since) = edits_since.peek() {
1952 // If the edit occurs after a diff hunk, then it does not
1953 // affect that hunk.
1954 if edit_since.old.start > range.end {
1955 break;
1956 }
1957 // If the edit precedes the diff hunk, then adjust the hunk
1958 // to reflect the edit.
1959 else if edit_since.old.end < range.start {
1960 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1961 edits_since.next();
1962 }
1963 // If the edit intersects a diff hunk, then discard that hunk.
1964 else {
1965 return None;
1966 }
1967 }
1968
1969 let start = (range.start as i64 + delta) as usize;
1970 let end = (range.end as i64 + delta) as usize;
1971 Some((start..end, new_text))
1972 });
1973
1974 self.start_transaction();
1975 self.text.set_line_ending(diff.line_ending);
1976 self.edit(adjusted_edits, None, cx);
1977 self.end_transaction(cx)
1978 }
1979
1980 fn has_unsaved_edits(&self) -> bool {
1981 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1982
1983 if last_version == self.version {
1984 self.has_unsaved_edits
1985 .set((last_version, has_unsaved_edits));
1986 return has_unsaved_edits;
1987 }
1988
1989 let has_edits = self.has_edits_since(&self.saved_version);
1990 self.has_unsaved_edits
1991 .set((self.version.clone(), has_edits));
1992 has_edits
1993 }
1994
1995 /// Checks if the buffer has unsaved changes.
1996 pub fn is_dirty(&self) -> bool {
1997 if self.capability == Capability::ReadOnly {
1998 return false;
1999 }
2000 if self.has_conflict {
2001 return true;
2002 }
2003 match self.file.as_ref().map(|f| f.disk_state()) {
2004 Some(DiskState::New) | Some(DiskState::Deleted) => {
2005 !self.is_empty() && self.has_unsaved_edits()
2006 }
2007 _ => self.has_unsaved_edits(),
2008 }
2009 }
2010
2011 /// Checks if the buffer and its file have both changed since the buffer
2012 /// was last saved or reloaded.
2013 pub fn has_conflict(&self) -> bool {
2014 if self.has_conflict {
2015 return true;
2016 }
2017 let Some(file) = self.file.as_ref() else {
2018 return false;
2019 };
2020 match file.disk_state() {
2021 DiskState::New => false,
2022 DiskState::Present { mtime } => match self.saved_mtime {
2023 Some(saved_mtime) => {
2024 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2025 }
2026 None => true,
2027 },
2028 DiskState::Deleted => false,
2029 }
2030 }
2031
2032 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2033 pub fn subscribe(&mut self) -> Subscription {
2034 self.text.subscribe()
2035 }
2036
2037 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2038 ///
2039 /// This allows downstream code to check if the buffer's text has changed without
2040     /// waiting for an effect cycle, which would be required if using events.
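    ///
    /// A hedged usage sketch (not a doctest; assumes a `buffer: &mut Buffer` and a
    /// `cx: &mut Context<Buffer>` are in scope):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// // Register a flag that flips to true whenever the buffer's text changes.
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// buffer.edit([(0..0, "x")], None, cx);
    /// assert!(changed.get());
    /// ```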
2041 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2042 if let Err(ix) = self
2043 .change_bits
2044 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2045 {
2046 self.change_bits.insert(ix, bit);
2047 }
2048 }
2049
2050 fn was_changed(&mut self) {
2051 self.change_bits.retain(|change_bit| {
2052 change_bit.upgrade().is_some_and(|bit| {
2053 bit.replace(true);
2054 true
2055 })
2056 });
2057 }
2058
2059 /// Starts a transaction, if one is not already in-progress. When undoing or
2060 /// redoing edits, all of the edits performed within a transaction are undone
2061 /// or redone together.
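    ///
    /// A hedged sketch of grouping edits (not a doctest; offsets are illustrative):
    ///
    /// ```ignore
    /// // Both edits land in one transaction, so a single undo reverts them together.
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(12..12, "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// ```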
2062 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2063 self.start_transaction_at(Instant::now())
2064 }
2065
2066 /// Starts a transaction, providing the current time. Subsequent transactions
2067 /// that occur within a short period of time will be grouped together. This
2068 /// is controlled by the buffer's undo grouping duration.
2069 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2070 self.transaction_depth += 1;
2071 if self.was_dirty_before_starting_transaction.is_none() {
2072 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2073 }
2074 self.text.start_transaction_at(now)
2075 }
2076
2077 /// Terminates the current transaction, if this is the outermost transaction.
2078 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2079 self.end_transaction_at(Instant::now(), cx)
2080 }
2081
2082 /// Terminates the current transaction, providing the current time. Subsequent transactions
2083 /// that occur within a short period of time will be grouped together. This
2084 /// is controlled by the buffer's undo grouping duration.
2085 pub fn end_transaction_at(
2086 &mut self,
2087 now: Instant,
2088 cx: &mut Context<Self>,
2089 ) -> Option<TransactionId> {
2090 assert!(self.transaction_depth > 0);
2091 self.transaction_depth -= 1;
2092 let was_dirty = if self.transaction_depth == 0 {
2093 self.was_dirty_before_starting_transaction.take().unwrap()
2094 } else {
2095 false
2096 };
2097 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2098 self.did_edit(&start_version, was_dirty, cx);
2099 Some(transaction_id)
2100 } else {
2101 None
2102 }
2103 }
2104
2105 /// Manually add a transaction to the buffer's undo history.
2106 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2107 self.text.push_transaction(transaction, now);
2108 }
2109
2110 /// Differs from `push_transaction` in that it does not clear the redo
2111 /// stack. Intended to be used to create a parent transaction to merge
2112 /// potential child transactions into.
2113 ///
2114 /// The caller is responsible for removing it from the undo history using
2115 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2116 /// are merged into this transaction, the caller is responsible for ensuring
2117 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2118 /// cleared is to create transactions with the usual `start_transaction` and
2119 /// `end_transaction` methods and merging the resulting transactions into
2120     /// the transaction created by this method.
2121 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2122 self.text.push_empty_transaction(now)
2123 }
2124
2125 /// Prevent the last transaction from being grouped with any subsequent transactions,
2126     /// even if they occur within the buffer's undo grouping duration.
2127 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2128 self.text.finalize_last_transaction()
2129 }
2130
2131 /// Manually group all changes since a given transaction.
2132 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2133 self.text.group_until_transaction(transaction_id);
2134 }
2135
2136 /// Manually remove a transaction from the buffer's undo history
2137 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2138 self.text.forget_transaction(transaction_id)
2139 }
2140
2141 /// Retrieve a transaction from the buffer's undo history
2142 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2143 self.text.get_transaction(transaction_id)
2144 }
2145
2146 /// Manually merge two transactions in the buffer's undo history.
2147 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2148 self.text.merge_transactions(transaction, destination);
2149 }
2150
2151 /// Waits for the buffer to receive operations with the given timestamps.
2152 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2153 &mut self,
2154 edit_ids: It,
2155 ) -> impl Future<Output = Result<()>> + use<It> {
2156 self.text.wait_for_edits(edit_ids)
2157 }
2158
2159 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2160 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2161 &mut self,
2162 anchors: It,
2163 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2164 self.text.wait_for_anchors(anchors)
2165 }
2166
2167 /// Waits for the buffer to receive operations up to the given version.
2168 pub fn wait_for_version(
2169 &mut self,
2170 version: clock::Global,
2171 ) -> impl Future<Output = Result<()>> + use<> {
2172 self.text.wait_for_version(version)
2173 }
2174
2175 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2176     /// [`Buffer::wait_for_anchors`] to resolve with an error.
2177 pub fn give_up_waiting(&mut self) {
2178 self.text.give_up_waiting();
2179 }
2180
2181 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2182 let mut rx = None;
2183 if !self.autoindent_requests.is_empty() {
2184 let channel = oneshot::channel();
2185 self.wait_for_autoindent_txs.push(channel.0);
2186 rx = Some(channel.1);
2187 }
2188 rx
2189 }
2190
2191 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2192 pub fn set_active_selections(
2193 &mut self,
2194 selections: Arc<[Selection<Anchor>]>,
2195 line_mode: bool,
2196 cursor_shape: CursorShape,
2197 cx: &mut Context<Self>,
2198 ) {
2199 let lamport_timestamp = self.text.lamport_clock.tick();
2200 self.remote_selections.insert(
2201 self.text.replica_id(),
2202 SelectionSet {
2203 selections: selections.clone(),
2204 lamport_timestamp,
2205 line_mode,
2206 cursor_shape,
2207 },
2208 );
2209 self.send_operation(
2210 Operation::UpdateSelections {
2211 selections,
2212 line_mode,
2213 lamport_timestamp,
2214 cursor_shape,
2215 },
2216 true,
2217 cx,
2218 );
2219 self.non_text_state_update_count += 1;
2220 cx.notify();
2221 }
2222
2223 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2224 /// this replica.
2225 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2226 if self
2227 .remote_selections
2228 .get(&self.text.replica_id())
2229 .is_none_or(|set| !set.selections.is_empty())
2230 {
2231 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2232 }
2233 }
2234
2235 pub fn set_agent_selections(
2236 &mut self,
2237 selections: Arc<[Selection<Anchor>]>,
2238 line_mode: bool,
2239 cursor_shape: CursorShape,
2240 cx: &mut Context<Self>,
2241 ) {
2242 let lamport_timestamp = self.text.lamport_clock.tick();
2243 self.remote_selections.insert(
2244 AGENT_REPLICA_ID,
2245 SelectionSet {
2246 selections,
2247 lamport_timestamp,
2248 line_mode,
2249 cursor_shape,
2250 },
2251 );
2252 self.non_text_state_update_count += 1;
2253 cx.notify();
2254 }
2255
2256 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2257 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2258 }
2259
2260 /// Replaces the buffer's entire text.
2261 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2262 where
2263 T: Into<Arc<str>>,
2264 {
2265 self.autoindent_requests.clear();
2266 self.edit([(0..self.len(), text)], None, cx)
2267 }
2268
2269 /// Appends the given text to the end of the buffer.
2270 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2271 where
2272 T: Into<Arc<str>>,
2273 {
2274 self.edit([(self.len()..self.len(), text)], None, cx)
2275 }
2276
2277 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2278 /// delete, and a string of text to insert at that location.
2279 ///
2280 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2281 /// request for the edited ranges, which will be processed when the buffer finishes
2282 /// parsing.
2283 ///
2284 /// Parsing takes place at the end of a transaction, and may compute synchronously
2285 /// or asynchronously, depending on the changes.
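    ///
    /// A hedged usage sketch (not a doctest; the byte offsets are illustrative and
    /// the language's indent rules determine the final indentation):
    ///
    /// ```ignore
    /// // Replace the first three bytes and insert a new line, queuing an
    /// // auto-indent request for both edited ranges.
    /// buffer.edit(
    ///     [(0..3, "let"), (10..10, "\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```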
2286 pub fn edit<I, S, T>(
2287 &mut self,
2288 edits_iter: I,
2289 autoindent_mode: Option<AutoindentMode>,
2290 cx: &mut Context<Self>,
2291 ) -> Option<clock::Lamport>
2292 where
2293 I: IntoIterator<Item = (Range<S>, T)>,
2294 S: ToOffset,
2295 T: Into<Arc<str>>,
2296 {
2297 // Skip invalid edits and coalesce contiguous ones.
2298 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2299
2300 for (range, new_text) in edits_iter {
2301 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2302
2303 if range.start > range.end {
2304 mem::swap(&mut range.start, &mut range.end);
2305 }
2306 let new_text = new_text.into();
2307 if !new_text.is_empty() || !range.is_empty() {
2308 if let Some((prev_range, prev_text)) = edits.last_mut()
2309 && prev_range.end >= range.start
2310 {
2311 prev_range.end = cmp::max(prev_range.end, range.end);
2312 *prev_text = format!("{prev_text}{new_text}").into();
2313 } else {
2314 edits.push((range, new_text));
2315 }
2316 }
2317 }
2318 if edits.is_empty() {
2319 return None;
2320 }
2321
2322 self.start_transaction();
2323 self.pending_autoindent.take();
2324 let autoindent_request = autoindent_mode
2325 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2326
2327 let edit_operation = self.text.edit(edits.iter().cloned());
2328 let edit_id = edit_operation.timestamp();
2329
2330 if let Some((before_edit, mode)) = autoindent_request {
2331 let mut delta = 0isize;
2332 let mut previous_setting = None;
2333 let entries: Vec<_> = edits
2334 .into_iter()
2335 .enumerate()
2336 .zip(&edit_operation.as_edit().unwrap().new_text)
2337 .filter(|((_, (range, _)), _)| {
2338 let language = before_edit.language_at(range.start);
2339 let language_id = language.map(|l| l.id());
2340 if let Some((cached_language_id, auto_indent)) = previous_setting
2341 && cached_language_id == language_id
2342 {
2343 auto_indent
2344 } else {
2345 // The auto-indent setting is not present in editorconfigs, hence
2346 // we can avoid passing the file here.
2347 let auto_indent =
2348 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2349 previous_setting = Some((language_id, auto_indent));
2350 auto_indent
2351 }
2352 })
2353 .map(|((ix, (range, _)), new_text)| {
2354 let new_text_length = new_text.len();
2355 let old_start = range.start.to_point(&before_edit);
2356 let new_start = (delta + range.start as isize) as usize;
2357 let range_len = range.end - range.start;
2358 delta += new_text_length as isize - range_len as isize;
2359
2360 // Decide what range of the insertion to auto-indent, and whether
2361 // the first line of the insertion should be considered a newly-inserted line
2362 // or an edit to an existing line.
2363 let mut range_of_insertion_to_indent = 0..new_text_length;
2364 let mut first_line_is_new = true;
2365
2366 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2367 let old_line_end = before_edit.line_len(old_start.row);
2368
2369 if old_start.column > old_line_start {
2370 first_line_is_new = false;
2371 }
2372
2373 if !new_text.contains('\n')
2374 && (old_start.column + (range_len as u32) < old_line_end
2375 || old_line_end == old_line_start)
2376 {
2377 first_line_is_new = false;
2378 }
2379
2380 // When inserting text starting with a newline, avoid auto-indenting the
2381 // previous line.
2382 if new_text.starts_with('\n') {
2383 range_of_insertion_to_indent.start += 1;
2384 first_line_is_new = true;
2385 }
2386
2387 let mut original_indent_column = None;
2388 if let AutoindentMode::Block {
2389 original_indent_columns,
2390 } = &mode
2391 {
2392 original_indent_column = Some(if new_text.starts_with('\n') {
2393 indent_size_for_text(
2394 new_text[range_of_insertion_to_indent.clone()].chars(),
2395 )
2396 .len
2397 } else {
2398 original_indent_columns
2399 .get(ix)
2400 .copied()
2401 .flatten()
2402 .unwrap_or_else(|| {
2403 indent_size_for_text(
2404 new_text[range_of_insertion_to_indent.clone()].chars(),
2405 )
2406 .len
2407 })
2408 });
2409
2410 // Avoid auto-indenting the line after the edit.
2411 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2412 range_of_insertion_to_indent.end -= 1;
2413 }
2414 }
2415
2416 AutoindentRequestEntry {
2417 first_line_is_new,
2418 original_indent_column,
2419 indent_size: before_edit.language_indent_size_at(range.start, cx),
2420 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2421 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2422 }
2423 })
2424 .collect();
2425
2426 if !entries.is_empty() {
2427 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2428 before_edit,
2429 entries,
2430 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2431 ignore_empty_lines: false,
2432 }));
2433 }
2434 }
2435
2436 self.end_transaction(cx);
2437 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2438 Some(edit_id)
2439 }
2440
2441 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2442 self.was_changed();
2443
2444 if self.edits_since::<usize>(old_version).next().is_none() {
2445 return;
2446 }
2447
2448 self.reparse(cx);
2449 cx.emit(BufferEvent::Edited);
2450 if was_dirty != self.is_dirty() {
2451 cx.emit(BufferEvent::DirtyChanged);
2452 }
2453 cx.notify();
2454 }
2455
2456 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2457 where
2458 I: IntoIterator<Item = Range<T>>,
2459 T: ToOffset + Copy,
2460 {
2461 let before_edit = self.snapshot();
2462 let entries = ranges
2463 .into_iter()
2464 .map(|range| AutoindentRequestEntry {
2465 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2466 first_line_is_new: true,
2467 indent_size: before_edit.language_indent_size_at(range.start, cx),
2468 original_indent_column: None,
2469 })
2470 .collect();
2471 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2472 before_edit,
2473 entries,
2474 is_block_mode: false,
2475 ignore_empty_lines: true,
2476 }));
2477 self.request_autoindent(cx);
2478 }
2479
2480     /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2481     /// You can also request the insertion of empty lines above and below the line starting at the returned point.
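    ///
    /// A hedged usage sketch (not a doctest; the position and flags are illustrative):
    ///
    /// ```ignore
    /// // Carve out a blank, auto-indented line at row 4, column 2, padding it with
    /// // blank lines above and below when the neighbors are not already blank.
    /// let new_line_start = buffer.insert_empty_line(Point::new(4, 2), true, true, cx);
    /// ```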
2482 pub fn insert_empty_line(
2483 &mut self,
2484 position: impl ToPoint,
2485 space_above: bool,
2486 space_below: bool,
2487 cx: &mut Context<Self>,
2488 ) -> Point {
2489 let mut position = position.to_point(self);
2490
2491 self.start_transaction();
2492
2493 self.edit(
2494 [(position..position, "\n")],
2495 Some(AutoindentMode::EachLine),
2496 cx,
2497 );
2498
2499 if position.column > 0 {
2500 position += Point::new(1, 0);
2501 }
2502
2503 if !self.is_line_blank(position.row) {
2504 self.edit(
2505 [(position..position, "\n")],
2506 Some(AutoindentMode::EachLine),
2507 cx,
2508 );
2509 }
2510
2511 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2512 self.edit(
2513 [(position..position, "\n")],
2514 Some(AutoindentMode::EachLine),
2515 cx,
2516 );
2517 position.row += 1;
2518 }
2519
2520 if space_below
2521 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2522 {
2523 self.edit(
2524 [(position..position, "\n")],
2525 Some(AutoindentMode::EachLine),
2526 cx,
2527 );
2528 }
2529
2530 self.end_transaction(cx);
2531
2532 position
2533 }
2534
2535 /// Applies the given remote operations to the buffer.
2536 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2537 self.pending_autoindent.take();
2538 let was_dirty = self.is_dirty();
2539 let old_version = self.version.clone();
2540 let mut deferred_ops = Vec::new();
2541 let buffer_ops = ops
2542 .into_iter()
2543 .filter_map(|op| match op {
2544 Operation::Buffer(op) => Some(op),
2545 _ => {
2546 if self.can_apply_op(&op) {
2547 self.apply_op(op, cx);
2548 } else {
2549 deferred_ops.push(op);
2550 }
2551 None
2552 }
2553 })
2554 .collect::<Vec<_>>();
2555 for operation in buffer_ops.iter() {
2556 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2557 }
2558 self.text.apply_ops(buffer_ops);
2559 self.deferred_ops.insert(deferred_ops);
2560 self.flush_deferred_ops(cx);
2561 self.did_edit(&old_version, was_dirty, cx);
2562 // Notify independently of whether the buffer was edited as the operations could include a
2563 // selection update.
2564 cx.notify();
2565 }
2566
2567 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2568 let mut deferred_ops = Vec::new();
2569 for op in self.deferred_ops.drain().iter().cloned() {
2570 if self.can_apply_op(&op) {
2571 self.apply_op(op, cx);
2572 } else {
2573 deferred_ops.push(op);
2574 }
2575 }
2576 self.deferred_ops.insert(deferred_ops);
2577 }
2578
2579 pub fn has_deferred_ops(&self) -> bool {
2580 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2581 }
2582
2583 fn can_apply_op(&self, operation: &Operation) -> bool {
2584 match operation {
2585 Operation::Buffer(_) => {
2586 unreachable!("buffer operations should never be applied at this layer")
2587 }
2588 Operation::UpdateDiagnostics {
2589 diagnostics: diagnostic_set,
2590 ..
2591 } => diagnostic_set.iter().all(|diagnostic| {
2592 self.text.can_resolve(&diagnostic.range.start)
2593 && self.text.can_resolve(&diagnostic.range.end)
2594 }),
2595 Operation::UpdateSelections { selections, .. } => selections
2596 .iter()
2597 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2598 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2599 }
2600 }
2601
2602 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2603 match operation {
2604 Operation::Buffer(_) => {
2605 unreachable!("buffer operations should never be applied at this layer")
2606 }
2607 Operation::UpdateDiagnostics {
2608 server_id,
2609 diagnostics: diagnostic_set,
2610 lamport_timestamp,
2611 } => {
2612 let snapshot = self.snapshot();
2613 self.apply_diagnostic_update(
2614 server_id,
2615 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2616 lamport_timestamp,
2617 cx,
2618 );
2619 }
2620 Operation::UpdateSelections {
2621 selections,
2622 lamport_timestamp,
2623 line_mode,
2624 cursor_shape,
2625 } => {
2626 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2627 && set.lamport_timestamp > lamport_timestamp
2628 {
2629 return;
2630 }
2631
2632 self.remote_selections.insert(
2633 lamport_timestamp.replica_id,
2634 SelectionSet {
2635 selections,
2636 lamport_timestamp,
2637 line_mode,
2638 cursor_shape,
2639 },
2640 );
2641 self.text.lamport_clock.observe(lamport_timestamp);
2642 self.non_text_state_update_count += 1;
2643 }
2644 Operation::UpdateCompletionTriggers {
2645 triggers,
2646 lamport_timestamp,
2647 server_id,
2648 } => {
2649 if triggers.is_empty() {
2650 self.completion_triggers_per_language_server
2651 .remove(&server_id);
2652 self.completion_triggers = self
2653 .completion_triggers_per_language_server
2654 .values()
2655 .flat_map(|triggers| triggers.iter().cloned())
2656 .collect();
2657 } else {
2658 self.completion_triggers_per_language_server
2659 .insert(server_id, triggers.iter().cloned().collect());
2660 self.completion_triggers.extend(triggers);
2661 }
2662 self.text.lamport_clock.observe(lamport_timestamp);
2663 }
2664 Operation::UpdateLineEnding {
2665 line_ending,
2666 lamport_timestamp,
2667 } => {
2668 self.text.set_line_ending(line_ending);
2669 self.text.lamport_clock.observe(lamport_timestamp);
2670 }
2671 }
2672 }
2673
2674 fn apply_diagnostic_update(
2675 &mut self,
2676 server_id: LanguageServerId,
2677 diagnostics: DiagnosticSet,
2678 lamport_timestamp: clock::Lamport,
2679 cx: &mut Context<Self>,
2680 ) {
2681 if lamport_timestamp > self.diagnostics_timestamp {
2682 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2683 if diagnostics.is_empty() {
2684 if let Ok(ix) = ix {
2685 self.diagnostics.remove(ix);
2686 }
2687 } else {
2688 match ix {
2689 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2690 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2691 };
2692 }
2693 self.diagnostics_timestamp = lamport_timestamp;
2694 self.non_text_state_update_count += 1;
2695 self.text.lamport_clock.observe(lamport_timestamp);
2696 cx.notify();
2697 cx.emit(BufferEvent::DiagnosticsUpdated);
2698 }
2699 }
2700
2701 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2702 self.was_changed();
2703 cx.emit(BufferEvent::Operation {
2704 operation,
2705 is_local,
2706 });
2707 }
2708
2709 /// Removes the selections for a given peer.
2710 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2711 self.remote_selections.remove(&replica_id);
2712 cx.notify();
2713 }
2714
2715 /// Undoes the most recent transaction.
2716 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2717 let was_dirty = self.is_dirty();
2718 let old_version = self.version.clone();
2719
2720 if let Some((transaction_id, operation)) = self.text.undo() {
2721 self.send_operation(Operation::Buffer(operation), true, cx);
2722 self.did_edit(&old_version, was_dirty, cx);
2723 Some(transaction_id)
2724 } else {
2725 None
2726 }
2727 }
2728
2729 /// Manually undoes a specific transaction in the buffer's undo history.
2730 pub fn undo_transaction(
2731 &mut self,
2732 transaction_id: TransactionId,
2733 cx: &mut Context<Self>,
2734 ) -> bool {
2735 let was_dirty = self.is_dirty();
2736 let old_version = self.version.clone();
2737 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2738 self.send_operation(Operation::Buffer(operation), true, cx);
2739 self.did_edit(&old_version, was_dirty, cx);
2740 true
2741 } else {
2742 false
2743 }
2744 }
2745
2746 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2747 pub fn undo_to_transaction(
2748 &mut self,
2749 transaction_id: TransactionId,
2750 cx: &mut Context<Self>,
2751 ) -> bool {
2752 let was_dirty = self.is_dirty();
2753 let old_version = self.version.clone();
2754
2755 let operations = self.text.undo_to_transaction(transaction_id);
2756 let undone = !operations.is_empty();
2757 for operation in operations {
2758 self.send_operation(Operation::Buffer(operation), true, cx);
2759 }
2760 if undone {
2761 self.did_edit(&old_version, was_dirty, cx)
2762 }
2763 undone
2764 }
2765
2766 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2767 let was_dirty = self.is_dirty();
2768 let operation = self.text.undo_operations(counts);
2769 let old_version = self.version.clone();
2770 self.send_operation(Operation::Buffer(operation), true, cx);
2771 self.did_edit(&old_version, was_dirty, cx);
2772 }
2773
2774     /// Redoes the most recently undone transaction.
2775 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2776 let was_dirty = self.is_dirty();
2777 let old_version = self.version.clone();
2778
2779 if let Some((transaction_id, operation)) = self.text.redo() {
2780 self.send_operation(Operation::Buffer(operation), true, cx);
2781 self.did_edit(&old_version, was_dirty, cx);
2782 Some(transaction_id)
2783 } else {
2784 None
2785 }
2786 }
2787
2788     /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2789 pub fn redo_to_transaction(
2790 &mut self,
2791 transaction_id: TransactionId,
2792 cx: &mut Context<Self>,
2793 ) -> bool {
2794 let was_dirty = self.is_dirty();
2795 let old_version = self.version.clone();
2796
2797 let operations = self.text.redo_to_transaction(transaction_id);
2798 let redone = !operations.is_empty();
2799 for operation in operations {
2800 self.send_operation(Operation::Buffer(operation), true, cx);
2801 }
2802 if redone {
2803 self.did_edit(&old_version, was_dirty, cx)
2804 }
2805 redone
2806 }
2807
2808 /// Override current completion triggers with the user-provided completion triggers.
2809 pub fn set_completion_triggers(
2810 &mut self,
2811 server_id: LanguageServerId,
2812 triggers: BTreeSet<String>,
2813 cx: &mut Context<Self>,
2814 ) {
2815 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2816 if triggers.is_empty() {
2817 self.completion_triggers_per_language_server
2818 .remove(&server_id);
2819 self.completion_triggers = self
2820 .completion_triggers_per_language_server
2821 .values()
2822 .flat_map(|triggers| triggers.iter().cloned())
2823 .collect();
2824 } else {
2825 self.completion_triggers_per_language_server
2826 .insert(server_id, triggers.clone());
2827 self.completion_triggers.extend(triggers.iter().cloned());
2828 }
2829 self.send_operation(
2830 Operation::UpdateCompletionTriggers {
2831 triggers: triggers.into_iter().collect(),
2832 lamport_timestamp: self.completion_triggers_timestamp,
2833 server_id,
2834 },
2835 true,
2836 cx,
2837 );
2838 cx.notify();
2839 }
2840
2841 /// Returns a list of strings which trigger a completion menu for this language.
2842     /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2843 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2844 &self.completion_triggers
2845 }
2846
2847 /// Call this directly after performing edits to prevent the preview tab
2848 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2849 /// to return false until there are additional edits.
2850 pub fn refresh_preview(&mut self) {
2851 self.preview_version = self.version.clone();
2852 }
2853
2854 /// Whether we should preserve the preview status of a tab containing this buffer.
2855 pub fn preserve_preview(&self) -> bool {
2856 !self.has_edits_since(&self.preview_version)
2857 }
2858}
2859
2860#[doc(hidden)]
2861#[cfg(any(test, feature = "test-support"))]
2862impl Buffer {
2863 pub fn edit_via_marked_text(
2864 &mut self,
2865 marked_string: &str,
2866 autoindent_mode: Option<AutoindentMode>,
2867 cx: &mut Context<Self>,
2868 ) {
2869 let edits = self.edits_for_marked_text(marked_string);
2870 self.edit(edits, autoindent_mode, cx);
2871 }
2872
2873 pub fn set_group_interval(&mut self, group_interval: Duration) {
2874 self.text.set_group_interval(group_interval);
2875 }
2876
2877 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2878 where
2879 T: rand::Rng,
2880 {
2881 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2882 let mut last_end = None;
2883 for _ in 0..old_range_count {
2884 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2885 break;
2886 }
2887
2888 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2889 let mut range = self.random_byte_range(new_start, rng);
2890 if rng.random_bool(0.2) {
2891 mem::swap(&mut range.start, &mut range.end);
2892 }
2893 last_end = Some(range.end);
2894
2895 let new_text_len = rng.random_range(0..10);
2896 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2897 new_text = new_text.to_uppercase();
2898
2899 edits.push((range, new_text));
2900 }
2901 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2902 self.edit(edits, None, cx);
2903 }
2904
2905 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2906 let was_dirty = self.is_dirty();
2907 let old_version = self.version.clone();
2908
2909 let ops = self.text.randomly_undo_redo(rng);
2910 if !ops.is_empty() {
2911 for op in ops {
2912 self.send_operation(Operation::Buffer(op), true, cx);
2913 self.did_edit(&old_version, was_dirty, cx);
2914 }
2915 }
2916 }
2917}
2918
2919impl EventEmitter<BufferEvent> for Buffer {}
2920
2921impl Deref for Buffer {
2922 type Target = TextBuffer;
2923
2924 fn deref(&self) -> &Self::Target {
2925 &self.text
2926 }
2927}
2928
2929impl BufferSnapshot {
2930 /// Returns [`IndentSize`] for a given line that respects user settings and
2931 /// language preferences.
2932 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2933 indent_size_for_line(self, row)
2934 }
2935
2936 /// Returns [`IndentSize`] for a given position that respects user settings
2937 /// and language preferences.
2938 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2939 let settings = language_settings(
2940 self.language_at(position).map(|l| l.name()),
2941 self.file(),
2942 cx,
2943 );
2944 if settings.hard_tabs {
2945 IndentSize::tab()
2946 } else {
2947 IndentSize::spaces(settings.tab_size.get())
2948 }
2949 }
2950
2951 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2952 /// is passed in as `single_indent_size`.
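    ///
    /// A hedged usage sketch (not a doctest; assumes a 4-space indent unit):
    ///
    /// ```ignore
    /// // Suggest indent sizes for rows 2, 3, and 4.
    /// let suggestions = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: {} columns", indent.len);
    /// }
    /// ```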
2953 pub fn suggested_indents(
2954 &self,
2955 rows: impl Iterator<Item = u32>,
2956 single_indent_size: IndentSize,
2957 ) -> BTreeMap<u32, IndentSize> {
2958 let mut result = BTreeMap::new();
2959
2960 for row_range in contiguous_ranges(rows, 10) {
2961 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2962 Some(suggestions) => suggestions,
2963 _ => break,
2964 };
2965
2966 for (row, suggestion) in row_range.zip(suggestions) {
2967 let indent_size = if let Some(suggestion) = suggestion {
2968 result
2969 .get(&suggestion.basis_row)
2970 .copied()
2971 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2972 .with_delta(suggestion.delta, single_indent_size)
2973 } else {
2974 self.indent_size_for_line(row)
2975 };
2976
2977 result.insert(row, indent_size);
2978 }
2979 }
2980
2981 result
2982 }
2983
2984 fn suggest_autoindents(
2985 &self,
2986 row_range: Range<u32>,
2987 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2988 let config = &self.language.as_ref()?.config;
2989 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2990
2991 #[derive(Debug, Clone)]
2992 struct StartPosition {
2993 start: Point,
2994 suffix: SharedString,
2995 }
2996
2997 // Find the suggested indentation ranges based on the syntax tree.
2998 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2999 let end = Point::new(row_range.end, 0);
3000 let range = (start..end).to_offset(&self.text);
3001 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3002 Some(&grammar.indents_config.as_ref()?.query)
3003 });
3004 let indent_configs = matches
3005 .grammars()
3006 .iter()
3007 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3008 .collect::<Vec<_>>();
3009
3010 let mut indent_ranges = Vec::<Range<Point>>::new();
3011 let mut start_positions = Vec::<StartPosition>::new();
3012 let mut outdent_positions = Vec::<Point>::new();
3013 while let Some(mat) = matches.peek() {
3014 let mut start: Option<Point> = None;
3015 let mut end: Option<Point> = None;
3016
3017 let config = indent_configs[mat.grammar_index];
3018 for capture in mat.captures {
3019 if capture.index == config.indent_capture_ix {
3020 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3021 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3022 } else if Some(capture.index) == config.start_capture_ix {
3023 start = Some(Point::from_ts_point(capture.node.end_position()));
3024 } else if Some(capture.index) == config.end_capture_ix {
3025 end = Some(Point::from_ts_point(capture.node.start_position()));
3026 } else if Some(capture.index) == config.outdent_capture_ix {
3027 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3028 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3029 start_positions.push(StartPosition {
3030 start: Point::from_ts_point(capture.node.start_position()),
3031 suffix: suffix.clone(),
3032 });
3033 }
3034 }
3035
3036 matches.advance();
3037 if let Some((start, end)) = start.zip(end) {
3038 if start.row == end.row {
3039 continue;
3040 }
3041 let range = start..end;
3042 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3043 Err(ix) => indent_ranges.insert(ix, range),
3044 Ok(ix) => {
3045 let prev_range = &mut indent_ranges[ix];
3046 prev_range.end = prev_range.end.max(range.end);
3047 }
3048 }
3049 }
3050 }
3051
3052 let mut error_ranges = Vec::<Range<Point>>::new();
3053 let mut matches = self
3054 .syntax
3055 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3056 while let Some(mat) = matches.peek() {
3057 let node = mat.captures[0].node;
3058 let start = Point::from_ts_point(node.start_position());
3059 let end = Point::from_ts_point(node.end_position());
3060 let range = start..end;
3061 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3062 Ok(ix) | Err(ix) => ix,
3063 };
3064 let mut end_ix = ix;
3065 while let Some(existing_range) = error_ranges.get(end_ix) {
3066 if existing_range.end < end {
3067 end_ix += 1;
3068 } else {
3069 break;
3070 }
3071 }
3072 error_ranges.splice(ix..end_ix, [range]);
3073 matches.advance();
3074 }
3075
3076 outdent_positions.sort();
3077 for outdent_position in outdent_positions {
3078 // find the innermost indent range containing this outdent_position
3079 // set its end to the outdent position
3080 if let Some(range_to_truncate) = indent_ranges
3081 .iter_mut()
3082 .filter(|indent_range| indent_range.contains(&outdent_position))
3083 .next_back()
3084 {
3085 range_to_truncate.end = outdent_position;
3086 }
3087 }
3088
3089 start_positions.sort_by_key(|b| b.start);
3090
3091         // Find the suggested indentation increases and decreases based on regexes.
3092 let mut regex_outdent_map = HashMap::default();
3093 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3094 let mut start_positions_iter = start_positions.iter().peekable();
3095
3096 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3097 self.for_each_line(
3098 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3099 ..Point::new(row_range.end, 0),
3100 |row, line| {
3101 if config
3102 .decrease_indent_pattern
3103 .as_ref()
3104 .is_some_and(|regex| regex.is_match(line))
3105 {
3106 indent_change_rows.push((row, Ordering::Less));
3107 }
3108 if config
3109 .increase_indent_pattern
3110 .as_ref()
3111 .is_some_and(|regex| regex.is_match(line))
3112 {
3113 indent_change_rows.push((row + 1, Ordering::Greater));
3114 }
3115 while let Some(pos) = start_positions_iter.peek() {
3116 if pos.start.row < row {
3117 let pos = start_positions_iter.next().unwrap();
3118 last_seen_suffix
3119 .entry(pos.suffix.to_string())
3120 .or_default()
3121 .push(pos.start);
3122 } else {
3123 break;
3124 }
3125 }
3126 for rule in &config.decrease_indent_patterns {
3127 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3128 let row_start_column = self.indent_size_for_line(row).len;
3129 let basis_row = rule
3130 .valid_after
3131 .iter()
3132 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3133 .flatten()
3134 .filter(|start_point| start_point.column <= row_start_column)
3135 .max_by_key(|start_point| start_point.row);
3136 if let Some(outdent_to_row) = basis_row {
3137 regex_outdent_map.insert(row, outdent_to_row.row);
3138 }
3139 break;
3140 }
3141 }
3142 },
3143 );
3144
3145 let mut indent_changes = indent_change_rows.into_iter().peekable();
3146 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3147 prev_non_blank_row.unwrap_or(0)
3148 } else {
3149 row_range.start.saturating_sub(1)
3150 };
3151
3152 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3153 Some(row_range.map(move |row| {
3154 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3155
3156 let mut indent_from_prev_row = false;
3157 let mut outdent_from_prev_row = false;
3158 let mut outdent_to_row = u32::MAX;
3159 let mut from_regex = false;
3160
3161 while let Some((indent_row, delta)) = indent_changes.peek() {
3162 match indent_row.cmp(&row) {
3163 Ordering::Equal => match delta {
3164 Ordering::Less => {
3165 from_regex = true;
3166 outdent_from_prev_row = true
3167 }
3168 Ordering::Greater => {
3169 indent_from_prev_row = true;
3170 from_regex = true
3171 }
3172 _ => {}
3173 },
3174
3175 Ordering::Greater => break,
3176 Ordering::Less => {}
3177 }
3178
3179 indent_changes.next();
3180 }
3181
3182 for range in &indent_ranges {
3183 if range.start.row >= row {
3184 break;
3185 }
3186 if range.start.row == prev_row && range.end > row_start {
3187 indent_from_prev_row = true;
3188 }
3189 if range.end > prev_row_start && range.end <= row_start {
3190 outdent_to_row = outdent_to_row.min(range.start.row);
3191 }
3192 }
3193
3194 if let Some(basis_row) = regex_outdent_map.get(&row) {
3195 indent_from_prev_row = false;
3196 outdent_to_row = *basis_row;
3197 from_regex = true;
3198 }
3199
3200 let within_error = error_ranges
3201 .iter()
3202 .any(|e| e.start.row < row && e.end > row_start);
3203
3204 let suggestion = if outdent_to_row == prev_row
3205 || (outdent_from_prev_row && indent_from_prev_row)
3206 {
3207 Some(IndentSuggestion {
3208 basis_row: prev_row,
3209 delta: Ordering::Equal,
3210 within_error: within_error && !from_regex,
3211 })
3212 } else if indent_from_prev_row {
3213 Some(IndentSuggestion {
3214 basis_row: prev_row,
3215 delta: Ordering::Greater,
3216 within_error: within_error && !from_regex,
3217 })
3218 } else if outdent_to_row < prev_row {
3219 Some(IndentSuggestion {
3220 basis_row: outdent_to_row,
3221 delta: Ordering::Equal,
3222 within_error: within_error && !from_regex,
3223 })
3224 } else if outdent_from_prev_row {
3225 Some(IndentSuggestion {
3226 basis_row: prev_row,
3227 delta: Ordering::Less,
3228 within_error: within_error && !from_regex,
3229 })
3230 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3231 {
3232 Some(IndentSuggestion {
3233 basis_row: prev_row,
3234 delta: Ordering::Equal,
3235 within_error: within_error && !from_regex,
3236 })
3237 } else {
3238 None
3239 };
3240
3241 prev_row = row;
3242 prev_row_start = row_start;
3243 suggestion
3244 }))
3245 }
3246
3247 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3248 while row > 0 {
3249 row -= 1;
3250 if !self.is_line_blank(row) {
3251 return Some(row);
3252 }
3253 }
3254 None
3255 }
3256
3257 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3258 let captures = self.syntax.captures(range, &self.text, |grammar| {
3259 grammar
3260 .highlights_config
3261 .as_ref()
3262 .map(|config| &config.query)
3263 });
3264 let highlight_maps = captures
3265 .grammars()
3266 .iter()
3267 .map(|grammar| grammar.highlight_map())
3268 .collect();
3269 (captures, highlight_maps)
3270 }
3271
3272 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3273 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3274 /// returned in chunks where each chunk has a single syntax highlighting style and
3275 /// diagnostic status.
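    ///
    /// A hedged usage sketch (not a doctest; the byte range is illustrative):
    ///
    /// ```ignore
    /// // Reassemble the syntax-aware chunks covering the first 100 bytes.
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..100, true) {
    ///     text.push_str(chunk.text);
    /// }
    /// ```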
3276 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3277 let range = range.start.to_offset(self)..range.end.to_offset(self);
3278
3279 let mut syntax = None;
3280 if language_aware {
3281 syntax = Some(self.get_highlights(range.clone()));
3282 }
3283 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3284 let diagnostics = language_aware;
3285 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3286 }
3287
3288 pub fn highlighted_text_for_range<T: ToOffset>(
3289 &self,
3290 range: Range<T>,
3291 override_style: Option<HighlightStyle>,
3292 syntax_theme: &SyntaxTheme,
3293 ) -> HighlightedText {
3294 HighlightedText::from_buffer_range(
3295 range,
3296 &self.text,
3297 &self.syntax,
3298 override_style,
3299 syntax_theme,
3300 )
3301 }
3302
3303 /// Invokes the given callback for each line of text in the given range of the buffer.
3304     /// Uses a callback to avoid allocating a string for each line.
3305 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3306 let mut line = String::new();
3307 let mut row = range.start.row;
3308 for chunk in self
3309 .as_rope()
3310 .chunks_in_range(range.to_offset(self))
3311 .chain(["\n"])
3312 {
3313 for (newline_ix, text) in chunk.split('\n').enumerate() {
3314 if newline_ix > 0 {
3315 callback(row, &line);
3316 row += 1;
3317 line.clear();
3318 }
3319 line.push_str(text);
3320 }
3321 }
3322 }
3323
3324 /// Iterates over every [`SyntaxLayer`] in the buffer.
3325 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3326 self.syntax_layers_for_range(0..self.len(), true)
3327 }
3328
3329 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3330 let offset = position.to_offset(self);
3331 self.syntax_layers_for_range(offset..offset, false)
3332 .filter(|l| l.node().end_byte() > offset)
3333 .last()
3334 }
3335
3336 pub fn syntax_layers_for_range<D: ToOffset>(
3337 &self,
3338 range: Range<D>,
3339 include_hidden: bool,
3340 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3341 self.syntax
3342 .layers_for_range(range, &self.text, include_hidden)
3343 }
3344
3345 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3346 &self,
3347 range: Range<D>,
3348 ) -> Option<SyntaxLayer<'_>> {
3349 let range = range.to_offset(self);
3350 self.syntax
3351 .layers_for_range(range, &self.text, false)
3352 .max_by(|a, b| {
3353 if a.depth != b.depth {
3354 a.depth.cmp(&b.depth)
3355 } else if a.offset.0 != b.offset.0 {
3356 a.offset.0.cmp(&b.offset.0)
3357 } else {
3358 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3359 }
3360 })
3361 }
3362
3363 /// Returns the main [`Language`].
3364 pub fn language(&self) -> Option<&Arc<Language>> {
3365 self.language.as_ref()
3366 }
3367
3368 /// Returns the [`Language`] at the given location.
3369 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3370 self.syntax_layer_at(position)
3371 .map(|info| info.language)
3372 .or(self.language.as_ref())
3373 }
3374
3375 /// Returns the settings for the language at the given location.
3376 pub fn settings_at<'a, D: ToOffset>(
3377 &'a self,
3378 position: D,
3379 cx: &'a App,
3380 ) -> Cow<'a, LanguageSettings> {
3381 language_settings(
3382 self.language_at(position).map(|l| l.name()),
3383 self.file.as_ref(),
3384 cx,
3385 )
3386 }
3387
3388 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3389 CharClassifier::new(self.language_scope_at(point))
3390 }
3391
3392 /// Returns the [`LanguageScope`] at the given location.
3393 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3394 let offset = position.to_offset(self);
3395 let mut scope = None;
3396 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3397
3398 // Use the layer that has the smallest node intersecting the given point.
3399 for layer in self
3400 .syntax
3401 .layers_for_range(offset..offset, &self.text, false)
3402 {
3403 let mut cursor = layer.node().walk();
3404
3405 let mut range = None;
3406 loop {
3407 let child_range = cursor.node().byte_range();
3408 if !child_range.contains(&offset) {
3409 break;
3410 }
3411
3412 range = Some(child_range);
3413 if cursor.goto_first_child_for_byte(offset).is_none() {
3414 break;
3415 }
3416 }
3417
3418 if let Some(range) = range
3419 && smallest_range_and_depth.as_ref().is_none_or(
3420 |(smallest_range, smallest_range_depth)| {
3421 if layer.depth > *smallest_range_depth {
3422 true
3423 } else if layer.depth == *smallest_range_depth {
3424 range.len() < smallest_range.len()
3425 } else {
3426 false
3427 }
3428 },
3429 )
3430 {
3431 smallest_range_and_depth = Some((range, layer.depth));
3432 scope = Some(LanguageScope {
3433 language: layer.language.clone(),
3434 override_id: layer.override_id(offset, &self.text),
3435 });
3436 }
3437 }
3438
3439 scope.or_else(|| {
3440 self.language.clone().map(|language| LanguageScope {
3441 language,
3442 override_id: None,
3443 })
3444 })
3445 }
3446
3447 /// Returns a tuple of the range and character kind of the word
3448 /// surrounding the given position.
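    ///
    /// A hedged sketch of the expected behavior (not a doctest; assumes the buffer
    /// contains `"hello world"`):
    ///
    /// ```ignore
    /// // Offset 8 falls inside "world", so the returned range covers the whole word.
    /// let (range, kind) = snapshot.surrounding_word(8, false);
    /// assert_eq!(&snapshot.text()[range], "world");
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```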
3449 pub fn surrounding_word<T: ToOffset>(
3450 &self,
3451 start: T,
3452 for_completion: bool,
3453 ) -> (Range<usize>, Option<CharKind>) {
3454 let mut start = start.to_offset(self);
3455 let mut end = start;
3456 let mut next_chars = self.chars_at(start).take(128).peekable();
3457 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3458
3459 let classifier = self
3460 .char_classifier_at(start)
3461 .for_completion(for_completion);
3462 let word_kind = cmp::max(
3463 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3464 next_chars.peek().copied().map(|c| classifier.kind(c)),
3465 );
3466
3467 for ch in prev_chars {
3468 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3469 start -= ch.len_utf8();
3470 } else {
3471 break;
3472 }
3473 }
3474
3475 for ch in next_chars {
3476 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3477 end += ch.len_utf8();
3478 } else {
3479 break;
3480 }
3481 }
3482
3483 (start..end, word_kind)
3484 }
3485
3486 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3487 /// range. When `require_larger` is true, the node found must be larger than the query range.
3488 ///
3489 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3490 /// be moved to the root of the tree.
3491 fn goto_node_enclosing_range(
3492 cursor: &mut tree_sitter::TreeCursor,
3493 query_range: &Range<usize>,
3494 require_larger: bool,
3495 ) -> bool {
3496 let mut ascending = false;
3497 loop {
3498 let mut range = cursor.node().byte_range();
3499 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3502 if range.start > query_range.start {
3503 cursor.goto_previous_sibling();
3504 range = cursor.node().byte_range();
3505 }
3506 } else {
3507 // When the query range is non-empty and the current node ends exactly at the start,
3508 // move to the next sibling to find a node that extends beyond the start.
3509 if range.end == query_range.start {
3510 cursor.goto_next_sibling();
3511 range = cursor.node().byte_range();
3512 }
3513 }
3514
3515 let encloses = range.contains_inclusive(query_range)
3516 && (!require_larger || range.len() > query_range.len());
3517 if !encloses {
3518 ascending = true;
3519 if !cursor.goto_parent() {
3520 return false;
3521 }
3522 continue;
3523 } else if ascending {
3524 return true;
3525 }
3526
3527 // Descend into the current node.
3528 if cursor
3529 .goto_first_child_for_byte(query_range.start)
3530 .is_none()
3531 {
3532 return true;
3533 }
3534 }
3535 }
3536
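    /// Returns a syntax node that contains the given range and is strictly larger than it,
    /// preferring the smallest such node across syntax layers.
    ///
    /// A sketch of growing a selection to its enclosing node (assuming a
    /// `snapshot: BufferSnapshot` and a `selection: Range<usize>`):
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_ancestor(selection.clone()) {
    ///     let expanded = node.byte_range();
    /// }
    /// ```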
3537 pub fn syntax_ancestor<'a, T: ToOffset>(
3538 &'a self,
3539 range: Range<T>,
3540 ) -> Option<tree_sitter::Node<'a>> {
3541 let range = range.start.to_offset(self)..range.end.to_offset(self);
3542 let mut result: Option<tree_sitter::Node<'a>> = None;
3543 for layer in self
3544 .syntax
3545 .layers_for_range(range.clone(), &self.text, true)
3546 {
3547 let mut cursor = layer.node().walk();
3548
3549 // Find the node that both contains the range and is larger than it.
3550 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3551 continue;
3552 }
3553
3554 let left_node = cursor.node();
3555 let mut layer_result = left_node;
3556
3557 // For an empty range, try to find another node immediately to the right of the range.
3558 if left_node.end_byte() == range.start {
3559 let mut right_node = None;
3560 while !cursor.goto_next_sibling() {
3561 if !cursor.goto_parent() {
3562 break;
3563 }
3564 }
3565
3566 while cursor.node().start_byte() == range.start {
3567 right_node = Some(cursor.node());
3568 if !cursor.goto_first_child() {
3569 break;
3570 }
3571 }
3572
3573 // If there is a candidate node on both sides of the (empty) range, then
3574 // decide between the two by favoring a named node over an anonymous token.
3575 // If both nodes are the same in that regard, favor the right one.
3576 if let Some(right_node) = right_node
3577 && (right_node.is_named() || !left_node.is_named())
3578 {
3579 layer_result = right_node;
3580 }
3581 }
3582
3583 if let Some(previous_result) = &result
3584 && previous_result.byte_range().len() < layer_result.byte_range().len()
3585 {
3586 continue;
3587 }
3588 result = Some(layer_result);
3589 }
3590
3591 result
3592 }
3593
3594 /// Find the previous sibling syntax node at the given range.
3595 ///
3596 /// This function locates the syntax node that precedes the node containing
3597 /// the given range. It searches hierarchically by:
3598 /// 1. Finding the node that contains the given range
3599 /// 2. Looking for the previous sibling at the same tree level
3600 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3601 ///
3602 /// Returns `None` if there is no previous sibling at any ancestor level.
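    ///
    /// A usage sketch (assuming a `snapshot: BufferSnapshot` and a `range: Range<usize>`):
    ///
    /// ```ignore
    /// if let Some(prev) = snapshot.syntax_prev_sibling(range.clone()) {
    ///     let prev_range = prev.byte_range();
    /// }
    /// ```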
3603 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3604 &'a self,
3605 range: Range<T>,
3606 ) -> Option<tree_sitter::Node<'a>> {
3607 let range = range.start.to_offset(self)..range.end.to_offset(self);
3608 let mut result: Option<tree_sitter::Node<'a>> = None;
3609
3610 for layer in self
3611 .syntax
3612 .layers_for_range(range.clone(), &self.text, true)
3613 {
3614 let mut cursor = layer.node().walk();
3615
3616 // Find the node that contains the range
3617 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3618 continue;
3619 }
3620
3621 // Look for the previous sibling, moving up ancestor levels if needed
3622 loop {
3623 if cursor.goto_previous_sibling() {
3624 let layer_result = cursor.node();
3625
3626 if let Some(previous_result) = &result {
3627 if previous_result.byte_range().end < layer_result.byte_range().end {
3628 continue;
3629 }
3630 }
3631 result = Some(layer_result);
3632 break;
3633 }
3634
3635 // No sibling found at this level, try moving up to parent
3636 if !cursor.goto_parent() {
3637 break;
3638 }
3639 }
3640 }
3641
3642 result
3643 }
3644
3645 /// Find the next sibling syntax node at the given range.
3646 ///
3647 /// This function locates the syntax node that follows the node containing
3648 /// the given range. It searches hierarchically by:
3649 /// 1. Finding the node that contains the given range
3650 /// 2. Looking for the next sibling at the same tree level
3651 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3652 ///
3653 /// Returns `None` if there is no next sibling at any ancestor level.
3654 pub fn syntax_next_sibling<'a, T: ToOffset>(
3655 &'a self,
3656 range: Range<T>,
3657 ) -> Option<tree_sitter::Node<'a>> {
3658 let range = range.start.to_offset(self)..range.end.to_offset(self);
3659 let mut result: Option<tree_sitter::Node<'a>> = None;
3660
3661 for layer in self
3662 .syntax
3663 .layers_for_range(range.clone(), &self.text, true)
3664 {
3665 let mut cursor = layer.node().walk();
3666
3667 // Find the node that contains the range
3668 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3669 continue;
3670 }
3671
3672 // Look for the next sibling, moving up ancestor levels if needed
3673 loop {
3674 if cursor.goto_next_sibling() {
3675 let layer_result = cursor.node();
3676
3677 if let Some(previous_result) = &result {
3678 if previous_result.byte_range().start > layer_result.byte_range().start {
3679 continue;
3680 }
3681 }
3682 result = Some(layer_result);
3683 break;
3684 }
3685
3686 // No sibling found at this level, try moving up to parent
3687 if !cursor.goto_parent() {
3688 break;
3689 }
3690 }
3691 }
3692
3693 result
3694 }
3695
3696 /// Returns the root syntax node within the given row
3697 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3698 let start_offset = position.to_offset(self);
3699
3700 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3701
3702 let layer = self
3703 .syntax
3704 .layers_for_range(start_offset..start_offset, &self.text, true)
3705 .next()?;
3706
3707 let mut cursor = layer.node().walk();
3708
3709 // Descend to the first leaf that touches the start of the range.
3710 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3711 if cursor.node().end_byte() == start_offset {
3712 cursor.goto_next_sibling();
3713 }
3714 }
3715
3716 // Ascend to the root node within the same row.
3717 while cursor.goto_parent() {
3718 if cursor.node().start_position().row != row {
3719 break;
3720 }
3721 }
3722
3723 Some(cursor.node())
3724 }
3725
3726 /// Returns the outline for the buffer.
3727 ///
3728 /// This method allows passing an optional [`SyntaxTheme`] to
3729 /// syntax-highlight the returned symbols.
3730 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3731 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3732 }
3733
3734 /// Returns all the symbols that contain the given position.
3735 ///
3736 /// This method allows passing an optional [`SyntaxTheme`] to
3737 /// syntax-highlight the returned symbols.
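    ///
    /// A usage sketch (assuming a `snapshot: BufferSnapshot` and a cursor `offset: usize`):
    ///
    /// ```ignore
    /// for symbol in snapshot.symbols_containing(offset, None) {
    ///     // `symbol.text` is the rendered label and `symbol.depth` its nesting level.
    /// }
    /// ```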
3738 pub fn symbols_containing<T: ToOffset>(
3739 &self,
3740 position: T,
3741 theme: Option<&SyntaxTheme>,
3742 ) -> Vec<OutlineItem<Anchor>> {
3743 let position = position.to_offset(self);
3744 let mut items = self.outline_items_containing(
3745 position.saturating_sub(1)..self.len().min(position + 1),
3746 false,
3747 theme,
3748 );
3749 let mut prev_depth = None;
3750 items.retain(|item| {
3751 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3752 prev_depth = Some(item.depth);
3753 result
3754 });
3755 items
3756 }
3757
3758 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3759 let range = range.to_offset(self);
3760 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3761 grammar.outline_config.as_ref().map(|c| &c.query)
3762 });
3763 let configs = matches
3764 .grammars()
3765 .iter()
3766 .map(|g| g.outline_config.as_ref().unwrap())
3767 .collect::<Vec<_>>();
3768
3769 while let Some(mat) = matches.peek() {
3770 let config = &configs[mat.grammar_index];
3771 let containing_item_node = maybe!({
3772 let item_node = mat.captures.iter().find_map(|cap| {
3773 if cap.index == config.item_capture_ix {
3774 Some(cap.node)
3775 } else {
3776 None
3777 }
3778 })?;
3779
3780 let item_byte_range = item_node.byte_range();
3781 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3782 None
3783 } else {
3784 Some(item_node)
3785 }
3786 });
3787
3788 if let Some(item_node) = containing_item_node {
3789 return Some(
3790 Point::from_ts_point(item_node.start_position())
3791 ..Point::from_ts_point(item_node.end_position()),
3792 );
3793 }
3794
3795 matches.advance();
3796 }
3797 None
3798 }
3799
3800 pub fn outline_items_containing<T: ToOffset>(
3801 &self,
3802 range: Range<T>,
3803 include_extra_context: bool,
3804 theme: Option<&SyntaxTheme>,
3805 ) -> Vec<OutlineItem<Anchor>> {
3806 let range = range.to_offset(self);
3807 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3808 grammar.outline_config.as_ref().map(|c| &c.query)
3809 });
3810
3811 let mut items = Vec::new();
3812 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3813 while let Some(mat) = matches.peek() {
3814 let config = matches.grammars()[mat.grammar_index]
3815 .outline_config
3816 .as_ref()
3817 .unwrap();
3818 if let Some(item) =
3819 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3820 {
3821 items.push(item);
3822 } else if let Some(capture) = mat
3823 .captures
3824 .iter()
3825 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3826 {
3827 let capture_range = capture.node.start_position()..capture.node.end_position();
3828 let mut capture_row_range =
3829 capture_range.start.row as u32..capture_range.end.row as u32;
3830 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3831 {
3832 capture_row_range.end -= 1;
3833 }
3834 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3835 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3836 last_row_range.end = capture_row_range.end;
3837 } else {
3838 annotation_row_ranges.push(capture_row_range);
3839 }
3840 } else {
3841 annotation_row_ranges.push(capture_row_range);
3842 }
3843 }
3844 matches.advance();
3845 }
3846
3847 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3848
3849 // Assign depths based on containment relationships and convert to anchors.
3850 let mut item_ends_stack = Vec::<Point>::new();
3851 let mut anchor_items = Vec::new();
3852 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3853 for item in items {
3854 while let Some(last_end) = item_ends_stack.last().copied() {
3855 if last_end < item.range.end {
3856 item_ends_stack.pop();
3857 } else {
3858 break;
3859 }
3860 }
3861
3862 let mut annotation_row_range = None;
3863 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3864 let row_preceding_item = item.range.start.row.saturating_sub(1);
3865 if next_annotation_row_range.end < row_preceding_item {
3866 annotation_row_ranges.next();
3867 } else {
3868 if next_annotation_row_range.end == row_preceding_item {
3869 annotation_row_range = Some(next_annotation_row_range.clone());
3870 annotation_row_ranges.next();
3871 }
3872 break;
3873 }
3874 }
3875
3876 anchor_items.push(OutlineItem {
3877 depth: item_ends_stack.len(),
3878 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3879 text: item.text,
3880 highlight_ranges: item.highlight_ranges,
3881 name_ranges: item.name_ranges,
3882 signature_range: item
3883 .signature_range
3884 .map(|r| self.anchor_after(r.start)..self.anchor_before(r.end)),
3885 body_range: item
3886 .body_range
3887 .map(|r| self.anchor_after(r.start)..self.anchor_before(r.end)),
3888 annotation_range: annotation_row_range.map(|annotation_range| {
3889 self.anchor_after(Point::new(annotation_range.start, 0))
3890 ..self.anchor_before(Point::new(
3891 annotation_range.end,
3892 self.line_len(annotation_range.end),
3893 ))
3894 }),
3895 });
3896 item_ends_stack.push(item.range.end);
3897 }
3898
3899 anchor_items
3900 }
3901
3902 fn next_outline_item(
3903 &self,
3904 config: &OutlineConfig,
3905 mat: &SyntaxMapMatch,
3906 range: &Range<usize>,
3907 include_extra_context: bool,
3908 theme: Option<&SyntaxTheme>,
3909 ) -> Option<OutlineItem<Point>> {
3910 let item_node = mat.captures.iter().find_map(|cap| {
3911 if cap.index == config.item_capture_ix {
3912 Some(cap.node)
3913 } else {
3914 None
3915 }
3916 })?;
3917
3918 let item_byte_range = item_node.byte_range();
3919 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3920 return None;
3921 }
3922 let item_point_range = Point::from_ts_point(item_node.start_position())
3923 ..Point::from_ts_point(item_node.end_position());
3924
3925 let mut open_point = None;
3926 let mut close_point = None;
3927
3928 let mut signature_start = None;
3929 let mut signature_end = None;
3930 let mut extend_signature_range = |node: tree_sitter::Node| {
3931 if signature_start.is_none() {
3932 signature_start = Some(Point::from_ts_point(node.start_position()));
3933 }
3934 signature_end = Some(Point::from_ts_point(node.end_position()));
3935 };
3936
3937 let mut buffer_ranges = Vec::new();
3938 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
3939 let mut range = node.start_byte()..node.end_byte();
3940 let start = node.start_position();
3941 if node.end_position().row > start.row {
3942 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3943 }
3944
3945 if !range.is_empty() {
3946 buffer_ranges.push((range, node_is_name));
3947 }
3948 };
3949
3950 for capture in mat.captures {
3951 if capture.index == config.name_capture_ix {
3952 add_to_buffer_ranges(capture.node, true);
3953 extend_signature_range(capture.node);
3954 } else if Some(capture.index) == config.context_capture_ix
3955 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3956 {
3957 add_to_buffer_ranges(capture.node, false);
3958 extend_signature_range(capture.node);
3959 } else {
3960 if Some(capture.index) == config.open_capture_ix {
3961 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3962 } else if Some(capture.index) == config.close_capture_ix {
3963 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3964 }
3965 }
3966 }
3967
3968 if buffer_ranges.is_empty() {
3969 return None;
3970 }
3971
3972 let mut text = String::new();
3973 let mut highlight_ranges = Vec::new();
3974 let mut name_ranges = Vec::new();
3975 let mut chunks = self.chunks(
3976 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3977 true,
3978 );
3979 let mut last_buffer_range_end = 0;
3980 for (buffer_range, is_name) in buffer_ranges {
3981 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3982 if space_added {
3983 text.push(' ');
3984 }
3985 let before_append_len = text.len();
3986 let mut offset = buffer_range.start;
3987 chunks.seek(buffer_range.clone());
3988 for mut chunk in chunks.by_ref() {
3989 if chunk.text.len() > buffer_range.end - offset {
3990 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3991 offset = buffer_range.end;
3992 } else {
3993 offset += chunk.text.len();
3994 }
3995 let style = chunk
3996 .syntax_highlight_id
3997 .zip(theme)
3998 .and_then(|(highlight, theme)| highlight.style(theme));
3999 if let Some(style) = style {
4000 let start = text.len();
4001 let end = start + chunk.text.len();
4002 highlight_ranges.push((start..end, style));
4003 }
4004 text.push_str(chunk.text);
4005 if offset >= buffer_range.end {
4006 break;
4007 }
4008 }
4009 if is_name {
4010 let after_append_len = text.len();
4011 let start = if space_added && !name_ranges.is_empty() {
4012 before_append_len - 1
4013 } else {
4014 before_append_len
4015 };
4016 name_ranges.push(start..after_append_len);
4017 }
4018 last_buffer_range_end = buffer_range.end;
4019 }
4020
4021 let signature_range = signature_start
4022 .zip(signature_end)
4023 .map(|(start, end)| start..end);
4024
4025 Some(OutlineItem {
4026 depth: 0, // We'll calculate the depth later
4027 range: item_point_range,
4028 text,
4029 highlight_ranges,
4030 name_ranges,
4031 signature_range,
4032 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4033 annotation_range: None,
4034 })
4035 }
4036
4037 pub fn function_body_fold_ranges<T: ToOffset>(
4038 &self,
4039 within: Range<T>,
4040 ) -> impl Iterator<Item = Range<usize>> + '_ {
4041 self.text_object_ranges(within, TreeSitterOptions::default())
4042 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4043 }
4044
4045 /// For each grammar in the language, runs the provided
4046 /// [`tree_sitter::Query`] against the given range.
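    ///
    /// A sketch of running the outline query over the whole buffer (assumes a
    /// `snapshot: BufferSnapshot`; accessing `outline_config` mirrors how this module
    /// uses it and may not be available outside the crate):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|c| &c.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here.
    ///     matches.advance();
    /// }
    /// ```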
4047 pub fn matches(
4048 &self,
4049 range: Range<usize>,
4050 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4051 ) -> SyntaxMapMatches<'_> {
4052 self.syntax.matches(range, self, query)
4053 }
4054
4055 pub fn all_bracket_ranges(
4056 &self,
4057 range: Range<usize>,
4058 ) -> impl Iterator<Item = BracketMatch> + '_ {
4059 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4060 grammar.brackets_config.as_ref().map(|c| &c.query)
4061 });
4062 let configs = matches
4063 .grammars()
4064 .iter()
4065 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4066 .collect::<Vec<_>>();
4067
4068 iter::from_fn(move || {
4069 while let Some(mat) = matches.peek() {
4070 let mut open = None;
4071 let mut close = None;
4072 let config = &configs[mat.grammar_index];
4073 let pattern = &config.patterns[mat.pattern_index];
4074 for capture in mat.captures {
4075 if capture.index == config.open_capture_ix {
4076 open = Some(capture.node.byte_range());
4077 } else if capture.index == config.close_capture_ix {
4078 close = Some(capture.node.byte_range());
4079 }
4080 }
4081
4082 matches.advance();
4083
4084 let Some((open_range, close_range)) = open.zip(close) else {
4085 continue;
4086 };
4087
4088 let bracket_range = open_range.start..=close_range.end;
4089 if !bracket_range.overlaps(&range) {
4090 continue;
4091 }
4092
4093 return Some(BracketMatch {
4094 open_range,
4095 close_range,
4096 newline_only: pattern.newline_only,
4097 });
4098 }
4099 None
4100 })
4101 }
4102
4103 /// Returns bracket range pairs overlapping or adjacent to `range`
4104 pub fn bracket_ranges<T: ToOffset>(
4105 &self,
4106 range: Range<T>,
4107 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Expand the query range by one character on each side so that bracket pairs
        // adjacent to the range are also considered.
4109 let range = range.start.to_offset(self).saturating_sub(1)
4110 ..self.len().min(range.end.to_offset(self) + 1);
4111 self.all_bracket_ranges(range)
4112 .filter(|pair| !pair.newline_only)
4113 }
4114
4115 pub fn debug_variables_query<T: ToOffset>(
4116 &self,
4117 range: Range<T>,
4118 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4119 let range = range.start.to_offset(self).saturating_sub(1)
4120 ..self.len().min(range.end.to_offset(self) + 1);
4121
4122 let mut matches = self.syntax.matches_with_options(
4123 range.clone(),
4124 &self.text,
4125 TreeSitterOptions::default(),
4126 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4127 );
4128
4129 let configs = matches
4130 .grammars()
4131 .iter()
4132 .map(|grammar| grammar.debug_variables_config.as_ref())
4133 .collect::<Vec<_>>();
4134
4135 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4136
4137 iter::from_fn(move || {
4138 loop {
4139 while let Some(capture) = captures.pop() {
4140 if capture.0.overlaps(&range) {
4141 return Some(capture);
4142 }
4143 }
4144
4145 let mat = matches.peek()?;
4146
4147 let Some(config) = configs[mat.grammar_index].as_ref() else {
4148 matches.advance();
4149 continue;
4150 };
4151
4152 for capture in mat.captures {
4153 let Some(ix) = config
4154 .objects_by_capture_ix
4155 .binary_search_by_key(&capture.index, |e| e.0)
4156 .ok()
4157 else {
4158 continue;
4159 };
4160 let text_object = config.objects_by_capture_ix[ix].1;
4161 let byte_range = capture.node.byte_range();
4162
4163 let mut found = false;
4164 for (range, existing) in captures.iter_mut() {
4165 if existing == &text_object {
4166 range.start = range.start.min(byte_range.start);
4167 range.end = range.end.max(byte_range.end);
4168 found = true;
4169 break;
4170 }
4171 }
4172
4173 if !found {
4174 captures.push((byte_range, text_object));
4175 }
4176 }
4177
4178 matches.advance();
4179 }
4180 })
4181 }
4182
4183 pub fn text_object_ranges<T: ToOffset>(
4184 &self,
4185 range: Range<T>,
4186 options: TreeSitterOptions,
4187 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4188 let range = range.start.to_offset(self).saturating_sub(1)
4189 ..self.len().min(range.end.to_offset(self) + 1);
4190
4191 let mut matches =
4192 self.syntax
4193 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4194 grammar.text_object_config.as_ref().map(|c| &c.query)
4195 });
4196
4197 let configs = matches
4198 .grammars()
4199 .iter()
4200 .map(|grammar| grammar.text_object_config.as_ref())
4201 .collect::<Vec<_>>();
4202
4203 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4204
4205 iter::from_fn(move || {
4206 loop {
4207 while let Some(capture) = captures.pop() {
4208 if capture.0.overlaps(&range) {
4209 return Some(capture);
4210 }
4211 }
4212
4213 let mat = matches.peek()?;
4214
4215 let Some(config) = configs[mat.grammar_index].as_ref() else {
4216 matches.advance();
4217 continue;
4218 };
4219
4220 for capture in mat.captures {
4221 let Some(ix) = config
4222 .text_objects_by_capture_ix
4223 .binary_search_by_key(&capture.index, |e| e.0)
4224 .ok()
4225 else {
4226 continue;
4227 };
4228 let text_object = config.text_objects_by_capture_ix[ix].1;
4229 let byte_range = capture.node.byte_range();
4230
4231 let mut found = false;
4232 for (range, existing) in captures.iter_mut() {
4233 if existing == &text_object {
4234 range.start = range.start.min(byte_range.start);
4235 range.end = range.end.max(byte_range.end);
4236 found = true;
4237 break;
4238 }
4239 }
4240
4241 if !found {
4242 captures.push((byte_range, text_object));
4243 }
4244 }
4245
4246 matches.advance();
4247 }
4248 })
4249 }
4250
4251 /// Returns enclosing bracket ranges containing the given range
4252 pub fn enclosing_bracket_ranges<T: ToOffset>(
4253 &self,
4254 range: Range<T>,
4255 ) -> impl Iterator<Item = BracketMatch> + '_ {
4256 let range = range.start.to_offset(self)..range.end.to_offset(self);
4257
4258 self.bracket_ranges(range.clone()).filter(move |pair| {
4259 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4260 })
4261 }
4262
    /// Returns the smallest pair of enclosing bracket ranges containing the given range,
    /// or `None` if no bracket pair contains it.
    ///
    /// An optional `range_filter` can be passed to restrict which bracket ranges are considered.
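    ///
    /// A usage sketch (assuming a `snapshot: BufferSnapshot` and a `selection: Range<usize>`):
    ///
    /// ```ignore
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(selection, None) {
    ///     // `open` and `close` are the byte ranges of the opening and closing brackets.
    /// }
    /// ```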
4266 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4267 &self,
4268 range: Range<T>,
4269 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4270 ) -> Option<(Range<usize>, Range<usize>)> {
4271 let range = range.start.to_offset(self)..range.end.to_offset(self);
4272
4273 // Get the ranges of the innermost pair of brackets.
4274 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4275
4276 for pair in self.enclosing_bracket_ranges(range) {
4277 if let Some(range_filter) = range_filter
4278 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4279 {
4280 continue;
4281 }
4282
4283 let len = pair.close_range.end - pair.open_range.start;
4284
4285 if let Some((existing_open, existing_close)) = &result {
4286 let existing_len = existing_close.end - existing_open.start;
4287 if len > existing_len {
4288 continue;
4289 }
4290 }
4291
4292 result = Some((pair.open_range, pair.close_range));
4293 }
4294
4295 result
4296 }
4297
    /// Returns the offset ranges of any matches of the redaction query.
4299 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4300 /// will be run on the relevant section of the buffer.
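    ///
    /// A usage sketch (assuming a `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// for redacted in snapshot.redacted_ranges(0..snapshot.len()) {
    ///     // e.g. replace the text in `redacted` with a placeholder when rendering.
    /// }
    /// ```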
4301 pub fn redacted_ranges<T: ToOffset>(
4302 &self,
4303 range: Range<T>,
4304 ) -> impl Iterator<Item = Range<usize>> + '_ {
4305 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4306 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4307 grammar
4308 .redactions_config
4309 .as_ref()
4310 .map(|config| &config.query)
4311 });
4312
4313 let configs = syntax_matches
4314 .grammars()
4315 .iter()
4316 .map(|grammar| grammar.redactions_config.as_ref())
4317 .collect::<Vec<_>>();
4318
4319 iter::from_fn(move || {
4320 let redacted_range = syntax_matches
4321 .peek()
4322 .and_then(|mat| {
4323 configs[mat.grammar_index].and_then(|config| {
4324 mat.captures
4325 .iter()
4326 .find(|capture| capture.index == config.redaction_capture_ix)
4327 })
4328 })
4329 .map(|mat| mat.node.byte_range());
4330 syntax_matches.advance();
4331 redacted_range
4332 })
4333 }
4334
4335 pub fn injections_intersecting_range<T: ToOffset>(
4336 &self,
4337 range: Range<T>,
4338 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4339 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4340
4341 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4342 grammar
4343 .injection_config
4344 .as_ref()
4345 .map(|config| &config.query)
4346 });
4347
4348 let configs = syntax_matches
4349 .grammars()
4350 .iter()
4351 .map(|grammar| grammar.injection_config.as_ref())
4352 .collect::<Vec<_>>();
4353
4354 iter::from_fn(move || {
4355 let ranges = syntax_matches.peek().and_then(|mat| {
4356 let config = &configs[mat.grammar_index]?;
4357 let content_capture_range = mat.captures.iter().find_map(|capture| {
4358 if capture.index == config.content_capture_ix {
4359 Some(capture.node.byte_range())
4360 } else {
4361 None
4362 }
4363 })?;
4364 let language = self.language_at(content_capture_range.start)?;
4365 Some((content_capture_range, language))
4366 });
4367 syntax_matches.advance();
4368 ranges
4369 })
4370 }
4371
4372 pub fn runnable_ranges(
4373 &self,
4374 offset_range: Range<usize>,
4375 ) -> impl Iterator<Item = RunnableRange> + '_ {
4376 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4377 grammar.runnable_config.as_ref().map(|config| &config.query)
4378 });
4379
4380 let test_configs = syntax_matches
4381 .grammars()
4382 .iter()
4383 .map(|grammar| grammar.runnable_config.as_ref())
4384 .collect::<Vec<_>>();
4385
4386 iter::from_fn(move || {
4387 loop {
4388 let mat = syntax_matches.peek()?;
4389
4390 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4391 let mut run_range = None;
4392 let full_range = mat.captures.iter().fold(
4393 Range {
4394 start: usize::MAX,
4395 end: 0,
4396 },
4397 |mut acc, next| {
4398 let byte_range = next.node.byte_range();
4399 if acc.start > byte_range.start {
4400 acc.start = byte_range.start;
4401 }
4402 if acc.end < byte_range.end {
4403 acc.end = byte_range.end;
4404 }
4405 acc
4406 },
4407 );
4408 if full_range.start > full_range.end {
4409 // We did not find a full spanning range of this match.
4410 return None;
4411 }
4412 let extra_captures: SmallVec<[_; 1]> =
4413 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4414 test_configs
4415 .extra_captures
4416 .get(capture.index as usize)
4417 .cloned()
4418 .and_then(|tag_name| match tag_name {
4419 RunnableCapture::Named(name) => {
4420 Some((capture.node.byte_range(), name))
4421 }
4422 RunnableCapture::Run => {
4423 let _ = run_range.insert(capture.node.byte_range());
4424 None
4425 }
4426 })
4427 }));
4428 let run_range = run_range?;
4429 let tags = test_configs
4430 .query
4431 .property_settings(mat.pattern_index)
4432 .iter()
4433 .filter_map(|property| {
4434 if *property.key == *"tag" {
4435 property
4436 .value
4437 .as_ref()
4438 .map(|value| RunnableTag(value.to_string().into()))
4439 } else {
4440 None
4441 }
4442 })
4443 .collect();
4444 let extra_captures = extra_captures
4445 .into_iter()
4446 .map(|(range, name)| {
4447 (
4448 name.to_string(),
4449 self.text_for_range(range).collect::<String>(),
4450 )
4451 })
4452 .collect();
4453 // All tags should have the same range.
4454 Some(RunnableRange {
4455 run_range,
4456 full_range,
4457 runnable: Runnable {
4458 tags,
4459 language: mat.language,
4460 buffer: self.remote_id(),
4461 },
4462 extra_captures,
4463 buffer_id: self.remote_id(),
4464 })
4465 });
4466
4467 syntax_matches.advance();
4468 if test_range.is_some() {
                    // Only yield a range when this match produced a runnable. If a match had no
                    // run marker, loop around to inspect the next match instead of ending the
                    // iterator. (Short-circuiting when `.peek()` returns `None` above is fine,
                    // since that means there are no more matches.)
4471 return test_range;
4472 }
4473 }
4474 })
4475 }
4476
4477 /// Returns selections for remote peers intersecting the given range.
4478 #[allow(clippy::type_complexity)]
4479 pub fn selections_in_range(
4480 &self,
4481 range: Range<Anchor>,
4482 include_local: bool,
4483 ) -> impl Iterator<
4484 Item = (
4485 ReplicaId,
4486 bool,
4487 CursorShape,
4488 impl Iterator<Item = &Selection<Anchor>> + '_,
4489 ),
4490 > + '_ {
4491 self.remote_selections
4492 .iter()
4493 .filter(move |(replica_id, set)| {
4494 (include_local || **replica_id != self.text.replica_id())
4495 && !set.selections.is_empty()
4496 })
4497 .map(move |(replica_id, set)| {
4498 let start_ix = match set.selections.binary_search_by(|probe| {
4499 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4500 }) {
4501 Ok(ix) | Err(ix) => ix,
4502 };
4503 let end_ix = match set.selections.binary_search_by(|probe| {
4504 probe.start.cmp(&range.end, self).then(Ordering::Less)
4505 }) {
4506 Ok(ix) | Err(ix) => ix,
4507 };
4508
4509 (
4510 *replica_id,
4511 set.line_mode,
4512 set.cursor_shape,
4513 set.selections[start_ix..end_ix].iter(),
4514 )
4515 })
4516 }
4517
    /// Returns whether the buffer contains any diagnostics.
4519 pub fn has_diagnostics(&self) -> bool {
4520 !self.diagnostics.is_empty()
4521 }
4522
4523 /// Returns all the diagnostics intersecting the given range.
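    ///
    /// A usage sketch (assuming a `snapshot: BufferSnapshot`), resolving entries to
    /// `Point` coordinates:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, Point>(0..snapshot.len(), false) {
    ///     // `entry.range` is a `Range<Point>`; `entry.diagnostic` holds severity, message, etc.
    /// }
    /// ```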
4524 pub fn diagnostics_in_range<'a, T, O>(
4525 &'a self,
4526 search_range: Range<T>,
4527 reversed: bool,
4528 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4529 where
4530 T: 'a + Clone + ToOffset,
4531 O: 'a + FromAnchor,
4532 {
4533 let mut iterators: Vec<_> = self
4534 .diagnostics
4535 .iter()
4536 .map(|(_, collection)| {
4537 collection
4538 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4539 .peekable()
4540 })
4541 .collect();
4542
4543 std::iter::from_fn(move || {
4544 let (next_ix, _) = iterators
4545 .iter_mut()
4546 .enumerate()
4547 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4548 .min_by(|(_, a), (_, b)| {
4549 let cmp = a
4550 .range
4551 .start
4552 .cmp(&b.range.start, self)
4553 // when range is equal, sort by diagnostic severity
4554 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4555 // and stabilize order with group_id
4556 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4557 if reversed { cmp.reverse() } else { cmp }
4558 })?;
4559 iterators[next_ix]
4560 .next()
4561 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4562 diagnostic,
4563 range: FromAnchor::from_anchor(&range.start, self)
4564 ..FromAnchor::from_anchor(&range.end, self),
4565 })
4566 })
4567 }
4568
4569 /// Returns all the diagnostic groups associated with the given
4570 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4572 pub fn diagnostic_groups(
4573 &self,
4574 language_server_id: Option<LanguageServerId>,
4575 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4576 let mut groups = Vec::new();
4577
4578 if let Some(language_server_id) = language_server_id {
4579 if let Ok(ix) = self
4580 .diagnostics
4581 .binary_search_by_key(&language_server_id, |e| e.0)
4582 {
4583 self.diagnostics[ix]
4584 .1
4585 .groups(language_server_id, &mut groups, self);
4586 }
4587 } else {
4588 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4589 diagnostics.groups(*language_server_id, &mut groups, self);
4590 }
4591 }
4592
4593 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4594 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4595 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4596 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4597 });
4598
4599 groups
4600 }
4601
4602 /// Returns an iterator over the diagnostics for the given group.
4603 pub fn diagnostic_group<O>(
4604 &self,
4605 group_id: usize,
4606 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4607 where
4608 O: FromAnchor + 'static,
4609 {
4610 self.diagnostics
4611 .iter()
4612 .flat_map(move |(_, set)| set.group(group_id, self))
4613 }
4614
4615 /// An integer version number that accounts for all updates besides
4616 /// the buffer's text itself (which is versioned via a version vector).
4617 pub fn non_text_state_update_count(&self) -> usize {
4618 self.non_text_state_update_count
4619 }
4620
4621 /// An integer version that changes when the buffer's syntax changes.
4622 pub fn syntax_update_count(&self) -> usize {
4623 self.syntax.update_count()
4624 }
4625
    /// Returns a snapshot of the underlying file.
4627 pub fn file(&self) -> Option<&Arc<dyn File>> {
4628 self.file.as_ref()
4629 }
4630
4631 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4632 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4633 if let Some(file) = self.file() {
4634 if file.path().file_name().is_none() || include_root {
4635 Some(file.full_path(cx))
4636 } else {
4637 Some(file.path().to_path_buf())
4638 }
4639 } else {
4640 None
4641 }
4642 }
4643
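    /// Collects words within the given buffer range, mapping each word's text to its
    /// anchor range. When `fuzzy_contents` is set, only words containing its characters
    /// (in order) are returned.
    ///
    /// A usage sketch (assuming a `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("foo"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```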
4644 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4645 let query_str = query.fuzzy_contents;
4646 if query_str.is_some_and(|query| query.is_empty()) {
4647 return BTreeMap::default();
4648 }
4649
4650 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4651 language,
4652 override_id: None,
4653 }));
4654
4655 let mut query_ix = 0;
4656 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4657 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4658
4659 let mut words = BTreeMap::default();
4660 let mut current_word_start_ix = None;
4661 let mut chunk_ix = query.range.start;
4662 for chunk in self.chunks(query.range, false) {
4663 for (i, c) in chunk.text.char_indices() {
4664 let ix = chunk_ix + i;
4665 if classifier.is_word(c) {
4666 if current_word_start_ix.is_none() {
4667 current_word_start_ix = Some(ix);
4668 }
4669
4670 if let Some(query_chars) = &query_chars
4671 && query_ix < query_len
4672 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4673 {
4674 query_ix += 1;
4675 }
4676 continue;
4677 } else if let Some(word_start) = current_word_start_ix.take()
4678 && query_ix == query_len
4679 {
4680 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4681 let mut word_text = self.text_for_range(word_start..ix).peekable();
4682 let first_char = word_text
4683 .peek()
4684 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words
                    // and "words" that start with a digit.
4686 if !query.skip_digits
4687 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4688 {
4689 words.insert(word_text.collect(), word_range);
4690 }
4691 }
4692 query_ix = 0;
4693 }
4694 chunk_ix += chunk.text.len();
4695 }
4696
4697 words
4698 }
4699}
4700
4701pub struct WordsQuery<'a> {
    /// Only returns words that contain every character of this string, in order (matched case-insensitively).
4703 pub fuzzy_contents: Option<&'a str>,
4704 /// Skips words that start with a digit.
4705 pub skip_digits: bool,
    /// The buffer offset range within which to look for words.
4707 pub range: Range<usize>,
4708}
4709
4710fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4711 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4712}
4713
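/// Computes the indentation at the start of the given character stream: the number of
/// leading spaces and tabs, with the kind taken from the first whitespace character.
///
/// A small illustration:
///
/// ```ignore
/// let indent = indent_size_for_text("    let x = 1;".chars());
/// // indent.len == 4, indent.kind == IndentKind::Space
/// ```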
4714fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4715 let mut result = IndentSize::spaces(0);
4716 for c in text {
4717 let kind = match c {
4718 ' ' => IndentKind::Space,
4719 '\t' => IndentKind::Tab,
4720 _ => break,
4721 };
4722 if result.len == 0 {
4723 result.kind = kind;
4724 }
4725 result.len += 1;
4726 }
4727 result
4728}
4729
4730impl Clone for BufferSnapshot {
4731 fn clone(&self) -> Self {
4732 Self {
4733 text: self.text.clone(),
4734 syntax: self.syntax.clone(),
4735 file: self.file.clone(),
4736 remote_selections: self.remote_selections.clone(),
4737 diagnostics: self.diagnostics.clone(),
4738 language: self.language.clone(),
4739 non_text_state_update_count: self.non_text_state_update_count,
4740 }
4741 }
4742}
4743
4744impl Deref for BufferSnapshot {
4745 type Target = text::BufferSnapshot;
4746
4747 fn deref(&self) -> &Self::Target {
4748 &self.text
4749 }
4750}
4751
4752unsafe impl Send for BufferChunks<'_> {}
4753
4754impl<'a> BufferChunks<'a> {
4755 pub(crate) fn new(
4756 text: &'a Rope,
4757 range: Range<usize>,
4758 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4759 diagnostics: bool,
4760 buffer_snapshot: Option<&'a BufferSnapshot>,
4761 ) -> Self {
4762 let mut highlights = None;
4763 if let Some((captures, highlight_maps)) = syntax {
4764 highlights = Some(BufferChunkHighlights {
4765 captures,
4766 next_capture: None,
4767 stack: Default::default(),
4768 highlight_maps,
4769 })
4770 }
4771
4772 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4773 let chunks = text.chunks_in_range(range.clone());
4774
4775 let mut this = BufferChunks {
4776 range,
4777 buffer_snapshot,
4778 chunks,
4779 diagnostic_endpoints,
4780 error_depth: 0,
4781 warning_depth: 0,
4782 information_depth: 0,
4783 hint_depth: 0,
4784 unnecessary_depth: 0,
4785 underline: true,
4786 highlights,
4787 };
4788 this.initialize_diagnostic_endpoints();
4789 this
4790 }
4791
    /// Seeks to the given byte range in the buffer.
4793 pub fn seek(&mut self, range: Range<usize>) {
4794 let old_range = std::mem::replace(&mut self.range, range.clone());
4795 self.chunks.set_range(self.range.clone());
4796 if let Some(highlights) = self.highlights.as_mut() {
4797 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4798 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4799 highlights
4800 .stack
4801 .retain(|(end_offset, _)| *end_offset > range.start);
4802 if let Some(capture) = &highlights.next_capture
4803 && range.start >= capture.node.start_byte()
4804 {
4805 let next_capture_end = capture.node.end_byte();
4806 if range.start < next_capture_end {
4807 highlights.stack.push((
4808 next_capture_end,
4809 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4810 ));
4811 }
4812 highlights.next_capture.take();
4813 }
4814 } else if let Some(snapshot) = self.buffer_snapshot {
4815 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4816 *highlights = BufferChunkHighlights {
4817 captures,
4818 next_capture: None,
4819 stack: Default::default(),
4820 highlight_maps,
4821 };
4822 } else {
4823 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4824 // Seeking such BufferChunks is not supported.
4825 debug_assert!(
4826 false,
4827 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4828 );
4829 }
4830
4831 highlights.captures.set_byte_range(self.range.clone());
4832 self.initialize_diagnostic_endpoints();
4833 }
4834 }
4835
4836 fn initialize_diagnostic_endpoints(&mut self) {
4837 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4838 && let Some(buffer) = self.buffer_snapshot
4839 {
4840 let mut diagnostic_endpoints = Vec::new();
4841 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4842 diagnostic_endpoints.push(DiagnosticEndpoint {
4843 offset: entry.range.start,
4844 is_start: true,
4845 severity: entry.diagnostic.severity,
4846 is_unnecessary: entry.diagnostic.is_unnecessary,
4847 underline: entry.diagnostic.underline,
4848 });
4849 diagnostic_endpoints.push(DiagnosticEndpoint {
4850 offset: entry.range.end,
4851 is_start: false,
4852 severity: entry.diagnostic.severity,
4853 is_unnecessary: entry.diagnostic.is_unnecessary,
4854 underline: entry.diagnostic.underline,
4855 });
4856 }
4857 diagnostic_endpoints
4858 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4859 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4860 self.hint_depth = 0;
4861 self.error_depth = 0;
4862 self.warning_depth = 0;
4863 self.information_depth = 0;
4864 }
4865 }
4866
4867 /// The current byte offset in the buffer.
4868 pub fn offset(&self) -> usize {
4869 self.range.start
4870 }
4871
4872 pub fn range(&self) -> Range<usize> {
4873 self.range.clone()
4874 }
4875
4876 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4877 let depth = match endpoint.severity {
4878 DiagnosticSeverity::ERROR => &mut self.error_depth,
4879 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4880 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4881 DiagnosticSeverity::HINT => &mut self.hint_depth,
4882 _ => return,
4883 };
4884 if endpoint.is_start {
4885 *depth += 1;
4886 } else {
4887 *depth -= 1;
4888 }
4889
4890 if endpoint.is_unnecessary {
4891 if endpoint.is_start {
4892 self.unnecessary_depth += 1;
4893 } else {
4894 self.unnecessary_depth -= 1;
4895 }
4896 }
4897 }
4898
4899 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4900 if self.error_depth > 0 {
4901 Some(DiagnosticSeverity::ERROR)
4902 } else if self.warning_depth > 0 {
4903 Some(DiagnosticSeverity::WARNING)
4904 } else if self.information_depth > 0 {
4905 Some(DiagnosticSeverity::INFORMATION)
4906 } else if self.hint_depth > 0 {
4907 Some(DiagnosticSeverity::HINT)
4908 } else {
4909 None
4910 }
4911 }
4912
4913 fn current_code_is_unnecessary(&self) -> bool {
4914 self.unnecessary_depth > 0
4915 }
4916}
4917
4918impl<'a> Iterator for BufferChunks<'a> {
4919 type Item = Chunk<'a>;
4920
4921 fn next(&mut self) -> Option<Self::Item> {
4922 let mut next_capture_start = usize::MAX;
4923 let mut next_diagnostic_endpoint = usize::MAX;
4924
4925 if let Some(highlights) = self.highlights.as_mut() {
4926 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4927 if *parent_capture_end <= self.range.start {
4928 highlights.stack.pop();
4929 } else {
4930 break;
4931 }
4932 }
4933
4934 if highlights.next_capture.is_none() {
4935 highlights.next_capture = highlights.captures.next();
4936 }
4937
4938 while let Some(capture) = highlights.next_capture.as_ref() {
4939 if self.range.start < capture.node.start_byte() {
4940 next_capture_start = capture.node.start_byte();
4941 break;
4942 } else {
4943 let highlight_id =
4944 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4945 highlights
4946 .stack
4947 .push((capture.node.end_byte(), highlight_id));
4948 highlights.next_capture = highlights.captures.next();
4949 }
4950 }
4951 }
4952
4953 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4954 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4955 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4956 if endpoint.offset <= self.range.start {
4957 self.update_diagnostic_depths(endpoint);
4958 diagnostic_endpoints.next();
4959 self.underline = endpoint.underline;
4960 } else {
4961 next_diagnostic_endpoint = endpoint.offset;
4962 break;
4963 }
4964 }
4965 }
4966 self.diagnostic_endpoints = diagnostic_endpoints;
4967
4968 if let Some(ChunkBitmaps {
4969 text: chunk,
4970 chars: chars_map,
4971 tabs,
4972 }) = self.chunks.peek_tabs()
4973 {
4974 let chunk_start = self.range.start;
4975 let mut chunk_end = (self.chunks.offset() + chunk.len())
4976 .min(next_capture_start)
4977 .min(next_diagnostic_endpoint);
4978 let mut highlight_id = None;
4979 if let Some(highlights) = self.highlights.as_ref()
4980 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
4981 {
4982 chunk_end = chunk_end.min(*parent_capture_end);
4983 highlight_id = Some(*parent_highlight_id);
4984 }
4985
4986 let slice =
4987 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4988 let bit_end = chunk_end - self.chunks.offset();
4989
4990 let mask = if bit_end >= 128 {
4991 u128::MAX
4992 } else {
4993 (1u128 << bit_end) - 1
4994 };
4995 let tabs = (tabs >> (chunk_start - self.chunks.offset())) & mask;
4996 let chars_map = (chars_map >> (chunk_start - self.chunks.offset())) & mask;
4997
4998 self.range.start = chunk_end;
4999 if self.range.start == self.chunks.offset() + chunk.len() {
5000 self.chunks.next().unwrap();
5001 }
5002
5003 Some(Chunk {
5004 text: slice,
5005 syntax_highlight_id: highlight_id,
5006 underline: self.underline,
5007 diagnostic_severity: self.current_diagnostic_severity(),
5008 is_unnecessary: self.current_code_is_unnecessary(),
5009 tabs,
5010 chars: chars_map,
5011 ..Chunk::default()
5012 })
5013 } else {
5014 None
5015 }
5016 }
5017}
5018
5019impl operation_queue::Operation for Operation {
5020 fn lamport_timestamp(&self) -> clock::Lamport {
5021 match self {
5022 Operation::Buffer(_) => {
5023 unreachable!("buffer operations should never be deferred at this layer")
5024 }
5025 Operation::UpdateDiagnostics {
5026 lamport_timestamp, ..
5027 }
5028 | Operation::UpdateSelections {
5029 lamport_timestamp, ..
5030 }
5031 | Operation::UpdateCompletionTriggers {
5032 lamport_timestamp, ..
5033 }
5034 | Operation::UpdateLineEnding {
5035 lamport_timestamp, ..
5036 } => *lamport_timestamp,
5037 }
5038 }
5039}
5040
5041impl Default for Diagnostic {
5042 fn default() -> Self {
5043 Self {
5044 source: Default::default(),
5045 source_kind: DiagnosticSourceKind::Other,
5046 code: None,
5047 code_description: None,
5048 severity: DiagnosticSeverity::ERROR,
5049 message: Default::default(),
5050 markdown: None,
5051 group_id: 0,
5052 is_primary: false,
5053 is_disk_based: false,
5054 is_unnecessary: false,
5055 underline: true,
5056 data: None,
5057 }
5058 }
5059}
5060
5061impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5063 pub fn spaces(len: u32) -> Self {
5064 Self {
5065 len,
5066 kind: IndentKind::Space,
5067 }
5068 }
5069
5070 /// Returns an [`IndentSize`] representing a tab.
5071 pub fn tab() -> Self {
5072 Self {
5073 len: 1,
5074 kind: IndentKind::Tab,
5075 }
5076 }
5077
5078 /// An iterator over the characters represented by this [`IndentSize`].
5079 pub fn chars(&self) -> impl Iterator<Item = char> {
5080 iter::repeat(self.char()).take(self.len as usize)
5081 }
5082
5083 /// The character representation of this [`IndentSize`].
5084 pub fn char(&self) -> char {
5085 match self.kind {
5086 IndentKind::Space => ' ',
5087 IndentKind::Tab => '\t',
5088 }
5089 }
5090
5091 /// Consumes the current [`IndentSize`] and returns a new one that has
5092 /// been shrunk or enlarged by the given size along the given direction.
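    ///
    /// A small illustration:
    ///
    /// ```ignore
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(2));
    /// // grown.len == 6
    /// let unchanged = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::tab());
    /// // Mismatched kinds leave the size unchanged: unchanged.len == 4
    /// ```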
5093 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5094 match direction {
5095 Ordering::Less => {
5096 if self.kind == size.kind && self.len >= size.len {
5097 self.len -= size.len;
5098 }
5099 }
5100 Ordering::Equal => {}
5101 Ordering::Greater => {
5102 if self.len == 0 {
5103 self = size;
5104 } else if self.kind == size.kind {
5105 self.len += size.len;
5106 }
5107 }
5108 }
5109 self
5110 }
5111
5112 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5113 match self.kind {
5114 IndentKind::Space => self.len as usize,
5115 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5116 }
5117 }
5118}
5119
5120#[cfg(any(test, feature = "test-support"))]
5121pub struct TestFile {
5122 pub path: Arc<Path>,
5123 pub root_name: String,
5124 pub local_root: Option<PathBuf>,
5125}
5126
5127#[cfg(any(test, feature = "test-support"))]
5128impl File for TestFile {
5129 fn path(&self) -> &Arc<Path> {
5130 &self.path
5131 }
5132
5133 fn full_path(&self, _: &gpui::App) -> PathBuf {
5134 PathBuf::from(&self.root_name).join(self.path.as_ref())
5135 }
5136
5137 fn as_local(&self) -> Option<&dyn LocalFile> {
5138 if self.local_root.is_some() {
5139 Some(self)
5140 } else {
5141 None
5142 }
5143 }
5144
5145 fn disk_state(&self) -> DiskState {
5146 unimplemented!()
5147 }
5148
5149 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
5150 self.path().file_name().unwrap_or(self.root_name.as_ref())
5151 }
5152
5153 fn worktree_id(&self, _: &App) -> WorktreeId {
5154 WorktreeId::from_usize(0)
5155 }
5156
5157 fn to_proto(&self, _: &App) -> rpc::proto::File {
5158 unimplemented!()
5159 }
5160
5161 fn is_private(&self) -> bool {
5162 false
5163 }
5164}
5165
5166#[cfg(any(test, feature = "test-support"))]
5167impl LocalFile for TestFile {
5168 fn abs_path(&self, _cx: &App) -> PathBuf {
5169 PathBuf::from(self.local_root.as_ref().unwrap())
5170 .join(&self.root_name)
5171 .join(self.path.as_ref())
5172 }
5173
5174 fn load(&self, _cx: &App) -> Task<Result<String>> {
5175 unimplemented!()
5176 }
5177
5178 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5179 unimplemented!()
5180 }
5181}
5182
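/// Groups an iterator of `u32` values into ranges of consecutive values, starting a new
/// range whenever a value is non-consecutive or the current range reaches `max_len`.
///
/// A small illustration of the expected grouping:
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100).collect();
/// // ranges == [1..4, 5..7]
/// ```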
5183pub(crate) fn contiguous_ranges(
5184 values: impl Iterator<Item = u32>,
5185 max_len: usize,
5186) -> impl Iterator<Item = Range<u32>> {
5187 let mut values = values;
5188 let mut current_range: Option<Range<u32>> = None;
5189 std::iter::from_fn(move || {
5190 loop {
5191 if let Some(value) = values.next() {
5192 if let Some(range) = &mut current_range
5193 && value == range.end
5194 && range.len() < max_len
5195 {
5196 range.end += 1;
5197 continue;
5198 }
5199
5200 let prev_range = current_range.clone();
5201 current_range = Some(value..(value + 1));
5202 if prev_range.is_some() {
5203 return prev_range;
5204 }
5205 } else {
5206 return current_range.take();
5207 }
5208 }
5209 })
5210}
5211
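/// Classifies characters as words, whitespace, or punctuation, optionally honoring the
/// word characters defined by a [`LanguageScope`].
///
/// A usage sketch:
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// // classifier.kind('x') == CharKind::Word
/// // classifier.kind('-') == CharKind::Punctuation
/// // classifier.ignore_punctuation(true).kind('-') == CharKind::Word
/// ```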
5212#[derive(Default, Debug)]
5213pub struct CharClassifier {
5214 scope: Option<LanguageScope>,
5215 for_completion: bool,
5216 ignore_punctuation: bool,
5217}
5218
5219impl CharClassifier {
5220 pub fn new(scope: Option<LanguageScope>) -> Self {
5221 Self {
5222 scope,
5223 for_completion: false,
5224 ignore_punctuation: false,
5225 }
5226 }
5227
5228 pub fn for_completion(self, for_completion: bool) -> Self {
5229 Self {
5230 for_completion,
5231 ..self
5232 }
5233 }
5234
5235 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5236 Self {
5237 ignore_punctuation,
5238 ..self
5239 }
5240 }
5241
5242 pub fn is_whitespace(&self, c: char) -> bool {
5243 self.kind(c) == CharKind::Whitespace
5244 }
5245
5246 pub fn is_word(&self, c: char) -> bool {
5247 self.kind(c) == CharKind::Word
5248 }
5249
5250 pub fn is_punctuation(&self, c: char) -> bool {
5251 self.kind(c) == CharKind::Punctuation
5252 }
5253
5254 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5255 if c.is_alphanumeric() || c == '_' {
5256 return CharKind::Word;
5257 }
5258
5259 if let Some(scope) = &self.scope {
5260 let characters = if self.for_completion {
5261 scope.completion_query_characters()
5262 } else {
5263 scope.word_characters()
5264 };
5265 if let Some(characters) = characters
5266 && characters.contains(&c)
5267 {
5268 return CharKind::Word;
5269 }
5270 }
5271
5272 if c.is_whitespace() {
5273 return CharKind::Whitespace;
5274 }
5275
5276 if ignore_punctuation {
5277 CharKind::Word
5278 } else {
5279 CharKind::Punctuation
5280 }
5281 }
5282
5283 pub fn kind(&self, c: char) -> CharKind {
5284 self.kind_with(c, self.ignore_punctuation)
5285 }
5286}
5287
5288/// Find all of the ranges of whitespace that occur at the ends of lines
5289/// in the given rope.
5290///
5291/// This could also be done with a regex search, but this implementation
5292/// avoids copying text.
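///
/// A small illustration of the expected output:
///
/// ```ignore
/// let rope = Rope::from("lorem  \nipsum\ndolor \t\n");
/// // Whitespace trails "lorem" and "dolor":
/// let ranges = trailing_whitespace_ranges(&rope); // [5..7, 19..21]
/// ```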
5293pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5294 let mut ranges = Vec::new();
5295
5296 let mut offset = 0;
5297 let mut prev_chunk_trailing_whitespace_range = 0..0;
5298 for chunk in rope.chunks() {
5299 let mut prev_line_trailing_whitespace_range = 0..0;
5300 for (i, line) in chunk.split('\n').enumerate() {
5301 let line_end_offset = offset + line.len();
5302 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5303 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5304
5305 if i == 0 && trimmed_line_len == 0 {
5306 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5307 }
5308 if !prev_line_trailing_whitespace_range.is_empty() {
5309 ranges.push(prev_line_trailing_whitespace_range);
5310 }
5311
5312 offset = line_end_offset + 1;
5313 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5314 }
5315
5316 offset -= 1;
5317 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5318 }
5319
5320 if !prev_chunk_trailing_whitespace_range.is_empty() {
5321 ranges.push(prev_chunk_trailing_whitespace_range);
5322 }
5323
5324 ranges
5325}